From 257ab726aaf3ba49fc5bbd2e9db0a24a0c462566 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 24 Apr 2024 15:26:18 +0200 Subject: [PATCH 001/361] Run the flake-regressions test suite --- .github/workflows/ci.yml | 28 +++++++++++++++++----------- scripts/flake-regressions.sh | 27 +++++++++++++++++++++++++++ 2 files changed, 44 insertions(+), 11 deletions(-) create mode 100755 scripts/flake-regressions.sh diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ca94ff956f0..be7d47787be 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -193,18 +193,24 @@ jobs: - uses: actions/checkout@v4 - uses: DeterminateSystems/nix-installer-action@main - uses: DeterminateSystems/magic-nix-cache-action@main - - run: nix build -L .#hydraJobs.tests.githubFlakes .#hydraJobs.tests.tarballFlakes .#hydraJobs.tests.functional_user + - run: nix build -L .#hydraJobs.tests.githubFlakes .#hydraJobs.tests.tarballFlakes - meson_build: - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest, macos-latest] - runs-on: ${{ matrix.os }} + flake_regressions: + needs: vm_tests + runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 + - name: Checkout nix + uses: actions/checkout@v4 + - name: Checkout flake-regressions + uses: actions/checkout@v4 + with: + repository: DeterminateSystems/flake-regressions + path: flake-regressions + - name: Checkout flake-regressions-data + uses: actions/checkout@v4 + with: + repository: DeterminateSystems/flake-regressions-data + path: flake-regressions/tests - uses: DeterminateSystems/nix-installer-action@main - uses: DeterminateSystems/magic-nix-cache-action@main - # Only meson packages that don't have a tests.run derivation. - # Those that have it are already built and tested as part of nix flake check. - - run: nix build -L .#hydraJobs.build.{nix-cmd,nix-main}.$(nix-instantiate --eval --expr builtins.currentSystem | sed -e 's/"//g') + - run: nix build --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH scripts/flake-regressions.sh diff --git a/scripts/flake-regressions.sh b/scripts/flake-regressions.sh new file mode 100755 index 00000000000..e6cfbfa24f9 --- /dev/null +++ b/scripts/flake-regressions.sh @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +set -e + +echo "Nix version:" +nix --version + +cd flake-regressions + +status=0 + +flakes=$(ls -d tests/*/*/* | head -n25) + +echo "Running flake tests..." + +for flake in $flakes; do + + if ! REGENERATE=0 ./eval-flake.sh $flake; then + status=1 + echo "❌ $flake" + else + echo "✅ $flake" + fi + +done + +exit "$status" From 9f4194376981711a737179a9c92bc472ac43e881 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 14 May 2024 15:58:37 +0200 Subject: [PATCH 002/361] flake-regressions.sh: Make the sort order deterministic --- scripts/flake-regressions.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/flake-regressions.sh b/scripts/flake-regressions.sh index e6cfbfa24f9..5cc55bf4f77 100755 --- a/scripts/flake-regressions.sh +++ b/scripts/flake-regressions.sh @@ -9,7 +9,7 @@ cd flake-regressions status=0 -flakes=$(ls -d tests/*/*/* | head -n25) +flakes=$(ls -d tests/*/*/* | sort | head -n25) echo "Running flake tests..." 
From 9b7a3205584b49449829048f17d23db4405cfc6a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 10 Jun 2024 14:49:43 +0200 Subject: [PATCH 003/361] Use FlakeHub cache --- .github/workflows/ci.yml | 189 ++++----------------------------------- 1 file changed, 17 insertions(+), 172 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index be7d47787be..08815f7f0c0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -4,12 +4,13 @@ on: pull_request: push: -permissions: read-all +permissions: + id-token: "write" + contents: "read" jobs: tests: - needs: [check_secrets] strategy: fail-fast: false matrix: @@ -17,186 +18,28 @@ jobs: runs-on: ${{ matrix.os }} timeout-minutes: 60 steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - uses: cachix/install-nix-action@V27 - with: - # The sandbox would otherwise be disabled by default on Darwin - extra_nix_config: "sandbox = true" - - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/cachix-action@v15 - if: needs.check_secrets.outputs.cachix == 'true' - with: - name: '${{ env.CACHIX_NAME }}' - signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}' - authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' - - if: matrix.os == 'ubuntu-latest' - run: | - free -h - swapon --show - swap=$(swapon --show --noheadings | head -n 1 | awk '{print $1}') - echo "Found swap: $swap" - sudo swapoff $swap - # resize it (fallocate) - sudo fallocate -l 10G $swap - sudo mkswap $swap - sudo swapon $swap - free -h - ( - while sleep 60; do - free -h - done - ) & - - run: nix --experimental-features 'nix-command flakes' flake check -L - - # Steps to test CI automation in your own fork. - # Cachix: - # 1. Sign-up for https://www.cachix.org/ - # 2. Create a cache for $githubuser-nix-install-tests - # 3. Create a cachix auth token and save it in https://github.com/$githubuser/nix/settings/secrets/actions in "Repository secrets" as CACHIX_AUTH_TOKEN - # Dockerhub: - # 1. Sign-up for https://hub.docker.com/ - # 2. Store your dockerhub username as DOCKERHUB_USERNAME in "Repository secrets" of your fork repository settings (https://github.com/$githubuser/nix/settings/secrets/actions) - # 3. 
Create an access token in https://hub.docker.com/settings/security and store it as DOCKERHUB_TOKEN in "Repository secrets" of your fork - check_secrets: - permissions: - contents: none - name: Check Cachix and Docker secrets present for installer tests - runs-on: ubuntu-latest - outputs: - cachix: ${{ steps.secret.outputs.cachix }} - docker: ${{ steps.secret.outputs.docker }} - steps: - - name: Check for secrets - id: secret - env: - _CACHIX_SECRETS: ${{ secrets.CACHIX_SIGNING_KEY }}${{ secrets.CACHIX_AUTH_TOKEN }} - _DOCKER_SECRETS: ${{ secrets.DOCKERHUB_USERNAME }}${{ secrets.DOCKERHUB_TOKEN }} - run: | - echo "::set-output name=cachix::${{ env._CACHIX_SECRETS != '' }}" - echo "::set-output name=docker::${{ env._DOCKER_SECRETS != '' }}" - - installer: - needs: [tests, check_secrets] - if: github.event_name == 'push' && needs.check_secrets.outputs.cachix == 'true' - runs-on: ubuntu-latest - outputs: - installerURL: ${{ steps.prepare-installer.outputs.installerURL }} - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/install-nix-action@V27 - with: - install_url: https://releases.nixos.org/nix/nix-2.20.3/install - - uses: cachix/cachix-action@v15 - with: - name: '${{ env.CACHIX_NAME }}' - signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}' - authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' - cachixArgs: '-v' - - id: prepare-installer - run: scripts/prepare-installer-for-github-actions - - installer_test: - needs: [installer, check_secrets] - if: github.event_name == 'push' && needs.check_secrets.outputs.cachix == 'true' - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest, macos-latest] - runs-on: ${{ matrix.os }} - steps: - - uses: actions/checkout@v4 - - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/install-nix-action@V27 - with: - install_url: '${{needs.installer.outputs.installerURL}}' - install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve" - - run: sudo apt install fish zsh - if: matrix.os == 'ubuntu-latest' - - run: brew install fish - if: matrix.os == 'macos-latest' - - run: exec bash -c "nix-instantiate -E 'builtins.currentTime' --eval" - - run: exec sh -c "nix-instantiate -E 'builtins.currentTime' --eval" - - run: exec zsh -c "nix-instantiate -E 'builtins.currentTime' --eval" - - run: exec fish -c "nix-instantiate -E 'builtins.currentTime' --eval" - - run: exec bash -c "nix-channel --add https://releases.nixos.org/nixos/unstable/nixos-23.05pre466020.60c1d71f2ba nixpkgs" - - run: exec bash -c "nix-channel --update && nix-env -iA nixpkgs.hello && hello" - - docker_push_image: - needs: [check_secrets, tests] - permissions: - contents: read - packages: write - if: >- - github.event_name == 'push' && - github.ref_name == 'master' && - needs.check_secrets.outputs.cachix == 'true' && - needs.check_secrets.outputs.docker == 'true' - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - uses: cachix/install-nix-action@V27 - with: - install_url: https://releases.nixos.org/nix/nix-2.20.3/install - - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV - - uses: cachix/cachix-action@v15 - if: needs.check_secrets.outputs.cachix == 'true' - 
with: - name: '${{ env.CACHIX_NAME }}' - signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}' - authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' - - run: nix --experimental-features 'nix-command flakes' build .#dockerImage -L - - run: docker load -i ./result/image.tar.gz - - run: docker tag nix:$NIX_VERSION ${{ secrets.DOCKERHUB_USERNAME }}/nix:$NIX_VERSION - - run: docker tag nix:$NIX_VERSION ${{ secrets.DOCKERHUB_USERNAME }}/nix:master - # We'll deploy the newly built image to both Docker Hub and Github Container Registry. - # - # Push to Docker Hub first - - name: Login to Docker Hub - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - run: docker push ${{ secrets.DOCKERHUB_USERNAME }}/nix:$NIX_VERSION - - run: docker push ${{ secrets.DOCKERHUB_USERNAME }}/nix:master - # Push to GitHub Container Registry as well - - name: Login to GitHub Container Registry - uses: docker/login-action@v3 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - name: Push image - run: | - IMAGE_ID=ghcr.io/${{ github.repository_owner }}/nix - # Change all uppercase to lowercase - IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]') - - docker tag nix:$NIX_VERSION $IMAGE_ID:$NIX_VERSION - docker tag nix:$NIX_VERSION $IMAGE_ID:latest - docker push $IMAGE_ID:$NIX_VERSION - docker push $IMAGE_ID:latest - # deprecated 2024-02-24 - docker tag nix:$NIX_VERSION $IMAGE_ID:master - docker push $IMAGE_ID:master + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: DeterminateSystems/nix-installer-action@main + with: + flakehub: true + - uses: DeterminateSystems/magic-nix-cache-action@main + - run: nix --experimental-features 'nix-command flakes' flake check -L vm_tests: + needs: tests runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 - uses: DeterminateSystems/nix-installer-action@main + with: + flakehub: true - uses: DeterminateSystems/magic-nix-cache-action@main - run: nix build -L .#hydraJobs.tests.githubFlakes .#hydraJobs.tests.tarballFlakes flake_regressions: - needs: vm_tests + needs: tests runs-on: ubuntu-22.04 steps: - name: Checkout nix @@ -212,5 +55,7 @@ jobs: repository: DeterminateSystems/flake-regressions-data path: flake-regressions/tests - uses: DeterminateSystems/nix-installer-action@main + with: + flakehub: true - uses: DeterminateSystems/magic-nix-cache-action@main - run: nix build --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH scripts/flake-regressions.sh From 58bc627a6ca8e52b3c0fd27a107d7a5a74865879 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 10 Jun 2024 15:16:41 +0200 Subject: [PATCH 004/361] Fix spellcheck --- scripts/flake-regressions.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/flake-regressions.sh b/scripts/flake-regressions.sh index 5cc55bf4f77..d765311345c 100755 --- a/scripts/flake-regressions.sh +++ b/scripts/flake-regressions.sh @@ -9,13 +9,13 @@ cd flake-regressions status=0 -flakes=$(ls -d tests/*/*/* | sort | head -n25) +flakes=$(find tests -mindepth 3 -maxdepth 3 -type d -not -path '*/.*' | sort | head -n25) echo "Running flake tests..." for flake in $flakes; do - if ! REGENERATE=0 ./eval-flake.sh $flake; then + if ! 
REGENERATE=0 ./eval-flake.sh "$flake"; then status=1 echo "❌ $flake" else From f218f0e93ad88c7baa445a58c0d3b5489031d025 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 10 Jun 2024 15:21:41 +0200 Subject: [PATCH 005/361] Try without fetch-depth:0 --- .github/workflows/ci.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 08815f7f0c0..6bd09c8ebd8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -19,8 +19,6 @@ jobs: timeout-minutes: 60 steps: - uses: actions/checkout@v4 - with: - fetch-depth: 0 - uses: DeterminateSystems/nix-installer-action@main with: flakehub: true From def2c29e97e3238fc0a1b758fab2eda461ea7c1b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 12 Jun 2024 16:03:34 +0200 Subject: [PATCH 006/361] Distinguish Determinate Nix in --version output --- src/libmain/shared.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index fc55fe3f1b2..d4870c3f257 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -293,7 +293,7 @@ void parseCmdLine(const std::string & programName, const Strings & args, void printVersion(const std::string & programName) { - std::cout << fmt("%1% (Nix) %2%", programName, nixVersion) << std::endl; + std::cout << fmt("%1% (Determinate Nix) %2%", programName, nixVersion) << std::endl; if (verbosity > lvlInfo) { Strings cfg; #if HAVE_BOEHMGC From 261a2e58fe9fee6856df3789540d176cb9d9ed9a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 12 Jun 2024 16:09:30 +0200 Subject: [PATCH 007/361] Add plumbing for the DetSys installer flake --- flake.nix | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/flake.nix b/flake.nix index d83c2ecad36..e7604588121 100644 --- a/flake.nix +++ b/flake.nix @@ -22,7 +22,6 @@ outputs = inputs@{ self, nixpkgs, nixpkgs-regression, libgit2, ... }: - let inherit (nixpkgs) lib; @@ -357,5 +356,9 @@ default = self.devShells.${system}.native-stdenvPackages; } ); - }; + + # Expected by the DeterminateSystems/nix-installer flake. 
+ tarballs_indirect = forAllSystems (system: self.checks."${system}".binaryTarball); + tarballs_direct = forAllSystems (system: "${self.checks."${system}".binaryTarball}/nix-${self.packages."${system}".default.version}-${system}.tar.xz"); + }; } From b68d7396ca5f34f11cd32957413ea8ce5fbb3206 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 12 Jun 2024 16:36:06 +0200 Subject: [PATCH 008/361] Publish to FlakeHub --- .github/workflows/publish.yml | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 .github/workflows/publish.yml diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 00000000000..839ace59492 --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,20 @@ +name: Publish on FlakeHub + +on: + push: + tags: + - "v*.*.*" + +publish: + runs-on: ubuntu-latest + permissions: + contents: read + id-token: write + steps: + - uses: actions/checkout@v4 + - uses: DeterminateSystems/nix-installer-action@main + - uses: "DeterminateSystems/flakehub-push@main" + with: + visibility: "private" + name: "DeterminateSystems/nix-priv" + tag: "${{ github.ref_name }}" From 361a5783efa1f9f733fb50068ff3967856a64db3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 17 Jun 2024 15:22:44 +0200 Subject: [PATCH 009/361] Fix version check --- tests/functional/common/vars-and-functions.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/functional/common/vars-and-functions.sh b/tests/functional/common/vars-and-functions.sh index 4316a30d5ce..4a55da51588 100644 --- a/tests/functional/common/vars-and-functions.sh +++ b/tests/functional/common/vars-and-functions.sh @@ -182,10 +182,10 @@ if [[ $(uname) == Linux ]] && [[ -L /proc/self/ns/user ]] && unshare --user true fi isDaemonNewer () { - [[ -n "${NIX_DAEMON_PACKAGE:-}" ]] || return 0 - local requiredVersion="$1" - local daemonVersion=$($NIX_DAEMON_PACKAGE/bin/nix daemon --version | cut -d' ' -f3) - [[ $(nix eval --expr "builtins.compareVersions ''$daemonVersion'' ''$requiredVersion''") -ge 0 ]] + [[ -n "${NIX_DAEMON_PACKAGE:-}" ]] || return 0 + local requiredVersion="$1" + local daemonVersion=$($NIX_DAEMON_PACKAGE/bin/nix daemon --version | sed 's/.*) //') + [[ $(nix eval --expr "builtins.compareVersions ''$daemonVersion'' ''$requiredVersion''") -ge 0 ]] } skipTest () { From 245dbb7a61e47cf7673c21544cbc7e6158667900 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 17 Jun 2024 15:22:53 +0200 Subject: [PATCH 010/361] Revert "Add plumbing for the DetSys installer flake" This reverts commit 0f9ea197055eabe5fac3ef93e49ba5552668fa08. --- flake.nix | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/flake.nix b/flake.nix index e7604588121..d83c2ecad36 100644 --- a/flake.nix +++ b/flake.nix @@ -22,6 +22,7 @@ outputs = inputs@{ self, nixpkgs, nixpkgs-regression, libgit2, ... }: + let inherit (nixpkgs) lib; @@ -356,9 +357,5 @@ default = self.devShells.${system}.native-stdenvPackages; } ); - - # Expected by the DeterminateSystems/nix-installer flake. 
- tarballs_indirect = forAllSystems (system: self.checks."${system}".binaryTarball); - tarballs_direct = forAllSystems (system: "${self.checks."${system}".binaryTarball}/nix-${self.packages."${system}".default.version}-${system}.tar.xz"); - }; + }; } From 3a6fd22b4a2f74cca82fbd8769102e82294ff260 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 17 Jun 2024 17:09:19 +0200 Subject: [PATCH 011/361] Fix another version check --- tests/functional/store-info.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/store-info.sh b/tests/functional/store-info.sh index f37889fbb1e..306b2024d5a 100755 --- a/tests/functional/store-info.sh +++ b/tests/functional/store-info.sh @@ -8,7 +8,7 @@ STORE_INFO_JSON=$(nix store info --json) echo "$STORE_INFO" | grep "Store URL: ${NIX_REMOTE}" if [[ -v NIX_DAEMON_PACKAGE ]] && isDaemonNewer "2.7.0pre20220126"; then - DAEMON_VERSION=$($NIX_DAEMON_PACKAGE/bin/nix daemon --version | cut -d' ' -f3) + DAEMON_VERSION=$($NIX_DAEMON_PACKAGE/bin/nix daemon --version | sed 's/.*) //') echo "$STORE_INFO" | grep "Version: $DAEMON_VERSION" [[ "$(echo "$STORE_INFO_JSON" | jq -r ".version")" == "$DAEMON_VERSION" ]] fi From 590920eed2fbac2ef2d19dc2299d0dbcb279d24d Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Sun, 29 Oct 2023 21:50:35 +0000 Subject: [PATCH 012/361] Make the flakes experimental feature stable --- .github/workflows/ci.yml | 2 +- doc/manual/src/contributing/hacking.md | 3 +- src/libcmd/common-eval-args.cc | 2 - src/libcmd/installables.cc | 3 -- src/libexpr/primops/fetchTree.cc | 8 +-- src/libfetchers/github.cc | 5 -- src/libfetchers/indirect.cc | 5 -- src/libfetchers/path.cc | 5 -- src/libfetchers/registry.cc | 2 +- src/libflake/flake-settings.hh | 9 ++-- src/libflake/flake/flake.cc | 7 --- src/libutil/config.cc | 6 +-- src/libutil/config.hh | 2 +- src/libutil/experimental-features.cc | 7 ++- src/nix/flake.cc | 6 --- src/nix/main.cc | 1 - src/nix/nix.md | 6 +-- src/nix/repl.md | 2 +- tests/functional/ca/selfref-gc.sh | 2 +- tests/functional/common/init.sh | 3 +- tests/functional/config.sh | 4 +- tests/functional/experimental-features.sh | 60 +++++++++++------------ tests/functional/repl.sh | 6 +-- tests/nixos/github-flakes.nix | 2 +- tests/nixos/sourcehut-flakes.nix | 2 +- tests/nixos/tarball-flakes.nix | 2 +- tests/unit/libutil/config.cc | 4 +- 27 files changed, 59 insertions(+), 107 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6bd09c8ebd8..832aa3ff080 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,7 +23,7 @@ jobs: with: flakehub: true - uses: DeterminateSystems/magic-nix-cache-action@main - - run: nix --experimental-features 'nix-command flakes' flake check -L + - run: nix --experimental-features 'nix-command' flake check -L vm_tests: needs: tests diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index c128515e9ba..fc2d7221706 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -14,10 +14,9 @@ The following instructions assume you already have some version of Nix installed ## Building Nix with flakes -This section assumes you are using Nix with the [`flakes`] and [`nix-command`] experimental features enabled. +This section assumes you are using Nix with the experimental feature [`nix-command`] enabled. See the [Building Nix](#building-nix) section for equivalent instructions using stable Nix interfaces. 
-[`flakes`]: @docroot@/contributing/experimental-features.md#xp-feature-flakes [`nix-command`]: @docroot@/contributing/experimental-features.md#xp-nix-command To build all dependencies and start a shell in which all environment variables are set up so that those dependencies can be found: diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc index 62745b6815f..92e7bd67842 100644 --- a/src/libcmd/common-eval-args.cc +++ b/src/libcmd/common-eval-args.cc @@ -22,7 +22,6 @@ EvalSettings evalSettings { { "flake", [](ref store, std::string_view rest) { - experimentalFeatureSettings.require(Xp::Flakes); // FIXME `parseFlakeRef` should take a `std::string_view`. auto flakeRef = parseFlakeRef(std::string { rest }, {}, true, false); debug("fetching flake search path element '%s''", rest); @@ -229,7 +228,6 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * bas } else if (hasPrefix(s, "flake:")) { - experimentalFeatureSettings.require(Xp::Flakes); auto flakeRef = parseFlakeRef(std::string(s.substr(6)), {}, true, false); auto storePath = flakeRef.resolve(state.store).fetchTree(state.store).first; return state.rootPath(CanonPath(state.store->toRealPath(storePath))); diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index 6835c512c1c..eb7048d3930 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -394,9 +394,6 @@ void completeFlakeRefWithFragment( void completeFlakeRef(AddCompletions & completions, ref store, std::string_view prefix) { - if (!experimentalFeatureSettings.isEnabled(Xp::Flakes)) - return; - if (prefix == "") completions.add("."); diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index 567b73f9a1b..50935a61ab3 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -163,15 +163,11 @@ static void fetchTree( } input = fetchers::Input::fromAttrs(std::move(attrs)); } else { - if (!experimentalFeatureSettings.isEnabled(Xp::Flakes)) - state.error( - "passing a string argument to 'fetchTree' requires the 'flakes' experimental feature" - ).atPos(pos).debugThrow(); input = fetchers::Input::fromURL(url); } } - if (!state.settings.pureEval && !input.isDirect() && experimentalFeatureSettings.isEnabled(Xp::Flakes)) + if (!state.settings.pureEval && !input.isDirect()) input = lookupInRegistries(state.store, input).first; if (state.settings.pureEval && !input.isLocked()) { @@ -383,7 +379,6 @@ static RegisterPrimOp primop_fetchTree({ - `"mercurial"` *input* can also be a [URL-like reference](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references). - The additional input types and the URL-like syntax requires the [`flakes` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-flakes) to be enabled. 
> **Example** > @@ -420,7 +415,6 @@ static RegisterPrimOp primop_fetchTree({ > ``` )", .fun = prim_fetchTree, - .experimentalFeature = Xp::FetchTree, }); static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v, diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index ddb41e63f9f..d878fb89568 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -299,11 +299,6 @@ struct GitArchiveInputScheme : InputScheme input.getNarHash().has_value()); } - std::optional experimentalFeature() const override - { - return Xp::Flakes; - } - std::optional getFingerprint(ref store, const Input & input) const override { if (auto rev = input.getRev()) diff --git a/src/libfetchers/indirect.cc b/src/libfetchers/indirect.cc index ba507863138..e271eabc651 100644 --- a/src/libfetchers/indirect.cc +++ b/src/libfetchers/indirect.cc @@ -102,11 +102,6 @@ struct IndirectInputScheme : InputScheme throw Error("indirect input '%s' cannot be fetched directly", input.to_string()); } - std::optional experimentalFeature() const override - { - return Xp::Flakes; - } - bool isDirect(const Input & input) const override { return false; } }; diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc index 68958d55971..29ca25ce6d2 100644 --- a/src/libfetchers/path.cc +++ b/src/libfetchers/path.cc @@ -174,11 +174,6 @@ struct PathInputScheme : InputScheme return std::nullopt; } } - - std::optional experimentalFeature() const override - { - return Xp::Flakes; - } }; static auto rPathInputScheme = OnStartup([] { registerInputScheme(std::make_unique()); }); diff --git a/src/libfetchers/registry.cc b/src/libfetchers/registry.cc index 52cbac5e0a0..d6993417323 100644 --- a/src/libfetchers/registry.cc +++ b/src/libfetchers/registry.cc @@ -156,7 +156,7 @@ struct RegistrySettings : Config When empty, disables the global flake registry. )", - {}, true, Xp::Flakes}; + {}, true}; }; RegistrySettings registrySettings; diff --git a/src/libflake/flake-settings.hh b/src/libflake/flake-settings.hh index f97c175e8a3..4f986aefd0d 100644 --- a/src/libflake/flake-settings.hh +++ b/src/libflake/flake-settings.hh @@ -22,8 +22,7 @@ struct FlakeSettings : public Config "use-registries", "Whether to use flake registries to resolve flake references.", {}, - true, - Xp::Flakes}; + true}; Setting acceptFlakeConfig{ this, @@ -31,8 +30,7 @@ struct FlakeSettings : public Config "accept-flake-config", "Whether to accept nix configuration from a flake without prompting.", {}, - true, - Xp::Flakes}; + true}; Setting commitLockFileSummary{ this, @@ -43,8 +41,7 @@ struct FlakeSettings : public Config empty, the summary is generated based on the action performed. )", {"commit-lockfile-summary"}, - true, - Xp::Flakes}; + true}; }; // TODO: don't use a global variable. 
diff --git a/src/libflake/flake/flake.cc b/src/libflake/flake/flake.cc index 6f47b599229..21acb93eee2 100644 --- a/src/libflake/flake/flake.cc +++ b/src/libflake/flake/flake.cc @@ -343,8 +343,6 @@ LockedFlake lockFlake( const FlakeRef & topRef, const LockFlags & lockFlags) { - experimentalFeatureSettings.require(Xp::Flakes); - FlakeCache flakeCache; auto useRegistries = lockFlags.useRegistries.value_or(flakeSettings.useRegistries); @@ -744,8 +742,6 @@ void callFlake(EvalState & state, const LockedFlake & lockedFlake, Value & vRes) { - experimentalFeatureSettings.require(Xp::Flakes); - auto [lockFileStr, keyMap] = lockedFlake.lockFile.to_string(); auto overrides = state.buildBindings(lockedFlake.nodePaths.size()); @@ -837,7 +833,6 @@ static RegisterPrimOp r2({ ``` )", .fun = prim_getFlake, - .experimentalFeature = Xp::Flakes, }); static void prim_parseFlakeRef( @@ -881,7 +876,6 @@ static RegisterPrimOp r3({ ``` )", .fun = prim_parseFlakeRef, - .experimentalFeature = Xp::Flakes, }); @@ -938,7 +932,6 @@ static RegisterPrimOp r4({ ``` )", .fun = prim_flakeRefToString, - .experimentalFeature = Xp::Flakes, }); } diff --git a/src/libutil/config.cc b/src/libutil/config.cc index 907ca7fc149..8abf4bc2362 100644 --- a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -341,11 +341,9 @@ template<> std::set BaseSetting res; for (auto & s : tokenizeString(str)) { - if (auto thisXpFeature = parseExperimentalFeature(s); thisXpFeature) { + if (auto thisXpFeature = parseExperimentalFeature(s); thisXpFeature) res.insert(thisXpFeature.value()); - if (thisXpFeature.value() == Xp::Flakes) - res.insert(Xp::FetchTree); - } else + else warn("unknown experimental feature '%s'", s); } return res; diff --git a/src/libutil/config.hh b/src/libutil/config.hh index 1952ba1b8d7..a30d1b1ec64 100644 --- a/src/libutil/config.hh +++ b/src/libutil/config.hh @@ -386,7 +386,7 @@ struct ExperimentalFeatureSettings : Config { Example: ``` - experimental-features = nix-command flakes + experimental-features = nix-command ``` The following experimental features are available: diff --git a/src/libutil/experimental-features.cc b/src/libutil/experimental-features.cc index 1c080e372f6..8ecf1e92b42 100644 --- a/src/libutil/experimental-features.cc +++ b/src/libutil/experimental-features.cc @@ -74,8 +74,9 @@ constexpr std::array xpFeatureDetails .tag = Xp::Flakes, .name = "flakes", .description = R"( - Enable flakes. See the manual entry for [`nix - flake`](@docroot@/command-ref/new-cli/nix3-flake.md) for details. + *Enabled for Determinate Nix Installer users since 2.19* + + See the manual entry for [`nix flake`](@docroot@/command-ref/new-cli/nix3-flake.md) for details. )", .trackingUrl = "https://github.com/NixOS/nix/milestone/27", }, @@ -83,6 +84,8 @@ constexpr std::array xpFeatureDetails .tag = Xp::FetchTree, .name = "fetch-tree", .description = R"( + *Enabled for Determinate Nix Installer users since 2.24* + Enable the use of the [`fetchTree`](@docroot@/language/builtins.md#builtins-fetchTree) built-in function in the Nix language. `fetchTree` exposes a generic interface for fetching remote file system trees from different types of remote sources. 
diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 84c659023a5..a86e36206b7 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -1462,12 +1462,6 @@ struct CmdFlake : NixMultiCommand #include "flake.md" ; } - - void run() override - { - experimentalFeatureSettings.require(Xp::Flakes); - NixMultiCommand::run(); - } }; static auto rCmdFlake = registerCommand("flake"); diff --git a/src/nix/main.cc b/src/nix/main.cc index c90bb25a7d3..85be80da452 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -412,7 +412,6 @@ void mainWrapped(int argc, char * * argv) if (argc == 2 && std::string(argv[1]) == "__dump-language") { experimentalFeatureSettings.experimentalFeatures = { - Xp::Flakes, Xp::FetchClosure, Xp::DynamicDerivations, Xp::FetchTree, diff --git a/src/nix/nix.md b/src/nix/nix.md index 4464bef370c..2f59db3afb5 100644 --- a/src/nix/nix.md +++ b/src/nix/nix.md @@ -69,11 +69,9 @@ That is, Nix will operate on the default flake output attribute of the flake in ### Flake output attribute > **Warning** \ -> Flake output attribute installables depend on both the -> [`flakes`](@docroot@/contributing/experimental-features.md#xp-feature-flakes) -> and +> Flake output attribute installables depend on the > [`nix-command`](@docroot@/contributing/experimental-features.md#xp-feature-nix-command) -> experimental features, and subject to change without notice. +> experimental feature, and subject to change without notice. Example: `nixpkgs#hello` diff --git a/src/nix/repl.md b/src/nix/repl.md index 32c08e24b24..e608dabf6f9 100644 --- a/src/nix/repl.md +++ b/src/nix/repl.md @@ -36,7 +36,7 @@ R""( Loading Installable ''... Added 1 variables. - # nix repl --extra-experimental-features 'flakes' nixpkgs + # nix repl nixpkgs Loading Installable 'flake:nixpkgs#'... Added 5 variables. diff --git a/tests/functional/ca/selfref-gc.sh b/tests/functional/ca/selfref-gc.sh index 24877889459..588515db521 100755 --- a/tests/functional/ca/selfref-gc.sh +++ b/tests/functional/ca/selfref-gc.sh @@ -4,7 +4,7 @@ source common.sh requireDaemonNewerThan "2.4pre20210626" -enableFeatures "ca-derivations nix-command flakes" +enableFeatures "ca-derivations nix-command" export NIX_TESTS_CA_BY_DEFAULT=1 cd .. diff --git a/tests/functional/common/init.sh b/tests/functional/common/init.sh index d33ad5d5744..482d62cc4cd 100755 --- a/tests/functional/common/init.sh +++ b/tests/functional/common/init.sh @@ -12,7 +12,7 @@ if isTestOnNixOS; then ! test -e "$test_nix_conf" cat > "$test_nix_conf_dir/nix.conf" < "$NIX_CONF_DIR"/nix.conf.extra <"$TEST_ROOT"/stdout 2>"$TEST_ROOT"/stderr + $gatedSetting = true +" expect 1 nix config show $gatedSetting 1>"$TEST_ROOT"/stdout 2>"$TEST_ROOT"/stderr [[ $(cat "$TEST_ROOT/stdout") = '' ]] -grepQuiet "Ignoring setting 'accept-flake-config' because experimental feature 'flakes' is not enabled" "$TEST_ROOT/stderr" -grepQuiet "error: could not find setting 'accept-flake-config'" "$TEST_ROOT/stderr" +grepQuiet "error: could not find setting '$gatedSetting'" "$TEST_ROOT/stderr" -# 'flakes' experimental-feature is disabled after, ignore and warn -NIX_CONFIG=' - accept-flake-config = true +# Experimental feature is disabled after, ignore and warn. 
+NIX_CONFIG=" + $gatedSetting = true experimental-features = nix-command -' expect 1 nix config show accept-flake-config 1>"$TEST_ROOT"/stdout 2>"$TEST_ROOT"/stderr +" expect 1 nix config show $gatedSetting 1>"$TEST_ROOT"/stdout 2>"$TEST_ROOT"/stderr [[ $(cat "$TEST_ROOT/stdout") = '' ]] -grepQuiet "Ignoring setting 'accept-flake-config' because experimental feature 'flakes' is not enabled" "$TEST_ROOT/stderr" -grepQuiet "error: could not find setting 'accept-flake-config'" "$TEST_ROOT/stderr" +grepQuiet "error: could not find setting '$gatedSetting'" "$TEST_ROOT/stderr" -# 'flakes' experimental-feature is enabled before, process -NIX_CONFIG=' - experimental-features = nix-command flakes - accept-flake-config = true -' nix config show accept-flake-config 1>"$TEST_ROOT"/stdout 2>"$TEST_ROOT"/stderr +# Experimental feature is enabled before, process. +NIX_CONFIG=" + experimental-features = nix-command $xpFeature + $gatedSetting = true +" nix config show $gatedSetting 1>"$TEST_ROOT"/stdout 2>"$TEST_ROOT"/stderr grepQuiet "true" "$TEST_ROOT/stdout" -grepQuietInverse "Ignoring setting 'accept-flake-config'" "$TEST_ROOT/stderr" -# 'flakes' experimental-feature is enabled after, process -NIX_CONFIG=' - accept-flake-config = true - experimental-features = nix-command flakes -' nix config show accept-flake-config 1>"$TEST_ROOT"/stdout 2>"$TEST_ROOT"/stderr +# Experimental feature is enabled after, process. +NIX_CONFIG=" + $gatedSetting = true + experimental-features = nix-command $xpFeature +" nix config show $gatedSetting 1>"$TEST_ROOT"/stdout 2>"$TEST_ROOT"/stderr grepQuiet "true" "$TEST_ROOT/stdout" -grepQuietInverse "Ignoring setting 'accept-flake-config'" "$TEST_ROOT/stderr" +grepQuietInverse "Ignoring setting '$gatedSetting'" "$TEST_ROOT/stderr" function exit_code_both_ways { - expect 1 nix --experimental-features 'nix-command' "$@" 1>/dev/null - nix --experimental-features 'nix-command flakes' "$@" 1>/dev/null + expect 1 nix --experimental-features 'nix-command ' "$@" 1>/dev/null + nix --experimental-features "nix-command $xpFeature" "$@" 1>/dev/null # Also, the order should not matter expect 1 nix "$@" --experimental-features 'nix-command' 1>/dev/null - nix "$@" --experimental-features 'nix-command flakes' 1>/dev/null + nix "$@" --experimental-features "nix-command $xpFeature" 1>/dev/null } -exit_code_both_ways show-config --flake-registry 'https://no' +exit_code_both_ways config show --auto-allocate-uids # Double check these are stable nix --experimental-features '' --help 1>/dev/null diff --git a/tests/functional/repl.sh b/tests/functional/repl.sh index 86cd6f458d0..40035785f58 100755 --- a/tests/functional/repl.sh +++ b/tests/functional/repl.sh @@ -140,9 +140,9 @@ EOF testReplResponse ' foo + baz ' "3" \ - ./flake ./flake\#bar --experimental-features 'flakes' + ./flake ./flake\#bar -# Test the `:reload` mechansim with flakes: +# Test the `:reload` mechanism with flakes: # - Eval `./flake#changingThing` # - Modify the flake # - Re-eval it @@ -153,7 +153,7 @@ sleep 1 # Leave the repl the time to eval 'foo' sed -i 's/beforeChange/afterChange/' flake/flake.nix echo ":reload" echo "changingThing" -) | nix repl ./flake --experimental-features 'flakes') +) | nix repl ./flake) echo "$replResult" | grepQuiet -s beforeChange echo "$replResult" | grepQuiet -s afterChange diff --git a/tests/nixos/github-flakes.nix b/tests/nixos/github-flakes.nix index 221045009ee..9a1ed749ce8 100644 --- a/tests/nixos/github-flakes.nix +++ b/tests/nixos/github-flakes.nix @@ -143,7 +143,7 @@ in 
virtualisation.additionalPaths = [ pkgs.hello pkgs.fuse ]; virtualisation.memorySize = 4096; nix.settings.substituters = lib.mkForce [ ]; - nix.extraOptions = "experimental-features = nix-command flakes"; + nix.extraOptions = "experimental-features = nix-command"; networking.hosts.${(builtins.head nodes.github.networking.interfaces.eth1.ipv4.addresses).address} = [ "channels.nixos.org" "api.github.com" "github.com" ]; security.pki.certificateFiles = [ "${cert}/ca.crt" ]; diff --git a/tests/nixos/sourcehut-flakes.nix b/tests/nixos/sourcehut-flakes.nix index 04f3590e1d8..4eeab42db4d 100644 --- a/tests/nixos/sourcehut-flakes.nix +++ b/tests/nixos/sourcehut-flakes.nix @@ -104,7 +104,7 @@ in virtualisation.memorySize = 4096; nix.settings.substituters = lib.mkForce [ ]; nix.extraOptions = '' - experimental-features = nix-command flakes + experimental-features = nix-command flake-registry = https://git.sr.ht/~NixOS/flake-registry/blob/master/flake-registry.json ''; environment.systemPackages = [ pkgs.jq ]; diff --git a/tests/nixos/tarball-flakes.nix b/tests/nixos/tarball-flakes.nix index 84cf377ec5b..2a21d873880 100644 --- a/tests/nixos/tarball-flakes.nix +++ b/tests/nixos/tarball-flakes.nix @@ -51,7 +51,7 @@ in virtualisation.additionalPaths = [ pkgs.hello pkgs.fuse ]; virtualisation.memorySize = 4096; nix.settings.substituters = lib.mkForce [ ]; - nix.extraOptions = "experimental-features = nix-command flakes"; + nix.extraOptions = "experimental-features = nix-command"; }; }; diff --git a/tests/unit/libutil/config.cc b/tests/unit/libutil/config.cc index 886e70da50d..f3dc2876af7 100644 --- a/tests/unit/libutil/config.cc +++ b/tests/unit/libutil/config.cc @@ -191,7 +191,7 @@ namespace nix { "description", {}, true, - Xp::Flakes, + Xp::CaDerivations, }; setting.assign("value"); @@ -203,7 +203,7 @@ namespace nix { "description": "description\n", "documentDefault": true, "value": "value", - "experimentalFeature": "flakes" + "experimentalFeature": "ca-derivations" } })#"_json); } From 16c8f9016b9438e7445acd65445d89b424dd57dc Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 10 Jul 2024 16:42:37 +0200 Subject: [PATCH 013/361] Remove unneeded --experimental-features --- .github/workflows/ci.yml | 2 +- src/libflake/flake-settings.hh | 7 +------ 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 832aa3ff080..d8c5439bbe7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,7 +23,7 @@ jobs: with: flakehub: true - uses: DeterminateSystems/magic-nix-cache-action@main - - run: nix --experimental-features 'nix-command' flake check -L + - run: nix flake check -L vm_tests: needs: tests diff --git a/src/libflake/flake-settings.hh b/src/libflake/flake-settings.hh index 4f986aefd0d..a601e120c4f 100644 --- a/src/libflake/flake-settings.hh +++ b/src/libflake/flake-settings.hh @@ -17,12 +17,7 @@ struct FlakeSettings : public Config FlakeSettings(); Setting useRegistries{ - this, - true, - "use-registries", - "Whether to use flake registries to resolve flake references.", - {}, - true}; + this, true, "use-registries", "Whether to use flake registries to resolve flake references.", {}, true}; Setting acceptFlakeConfig{ this, From 50d7ce6c6a2a98d949aa0b2147c9ce9f22a9f2e6 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 10 Jul 2024 16:49:46 +0200 Subject: [PATCH 014/361] Actually remove the "flakes" experimental feature To avoid annoying warnings, this is now a "stabilized" feature. 
--- src/libutil/config.cc | 4 +++- src/libutil/experimental-features.cc | 18 +++++++----------- src/libutil/experimental-features.hh | 3 ++- 3 files changed, 12 insertions(+), 13 deletions(-) diff --git a/src/libutil/config.cc b/src/libutil/config.cc index 8abf4bc2362..9946bed7896 100644 --- a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -341,8 +341,10 @@ template<> std::set BaseSetting res; for (auto & s : tokenizeString(str)) { - if (auto thisXpFeature = parseExperimentalFeature(s); thisXpFeature) + if (auto thisXpFeature = parseExperimentalFeature(s)) res.insert(thisXpFeature.value()); + else if (stabilizedFeatures.count(s)) + debug("experimental feature '%s' is now stable", s); else warn("unknown experimental feature '%s'", s); } diff --git a/src/libutil/experimental-features.cc b/src/libutil/experimental-features.cc index 8ecf1e92b42..b54a0cdc5b1 100644 --- a/src/libutil/experimental-features.cc +++ b/src/libutil/experimental-features.cc @@ -70,16 +70,6 @@ constexpr std::array xpFeatureDetails )", .trackingUrl = "https://github.com/NixOS/nix/milestone/42", }, - { - .tag = Xp::Flakes, - .name = "flakes", - .description = R"( - *Enabled for Determinate Nix Installer users since 2.19* - - See the manual entry for [`nix flake`](@docroot@/command-ref/new-cli/nix3-flake.md) for details. - )", - .trackingUrl = "https://github.com/NixOS/nix/milestone/27", - }, { .tag = Xp::FetchTree, .name = "fetch-tree", @@ -302,12 +292,18 @@ constexpr std::array xpFeatureDetails static_assert( []() constexpr { for (auto [index, feature] : enumerate(xpFeatureDetails)) - if (index != (size_t)feature.tag) + if (index != (size_t) feature.tag) return false; return true; }(), "array order does not match enum tag order"); +/** + * A set of previously experimental features that are now considered + * stable. We don't warn if users have these in `experimental-features`. 
+ */ +std::set stabilizedFeatures{"flakes"}; + const std::optional parseExperimentalFeature(const std::string_view & name) { using ReverseXpMap = std::map; diff --git a/src/libutil/experimental-features.hh b/src/libutil/experimental-features.hh index 1da2a3ff55d..f195c232c90 100644 --- a/src/libutil/experimental-features.hh +++ b/src/libutil/experimental-features.hh @@ -19,7 +19,6 @@ enum struct ExperimentalFeature { CaDerivations, ImpureDerivations, - Flakes, FetchTree, NixCommand, GitHashing, @@ -38,6 +37,8 @@ enum struct ExperimentalFeature VerifiedFetches, }; +extern std::set stabilizedFeatures; + /** * Just because writing `ExperimentalFeature::CaDerivations` is way too long */ From e638d0022336da31b461ee6cfee6c85867759294 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 10 Jul 2024 17:01:37 +0200 Subject: [PATCH 015/361] Stabilize the "nix-command" feature --- doc/manual/generate-manpage.nix | 5 ----- src/libstore/build/derivation-goal.cc | 4 +--- src/libutil/args.cc | 2 +- src/libutil/experimental-features.cc | 11 +---------- src/libutil/experimental-features.hh | 1 - src/nix/main.cc | 3 --- tests/functional/config.sh | 4 ++-- tests/functional/config/nix-with-bang-include.conf | 2 +- tests/functional/experimental-features.sh | 9 --------- 9 files changed, 6 insertions(+), 35 deletions(-) diff --git a/doc/manual/generate-manpage.nix b/doc/manual/generate-manpage.nix index ba5667a4305..89fec9d1cd6 100644 --- a/doc/manual/generate-manpage.nix +++ b/doc/manual/generate-manpage.nix @@ -36,11 +36,6 @@ let let result = '' - > **Warning** \ - > This program is - > [**experimental**](@docroot@/contributing/experimental-features.md#xp-feature-nix-command) - > and its interface is subject to change. - # Name `${command}` - ${details.description} diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 64b8495e1bb..886e63263f8 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -994,9 +994,7 @@ void DerivationGoal::buildDone() msg += line; msg += "\n"; } - auto nixLogCommand = experimentalFeatureSettings.isEnabled(Xp::NixCommand) - ? "nix log" - : "nix-store -l"; + auto nixLogCommand = "nix log"; msg += fmt("For full logs, run '" ANSI_BOLD "%s %s" ANSI_NORMAL "'.", nixLogCommand, worker.store.printStorePath(drvPath)); diff --git a/src/libutil/args.cc b/src/libutil/args.cc index c202facdfea..13208b70fe8 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -583,7 +583,7 @@ Strings argvToStrings(int argc, char * * argv) std::optional Command::experimentalFeature () { - return { Xp::NixCommand }; + return {}; } MultiCommand::MultiCommand(std::string_view commandName, const Commands & commands_) diff --git a/src/libutil/experimental-features.cc b/src/libutil/experimental-features.cc index b54a0cdc5b1..c69f84685b2 100644 --- a/src/libutil/experimental-features.cc +++ b/src/libutil/experimental-features.cc @@ -86,15 +86,6 @@ constexpr std::array xpFeatureDetails )", .trackingUrl = "https://github.com/NixOS/nix/milestone/31", }, - { - .tag = Xp::NixCommand, - .name = "nix-command", - .description = R"( - Enable the new `nix` subcommands. See the manual on - [`nix`](@docroot@/command-ref/new-cli/nix.md) for details. - )", - .trackingUrl = "https://github.com/NixOS/nix/milestone/28", - }, { .tag = Xp::GitHashing, .name = "git-hashing", @@ -302,7 +293,7 @@ static_assert( * A set of previously experimental features that are now considered * stable. 
We don't warn if users have these in `experimental-features`. */ -std::set stabilizedFeatures{"flakes"}; +std::set stabilizedFeatures{"flakes", "nix-command"}; const std::optional parseExperimentalFeature(const std::string_view & name) { diff --git a/src/libutil/experimental-features.hh b/src/libutil/experimental-features.hh index f195c232c90..dddd5329a65 100644 --- a/src/libutil/experimental-features.hh +++ b/src/libutil/experimental-features.hh @@ -20,7 +20,6 @@ enum struct ExperimentalFeature CaDerivations, ImpureDerivations, FetchTree, - NixCommand, GitHashing, RecursiveNix, NoUrlLiterals, diff --git a/src/nix/main.cc b/src/nix/main.cc index 85be80da452..92b0277d20b 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -120,7 +120,6 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs, virtual RootArgs .description = "Print full build logs on standard error.", .category = loggingCategory, .handler = {[&]() { logger->setPrintBuildLogs(true); }}, - .experimentalFeature = Xp::NixCommand, }); addFlag({ @@ -136,7 +135,6 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs, virtual RootArgs .description = "Disable substituters and consider all previously downloaded files up-to-date.", .category = miscCategory, .handler = {[&]() { useNet = false; }}, - .experimentalFeature = Xp::NixCommand, }); addFlag({ @@ -144,7 +142,6 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs, virtual RootArgs .description = "Consider all previously downloaded files out-of-date.", .category = miscCategory, .handler = {[&]() { refresh = true; }}, - .experimentalFeature = Xp::NixCommand, }); } diff --git a/tests/functional/config.sh b/tests/functional/config.sh index ef4d507312c..a1016a3686a 100755 --- a/tests/functional/config.sh +++ b/tests/functional/config.sh @@ -54,8 +54,8 @@ var=$(nix config show | grep '^allowed-uris =' | cut -d '=' -f 2 | xargs) # Test that we can !include a file. export NIX_USER_CONF_FILES=$here/config/nix-with-bang-include.conf -var=$(nix config show | grep '^experimental-features =' | cut -d '=' -f 2 | xargs) -[[ $var == nix-command ]] +var=$(nix config show | grep '^fsync-metadata =' | cut -d '=' -f 2 | xargs) +[[ $var == true ]] # Test that it's possible to load config from the environment prev=$(nix config show | grep '^cores' | cut -d '=' -f 2 | xargs) diff --git a/tests/functional/config/nix-with-bang-include.conf b/tests/functional/config/nix-with-bang-include.conf index fa600e6ff19..033e854817b 100644 --- a/tests/functional/config/nix-with-bang-include.conf +++ b/tests/functional/config/nix-with-bang-include.conf @@ -1,2 +1,2 @@ -experimental-features = nix-command +fsync-metadata = true !include ./missing-extra-config.conf \ No newline at end of file diff --git a/tests/functional/experimental-features.sh b/tests/functional/experimental-features.sh index d6f7f9e5672..0533a7c04e9 100755 --- a/tests/functional/experimental-features.sh +++ b/tests/functional/experimental-features.sh @@ -79,12 +79,3 @@ nix --experimental-features '' --help 1>/dev/null nix --experimental-features '' doctor --help 1>/dev/null nix --experimental-features '' repl --help 1>/dev/null nix --experimental-features '' upgrade-nix --help 1>/dev/null - -# These 3 arguments are currently given to all commands, which is wrong (as not -# all care). To deal with fixing later, we simply make them require the -# nix-command experimental features --- it so happens that the commands we wish -# stabilizing to do not need them anyways. 
-for arg in '--print-build-logs' '--offline' '--refresh'; do - nix --experimental-features 'nix-command' "$arg" --help 1>/dev/null - expect 1 nix --experimental-features '' "$arg" --help 1>/dev/null -done From 98a6af2c97cba5a666afb93464c17b9b2f8c1bad Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 10 Jul 2024 17:09:34 +0200 Subject: [PATCH 016/361] doc/manual/src/contributing/hacking.md: Remove non-flake instructions --- doc/manual/src/contributing/hacking.md | 23 ++--------------------- 1 file changed, 2 insertions(+), 21 deletions(-) diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index fc2d7221706..451b38976d2 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -14,11 +14,6 @@ The following instructions assume you already have some version of Nix installed ## Building Nix with flakes -This section assumes you are using Nix with the experimental feature [`nix-command`] enabled. -See the [Building Nix](#building-nix) section for equivalent instructions using stable Nix interfaces. - -[`nix-command`]: @docroot@/contributing/experimental-features.md#xp-nix-command - To build all dependencies and start a shell in which all environment variables are set up so that those dependencies can be found: ```console @@ -105,7 +100,7 @@ nix (Nix) 2.12 To build a release version of Nix for the current operating system and CPU architecture: ```console -$ nix-build +$ nix build ``` You can also build Nix for one of the [supported platforms](#platforms). @@ -155,12 +150,6 @@ platform. Common solutions include [remote build machines] and [binary format em Given such a setup, executing the build only requires selecting the respective attribute. For example, to compile for `aarch64-linux`: -```console -$ nix-build --attr packages.aarch64-linux.default -``` - -or for Nix with the [`flakes`] and [`nix-command`] experimental features enabled: - ```console $ nix build .#packages.aarch64-linux.default ``` @@ -242,20 +231,12 @@ To build with one of those environments, you can use $ nix build .#nix-ccacheStdenv ``` -for flake-enabled Nix, or - -```console -$ nix-build --attr nix-ccacheStdenv -``` - -for classic Nix. - You can use any of the other supported environments in place of `nix-ccacheStdenv`. ## Editor integration The `clangd` LSP server is installed by default on the `clang`-based `devShell`s. -See [supported compilation environments](#compilation-environments) and instructions how to set up a shell [with flakes](#nix-with-flakes) or in [classic Nix](#classic-nix). +See [supported compilation environments](#compilation-environments) and instructions how to [set up a shell with flakes](#nix-with-flakes). To use the LSP with your editor, you first need to [set up `clangd`](https://clangd.llvm.org/installation#project-setup) by running: From 891a5b387e767d72c1679dfb0bc6a07d7eb89267 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 10 Jul 2024 17:18:09 +0200 Subject: [PATCH 017/361] Remove warning about nix-command --- src/nix/nix.md | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/src/nix/nix.md b/src/nix/nix.md index 2f59db3afb5..5ac00b94074 100644 --- a/src/nix/nix.md +++ b/src/nix/nix.md @@ -48,11 +48,6 @@ manual](https://nixos.org/manual/nix/stable/). # Installables -> **Warning** \ -> Installables are part of the unstable -> [`nix-command` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-nix-command), -> and subject to change without notice. 
- Many `nix` subcommands operate on one or more *installables*. These are command line arguments that represent something that can be realised in the Nix store. @@ -68,11 +63,6 @@ That is, Nix will operate on the default flake output attribute of the flake in ### Flake output attribute -> **Warning** \ -> Flake output attribute installables depend on the -> [`nix-command`](@docroot@/contributing/experimental-features.md#xp-feature-nix-command) -> experimental feature, and subject to change without notice. - Example: `nixpkgs#hello` These have the form *flakeref*[`#`*attrpath*], where *flakeref* is a From e0ce16173432cc08aa65fc12e89a8d459b9d501d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 10 Jul 2024 18:45:00 +0200 Subject: [PATCH 018/361] Remove experimental warnings --- doc/manual/src/protocols/json/derivation.md | 6 ------ doc/manual/src/protocols/json/store-object-info.md | 6 ------ 2 files changed, 12 deletions(-) diff --git a/doc/manual/src/protocols/json/derivation.md b/doc/manual/src/protocols/json/derivation.md index f881dd70381..6af7c0dfb1d 100644 --- a/doc/manual/src/protocols/json/derivation.md +++ b/doc/manual/src/protocols/json/derivation.md @@ -1,11 +1,5 @@ # Derivation JSON Format -> **Warning** -> -> This JSON format is currently -> [**experimental**](@docroot@/contributing/experimental-features.md#xp-feature-nix-command) -> and subject to change. - The JSON serialization of a [derivations](@docroot@/glossary.md#gloss-store-derivation) is a JSON object with the following fields: diff --git a/doc/manual/src/protocols/json/store-object-info.md b/doc/manual/src/protocols/json/store-object-info.md index 9f647a96c24..fee415eefc5 100644 --- a/doc/manual/src/protocols/json/store-object-info.md +++ b/doc/manual/src/protocols/json/store-object-info.md @@ -1,11 +1,5 @@ # Store object info JSON format -> **Warning** -> -> This JSON format is currently -> [**experimental**](@docroot@/contributing/experimental-features.md#xp-feature-nix-command) -> and subject to change. - Info about a [store object]. 
* `path`: From 1b52a3add1d79633c2feb0ff206c096617962053 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 10 Jul 2024 18:45:23 +0200 Subject: [PATCH 019/361] Remove references to nix-command --- doc/manual/local.mk | 4 ++-- scripts/installer.nix | 2 +- src/libutil/config.hh | 2 +- tests/functional/ca/derivation-json.sh | 2 +- tests/functional/ca/selfref-gc.sh | 2 +- tests/functional/common/init.sh | 3 +-- tests/functional/common/vars-and-functions.sh | 2 +- tests/functional/config.sh | 2 +- tests/functional/config/nix-with-include.conf | 1 - .../config/nix-with-substituters.conf | 1 - tests/functional/dyn-drv/eval-outputOf.sh | 6 ++--- .../functional/dyn-drv/recursive-mod-json.nix | 2 +- tests/functional/experimental-features.sh | 24 +++++++++---------- tests/functional/impure-derivations.sh | 2 +- tests/functional/recursive.nix | 2 +- tests/functional/recursive.sh | 2 +- tests/installer/default.nix | 2 +- tests/nixos/authorization.nix | 2 -- tests/nixos/containers/containers.nix | 2 +- tests/nixos/fetch-git/testsupport/setup.nix | 1 - tests/nixos/git-submodules.nix | 1 - tests/nixos/github-flakes.nix | 1 - tests/nixos/nix-copy.nix | 1 - tests/nixos/sourcehut-flakes.nix | 1 - tests/nixos/tarball-flakes.nix | 1 - tests/repl-completion.nix | 4 ++-- 26 files changed, 32 insertions(+), 43 deletions(-) diff --git a/doc/manual/local.mk b/doc/manual/local.mk index 0cec5288504..d4cba066bee 100644 --- a/doc/manual/local.mk +++ b/doc/manual/local.mk @@ -35,7 +35,7 @@ dummy-env = env -i \ NIX_STATE_DIR=/dummy \ NIX_CONFIG='cores = 0' -nix-eval = $(dummy-env) $(doc_nix) eval --experimental-features nix-command -I nix=doc/manual --store dummy:// --impure --raw +nix-eval = $(dummy-env) $(doc_nix) eval -I nix=doc/manual --store dummy:// --impure --raw # re-implement mdBook's include directive to make it usable for terminal output and for proper @docroot@ substitution define process-includes @@ -121,7 +121,7 @@ $(d)/nix.json: $(doc_nix) @mv $@.tmp $@ $(d)/conf-file.json: $(doc_nix) - $(trace-gen) $(dummy-env) $(doc_nix) config show --json --experimental-features nix-command > $@.tmp + $(trace-gen) $(dummy-env) $(doc_nix) config show --json > $@.tmp @mv $@.tmp $@ $(d)/src/contributing/experimental-feature-descriptions.md: $(d)/xp-features.json $(d)/utils.nix $(d)/generate-xp-features.nix $(doc_nix) diff --git a/scripts/installer.nix b/scripts/installer.nix index cc7759c2c8e..3d51d4916a3 100644 --- a/scripts/installer.nix +++ b/scripts/installer.nix @@ -25,7 +25,7 @@ runCommand "installer-script" { (tarball: let inherit (tarball.stdenv.hostPlatform) system; in '' \ - --replace '@tarballHash_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${tarball}/*.tar.xz) \ + --replace '@tarballHash_${system}@' $(nix hash-file --base16 --type sha256 ${tarball}/*.tar.xz) \ --replace '@tarballPath_${system}@' $(tarballPath ${tarball}/*.tar.xz) \ '' ) diff --git a/src/libutil/config.hh b/src/libutil/config.hh index a30d1b1ec64..f1c7233baab 100644 --- a/src/libutil/config.hh +++ b/src/libutil/config.hh @@ -386,7 +386,7 @@ struct ExperimentalFeatureSettings : Config { Example: ``` - experimental-features = nix-command + experimental-features = ca-derivations ``` The following experimental features are available: diff --git a/tests/functional/ca/derivation-json.sh b/tests/functional/ca/derivation-json.sh index 1e2a8fe35f6..97f1657320a 100644 --- a/tests/functional/ca/derivation-json.sh +++ b/tests/functional/ca/derivation-json.sh @@ -19,7 +19,7 @@ drvPath3=$(nix derivation add 
--dry-run < "$TEST_HOME"/foo.json) [[ ! -e "$drvPath3" ]] # But the JSON is rejected without the experimental feature -expectStderr 1 nix derivation add < "$TEST_HOME"/foo.json --experimental-features nix-command | grepQuiet "experimental Nix feature 'ca-derivations' is disabled" +expectStderr 1 nix derivation add < "$TEST_HOME"/foo.json --experimental-features '' | grepQuiet "experimental Nix feature 'ca-derivations' is disabled" # Without --dry-run it is actually written drvPath4=$(nix derivation add < "$TEST_HOME"/foo.json) diff --git a/tests/functional/ca/selfref-gc.sh b/tests/functional/ca/selfref-gc.sh index 588515db521..a730bdab694 100755 --- a/tests/functional/ca/selfref-gc.sh +++ b/tests/functional/ca/selfref-gc.sh @@ -4,7 +4,7 @@ source common.sh requireDaemonNewerThan "2.4pre20210626" -enableFeatures "ca-derivations nix-command" +enableFeatures "ca-derivations" export NIX_TESTS_CA_BY_DEFAULT=1 cd .. diff --git a/tests/functional/common/init.sh b/tests/functional/common/init.sh index 482d62cc4cd..38b29d12297 100755 --- a/tests/functional/common/init.sh +++ b/tests/functional/common/init.sh @@ -12,7 +12,6 @@ if isTestOnNixOS; then ! test -e "$test_nix_conf" cat > "$test_nix_conf_dir/nix.conf" < "$NIX_CONF_DIR"/nix.conf <"$TEST_ROOT"/stdout 2>"$TEST_ROOT"/stderr [[ $(cat "$TEST_ROOT/stdout") = '' ]] @@ -43,14 +43,14 @@ grepQuiet "error: could not find setting '$gatedSetting'" "$TEST_ROOT/stderr" # Experimental feature is disabled after, ignore and warn. NIX_CONFIG=" $gatedSetting = true - experimental-features = nix-command + experimental-features = " expect 1 nix config show $gatedSetting 1>"$TEST_ROOT"/stdout 2>"$TEST_ROOT"/stderr [[ $(cat "$TEST_ROOT/stdout") = '' ]] grepQuiet "error: could not find setting '$gatedSetting'" "$TEST_ROOT/stderr" # Experimental feature is enabled before, process. NIX_CONFIG=" - experimental-features = nix-command $xpFeature + experimental-features = $xpFeature $gatedSetting = true " nix config show $gatedSetting 1>"$TEST_ROOT"/stdout 2>"$TEST_ROOT"/stderr grepQuiet "true" "$TEST_ROOT/stdout" @@ -58,18 +58,18 @@ grepQuiet "true" "$TEST_ROOT/stdout" # Experimental feature is enabled after, process. NIX_CONFIG=" $gatedSetting = true - experimental-features = nix-command $xpFeature + experimental-features = $xpFeature " nix config show $gatedSetting 1>"$TEST_ROOT"/stdout 2>"$TEST_ROOT"/stderr grepQuiet "true" "$TEST_ROOT/stdout" grepQuietInverse "Ignoring setting '$gatedSetting'" "$TEST_ROOT/stderr" function exit_code_both_ways { - expect 1 nix --experimental-features 'nix-command ' "$@" 1>/dev/null - nix --experimental-features "nix-command $xpFeature" "$@" 1>/dev/null + expect 1 nix --experimental-features '' "$@" 1>/dev/null + nix --experimental-features "$xpFeature" "$@" 1>/dev/null # Also, the order should not matter - expect 1 nix "$@" --experimental-features 'nix-command' 1>/dev/null - nix "$@" --experimental-features "nix-command $xpFeature" 1>/dev/null + expect 1 nix "$@" --experimental-features '' 1>/dev/null + nix "$@" --experimental-features "$xpFeature" 1>/dev/null } exit_code_both_ways config show --auto-allocate-uids diff --git a/tests/functional/impure-derivations.sh b/tests/functional/impure-derivations.sh index 5dea220fec7..69884c2932e 100755 --- a/tests/functional/impure-derivations.sh +++ b/tests/functional/impure-derivations.sh @@ -21,7 +21,7 @@ drvPath2=$(nix derivation add < $TEST_HOME/impure-drv.json) [[ "$drvPath" = "$drvPath2" ]] # But only with the experimental feature! 
-expectStderr 1 nix derivation add < $TEST_HOME/impure-drv.json --experimental-features nix-command | grepQuiet "experimental Nix feature 'impure-derivations' is disabled" +expectStderr 1 nix derivation add < $TEST_HOME/impure-drv.json --experimental-features '' | grepQuiet "experimental Nix feature 'impure-derivations' is disabled" nix build --dry-run --json --file ./impure-derivations.nix impure.all json=$(nix build -L --no-link --json --file ./impure-derivations.nix impure.all) diff --git a/tests/functional/recursive.nix b/tests/functional/recursive.nix index fa8cc04db2b..622049dca3e 100644 --- a/tests/functional/recursive.nix +++ b/tests/functional/recursive.nix @@ -14,7 +14,7 @@ mkDerivation rec { buildCommand = '' mkdir $out - opts="--experimental-features nix-command ${if (NIX_TESTS_CA_BY_DEFAULT == "1") then "--extra-experimental-features ca-derivations" else ""}" + opts="${if (NIX_TESTS_CA_BY_DEFAULT == "1") then "--extra-experimental-features ca-derivations" else ""}" PATH=${builtins.getEnv "NIX_BIN_DIR"}:$PATH diff --git a/tests/functional/recursive.sh b/tests/functional/recursive.sh index 640fb92d2c5..fb0aa69752e 100755 --- a/tests/functional/recursive.sh +++ b/tests/functional/recursive.sh @@ -13,7 +13,7 @@ rm -f $TEST_ROOT/result export unreachable=$(nix store add-path ./recursive.sh) -NIX_BIN_DIR=$(dirname $(type -p nix)) nix --extra-experimental-features 'nix-command recursive-nix' build -o $TEST_ROOT/result -L --impure --file ./recursive.nix +NIX_BIN_DIR=$(dirname $(type -p nix)) nix --extra-experimental-features 'recursive-nix' build -o $TEST_ROOT/result -L --impure --file ./recursive.nix [[ $(cat $TEST_ROOT/result/inner1) =~ blaat ]] diff --git a/tests/installer/default.nix b/tests/installer/default.nix index 4aed6eae489..3b75c5e0f4c 100644 --- a/tests/installer/default.nix +++ b/tests/installer/default.nix @@ -224,7 +224,7 @@ let source /etc/bashrc || true nix-env --version - nix --extra-experimental-features nix-command store info + nix store info out=\$(nix-build --no-substitute -E 'derivation { name = "foo"; system = "x86_64-linux"; builder = "/bin/sh"; args = ["-c" "echo foobar > \$out"]; }') [[ \$(cat \$out) = foobar ]] diff --git a/tests/nixos/authorization.nix b/tests/nixos/authorization.nix index fdeae06ed34..d80069e77d9 100644 --- a/tests/nixos/authorization.nix +++ b/tests/nixos/authorization.nix @@ -10,8 +10,6 @@ users.users.alice.isNormalUser = true; users.users.bob.isNormalUser = true; users.users.mallory.isNormalUser = true; - - nix.settings.experimental-features = "nix-command"; }; testScript = diff --git a/tests/nixos/containers/containers.nix b/tests/nixos/containers/containers.nix index 6773f5628a3..188012c9ba6 100644 --- a/tests/nixos/containers/containers.nix +++ b/tests/nixos/containers/containers.nix @@ -18,7 +18,7 @@ nix.settings.substituters = lib.mkForce [ ]; nix.extraOptions = '' - extra-experimental-features = nix-command auto-allocate-uids cgroups + extra-experimental-features = auto-allocate-uids cgroups extra-system-features = uid-range ''; nix.nixPath = [ "nixpkgs=${nixpkgs}" ]; diff --git a/tests/nixos/fetch-git/testsupport/setup.nix b/tests/nixos/fetch-git/testsupport/setup.nix index a81d5614b44..08195daa51d 100644 --- a/tests/nixos/fetch-git/testsupport/setup.nix +++ b/tests/nixos/fetch-git/testsupport/setup.nix @@ -74,7 +74,6 @@ in environment.variables = { _NIX_FORCE_HTTP = "1"; }; - nix.settings.experimental-features = ["nix-command" "flakes"]; }; setupScript = '' ''; diff --git a/tests/nixos/git-submodules.nix 
b/tests/nixos/git-submodules.nix index 570b1822bf6..6bcb75b5ed2 100644 --- a/tests/nixos/git-submodules.nix +++ b/tests/nixos/git-submodules.nix @@ -20,7 +20,6 @@ { programs.ssh.extraConfig = "ConnectTimeout 30"; environment.systemPackages = [ pkgs.git ]; - nix.extraOptions = "experimental-features = nix-command flakes"; }; }; diff --git a/tests/nixos/github-flakes.nix b/tests/nixos/github-flakes.nix index 9a1ed749ce8..37ffa2b7085 100644 --- a/tests/nixos/github-flakes.nix +++ b/tests/nixos/github-flakes.nix @@ -143,7 +143,6 @@ in virtualisation.additionalPaths = [ pkgs.hello pkgs.fuse ]; virtualisation.memorySize = 4096; nix.settings.substituters = lib.mkForce [ ]; - nix.extraOptions = "experimental-features = nix-command"; networking.hosts.${(builtins.head nodes.github.networking.interfaces.eth1.ipv4.addresses).address} = [ "channels.nixos.org" "api.github.com" "github.com" ]; security.pki.certificateFiles = [ "${cert}/ca.crt" ]; diff --git a/tests/nixos/nix-copy.nix b/tests/nixos/nix-copy.nix index 7db5197aa8c..cd0cca63b45 100644 --- a/tests/nixos/nix-copy.nix +++ b/tests/nixos/nix-copy.nix @@ -23,7 +23,6 @@ in { { virtualisation.writableStore = true; virtualisation.additionalPaths = [ pkgA pkgD.drvPath ]; nix.settings.substituters = lib.mkForce [ ]; - nix.settings.experimental-features = [ "nix-command" ]; services.getty.autologinUser = "root"; programs.ssh.extraConfig = '' Host * diff --git a/tests/nixos/sourcehut-flakes.nix b/tests/nixos/sourcehut-flakes.nix index 4eeab42db4d..a1422ab964e 100644 --- a/tests/nixos/sourcehut-flakes.nix +++ b/tests/nixos/sourcehut-flakes.nix @@ -104,7 +104,6 @@ in virtualisation.memorySize = 4096; nix.settings.substituters = lib.mkForce [ ]; nix.extraOptions = '' - experimental-features = nix-command flake-registry = https://git.sr.ht/~NixOS/flake-registry/blob/master/flake-registry.json ''; environment.systemPackages = [ pkgs.jq ]; diff --git a/tests/nixos/tarball-flakes.nix b/tests/nixos/tarball-flakes.nix index 2a21d873880..e0327cac579 100644 --- a/tests/nixos/tarball-flakes.nix +++ b/tests/nixos/tarball-flakes.nix @@ -51,7 +51,6 @@ in virtualisation.additionalPaths = [ pkgs.hello pkgs.fuse ]; virtualisation.memorySize = 4096; nix.settings.substituters = lib.mkForce [ ]; - nix.extraOptions = "experimental-features = nix-command"; }; }; diff --git a/tests/repl-completion.nix b/tests/repl-completion.nix index 3ba198a9860..5cca0fc1cfa 100644 --- a/tests/repl-completion.nix +++ b/tests/repl-completion.nix @@ -10,7 +10,7 @@ runCommand "repl-completion" { ]; expectScript = '' # Regression https://github.com/NixOS/nix/pull/10778 - spawn nix repl --offline --extra-experimental-features nix-command + spawn nix repl --offline expect "nix-repl>" send "foo = import ./does-not-exist.nix\n" expect "nix-repl>" @@ -37,4 +37,4 @@ runCommand "repl-completion" { nix-store --init expect $expectScriptPath touch $out -'' \ No newline at end of file +'' From a00efcb36c1e76cf5c8defbaa75d1c2d97e08a6b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 11 Jul 2024 09:52:40 +0200 Subject: [PATCH 020/361] Fix daemon test --- tests/functional/common/vars-and-functions.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/functional/common/vars-and-functions.sh b/tests/functional/common/vars-and-functions.sh index a1f51dc88b2..e21a7ff0a05 100644 --- a/tests/functional/common/vars-and-functions.sh +++ b/tests/functional/common/vars-and-functions.sh @@ -123,7 +123,8 @@ startDaemon() { fi # Start the daemon, wait for the socket to appear. 
rm -f $NIX_DAEMON_SOCKET_PATH - PATH=$DAEMON_PATH nix daemon & + # TODO: remove the nix-command feature when we're no longer testing against old daemons. + PATH=$DAEMON_PATH nix daemon --extra-experimental-features nix-command & _NIX_TEST_DAEMON_PID=$! export _NIX_TEST_DAEMON_PID for ((i = 0; i < 300; i++)); do From 6ad333aeee5d0d594ef9b9119bdd38583eec1005 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 11 Jul 2024 11:13:55 +0200 Subject: [PATCH 021/361] GitHub CI: Use a bigger builder for x86_64-linux Also, don't use a matrix for the 'tests' workflow, since we don't want the 'vm_tests' and 'flake_regressions' workflows to depend on aarch64-darwin. --- .github/workflows/ci.yml | 27 +++++++++++---------------- .github/workflows/test.yml | 21 +++++++++++++++++++++ 2 files changed, 32 insertions(+), 16 deletions(-) create mode 100644 .github/workflows/test.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d8c5439bbe7..6b184288bc8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,23 +10,18 @@ permissions: jobs: - tests: - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest, macos-latest] - runs-on: ${{ matrix.os }} - timeout-minutes: 60 - steps: - - uses: actions/checkout@v4 - - uses: DeterminateSystems/nix-installer-action@main - with: - flakehub: true - - uses: DeterminateSystems/magic-nix-cache-action@main - - run: nix flake check -L + test_x86_64-linux: + uses: ./.github/workflows/test.yml + with: + os: UbuntuLatest32Cores128G + + test_aarch64-darwin: + uses: ./.github/workflows/test.yml + with: + os: macos-latest vm_tests: - needs: tests + needs: test_x86_64-linux runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 @@ -37,7 +32,7 @@ jobs: - run: nix build -L .#hydraJobs.tests.githubFlakes .#hydraJobs.tests.tarballFlakes flake_regressions: - needs: tests + needs: test_x86_64-linux runs-on: ubuntu-22.04 steps: - name: Checkout nix diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 00000000000..14e4c5fa58d --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,21 @@ +on: + workflow_call: + inputs: + os: + required: true + type: string + +jobs: + + tests: + strategy: + fail-fast: false + runs-on: ${{ inputs.os }} + timeout-minutes: 60 + steps: + - uses: actions/checkout@v4 + - uses: DeterminateSystems/nix-installer-action@main + with: + flakehub: true + - uses: DeterminateSystems/magic-nix-cache-action@main + - run: nix flake check -L From 83173fef17f84ca5a397ced74921ec95a196a50d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 11 Jul 2024 11:19:08 +0200 Subject: [PATCH 022/361] Avoid superfluous duplicate jobs on PRs --- .github/workflows/ci.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6b184288bc8..811c1c52423 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -3,6 +3,10 @@ name: "CI" on: pull_request: push: + branches: + - detsys-main + - main + - master permissions: id-token: "write" From 13e60dd649a5556de16418ffdfee247e27169364 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 11 Jul 2024 13:24:09 +0200 Subject: [PATCH 023/361] Build aarch64-linux --- .github/workflows/ci.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 811c1c52423..a3344f207c3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -19,6 +19,11 @@ jobs: with: os: UbuntuLatest32Cores128G + test_aarch64-linux: + uses: 
./.github/workflows/test.yml + with: + os: UbuntuLatest32Cores128GArm + test_aarch64-darwin: uses: ./.github/workflows/test.yml with: From a4d1dfbab64cc83e029e4c34c5d2de97846bf474 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 11 Jul 2024 13:25:39 +0200 Subject: [PATCH 024/361] Split building and testing to improve parallelism --- .github/workflows/build.yml | 21 +++++++++++++++++++++ .github/workflows/ci.yml | 22 ++++++++++++++++++++-- 2 files changed, 41 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/build.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 00000000000..1cb645063e1 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,21 @@ +on: + workflow_call: + inputs: + os: + required: true + type: string + +jobs: + + tests: + strategy: + fail-fast: false + runs-on: ${{ inputs.os }} + timeout-minutes: 60 + steps: + - uses: actions/checkout@v4 + - uses: DeterminateSystems/nix-installer-action@main + with: + flakehub: true + - uses: DeterminateSystems/magic-nix-cache-action@main + - run: nix build diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a3344f207c3..fd89614f703 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,23 +14,41 @@ permissions: jobs: + build_x86_64-linux: + uses: ./.github/workflows/build.yml + with: + os: UbuntuLatest32Cores128G + + build_aarch64-linux: + uses: ./.github/workflows/build.yml + with: + os: UbuntuLatest32Cores128GArm + + build_aarch64-darwin: + uses: ./.github/workflows/build.yml + with: + os: macos-latest + test_x86_64-linux: uses: ./.github/workflows/test.yml + needs: build_x86_64-linux with: os: UbuntuLatest32Cores128G test_aarch64-linux: uses: ./.github/workflows/test.yml + needs: build_aarch64-linux with: os: UbuntuLatest32Cores128GArm test_aarch64-darwin: uses: ./.github/workflows/test.yml + needs: build_aarch64-darwin with: os: macos-latest vm_tests: - needs: test_x86_64-linux + needs: build_x86_64-linux runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 @@ -41,7 +59,7 @@ jobs: - run: nix build -L .#hydraJobs.tests.githubFlakes .#hydraJobs.tests.tarballFlakes flake_regressions: - needs: test_x86_64-linux + needs: build_x86_64-linux runs-on: ubuntu-22.04 steps: - name: Checkout nix From f862424423d786fe9ade91f2319fe6f24ca50648 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 11 Jul 2024 13:39:24 +0200 Subject: [PATCH 025/361] Fix job name --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 1cb645063e1..ef7174c3090 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -7,7 +7,7 @@ on: jobs: - tests: + build: strategy: fail-fast: false runs-on: ${{ inputs.os }} From 6406619c441c35ba323212a234e8923f2a2087da Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 11 Jul 2024 16:49:49 +0200 Subject: [PATCH 026/361] Flake schemas This applies upstream https://github.com/NixOS/nix/pull/8892. 
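For reference, a flake opts in by exposing a `schemas` output. The snippet below is a minimal, hypothetical sketch (not taken from the upstream PR) of what a schema for a `packages`-style output could look like, assuming the node format documented below in `doc/manual/src/protocols/flake-schemas.md`; the attribute names such as `packagesForSystem` are illustrative only:

```nix
{
  outputs = { self, ... }: {
    # Hypothetical schema for a `packages`-like output, following the node
    # format described in doc/manual/src/protocols/flake-schemas.md.
    schemas.packages = {
      version = 1;
      doc = ''
        The `packages` output contains packages that can be built with `nix build`.
      '';
      inventory = output: {
        # One non-leaf node per system, each containing leaf nodes per package.
        children = builtins.mapAttrs (system: packagesForSystem: {
          # Lets tools skip systems the user cannot build for.
          forSystems = [ system ];
          children = builtins.mapAttrs (name: drv: {
            what = "package";
            derivation = drv;
          }) packagesForSystem;
        }) output;
      };
    };
  };
}
```

Flakes that do not define a `schemas` output fall back to the built-in default schemas.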
--- Makefile.config.in | 1 + configure.ac | 6 + doc/manual/src/SUMMARY.md.in | 1 + doc/manual/src/protocols/flake-schemas.md | 60 ++ flake.lock | 16 + flake.nix | 6 +- package.nix | 3 + packaging/dependencies.nix | 2 + packaging/hydra.nix | 2 + src/libcmd/installable-flake.cc | 14 - src/libcmd/installable-flake.hh | 2 - src/libcmd/installables.cc | 5 - src/libexpr/eval-cache.cc | 6 + src/libexpr/eval-cache.hh | 7 + src/libflake/flake/flake.cc | 34 +- src/libflake/flake/flake.hh | 18 + src/nix/call-flake-schemas.nix | 43 ++ src/nix/flake-check.md | 58 +- src/nix/flake-schemas.cc | 221 ++++++ src/nix/flake-schemas.hh | 45 ++ src/nix/flake.cc | 885 +++++----------------- src/nix/local.mk | 6 + tests/functional/flakes/check.sh | 11 - tests/functional/flakes/show.sh | 43 +- tests/functional/fmt.sh | 4 +- 25 files changed, 691 insertions(+), 808 deletions(-) create mode 100644 doc/manual/src/protocols/flake-schemas.md create mode 100644 src/nix/call-flake-schemas.nix create mode 100644 src/nix/flake-schemas.cc create mode 100644 src/nix/flake-schemas.hh diff --git a/Makefile.config.in b/Makefile.config.in index 3100d207365..2ed716b5e7b 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -37,6 +37,7 @@ checkbindir = @checkbindir@ checklibdir = @checklibdir@ datadir = @datadir@ datarootdir = @datarootdir@ +default_flake_schemas = @default_flake_schemas@ docdir = @docdir@ embedded_sandbox_shell = @embedded_sandbox_shell@ exec_prefix = @exec_prefix@ diff --git a/configure.ac b/configure.ac index 4f66a3efcf6..caeb88b678d 100644 --- a/configure.ac +++ b/configure.ac @@ -435,6 +435,12 @@ if test "$embedded_sandbox_shell" = yes; then AC_DEFINE(HAVE_EMBEDDED_SANDBOX_SHELL, 1, [Include the sandbox shell in the Nix binary.]) fi + +AC_ARG_WITH(default-flake-schemas, AS_HELP_STRING([--with-default-flake-schemas=PATH],[path of the default flake schemas flake]), + default_flake_schemas=$withval, + [AC_MSG_FAILURE([--with-default-flake-schemas is missing])]) +AC_SUBST(default_flake_schemas) + ]) diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index a6a2101e9af..56e0dbeec66 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -113,6 +113,7 @@ - [Store Path Specification](protocols/store-path.md) - [Nix Archive (NAR) Format](protocols/nix-archive.md) - [Derivation "ATerm" file format](protocols/derivation-aterm.md) + - [Flake Schemas](protocols/flake-schemas.md) - [C API](c-api.md) - [Glossary](glossary.md) - [Contributing](contributing/index.md) diff --git a/doc/manual/src/protocols/flake-schemas.md b/doc/manual/src/protocols/flake-schemas.md new file mode 100644 index 00000000000..f6cdd6165b8 --- /dev/null +++ b/doc/manual/src/protocols/flake-schemas.md @@ -0,0 +1,60 @@ +# Flake Schemas + +Flake schemas are a mechanism to allow tools like `nix flake show` and `nix flake check` to enumerate and check the contents of a flake +in a generic way, without requiring built-in knowledge of specific flake output types like `packages` or `nixosConfigurations`. + +A flake can define schemas for its outputs by defining a `schemas` output. `schemas` should be an attribute set with an attribute for +every output type that you want to be supported. If a flake does not have a `schemas` attribute, Nix uses a built-in set of schemas (namely https://github.com/DeterminateSystems/flake-schemas). + +A schema is an attribute set with the following attributes: + +* `version`: Should be set to 1. 
+* `doc`: A string containing documentation about the flake output type in Markdown format.
+* `allowIFD` (defaults to `true`): Whether the evaluation of the output attributes of this flake can read from derivation outputs.
+* `inventory`: A function that returns the contents of the flake output (described below).
+
+# Inventory
+
+The `inventory` function returns a *node* describing the contents of the flake output. A node is either a *leaf node* or a *non-leaf node*. This allows nested flake output attributes to be described (e.g. `x86_64-linux.hello` inside a `packages` output).
+
+Non-leaf nodes must have the following attribute:
+
+* `children`: An attribute set of nodes. If this attribute is missing, the node is a leaf node.
+
+Leaf nodes can have the following attributes:
+
+* `derivation`: The main derivation of this node, if any. It must evaluate for `nix flake check` and `nix flake show` to succeed.
+
+* `evalChecks`: An attribute set of Boolean values, used by `nix flake check`. Each attribute must evaluate to `true`.
+
+* `isFlakeCheck`: Whether `nix flake check` should build the `derivation` attribute of this node.
+
+* `shortDescription`: A one-sentence description of the node (such as the `meta.description` attribute in Nixpkgs).
+
+* `what`: A brief human-readable string describing the type of the node, e.g. `"package"` or `"development environment"`. This is used by tools like `nix flake show` to describe the contents of a flake.
+
+Both leaf and non-leaf nodes can have the following attributes:
+
+* `forSystems`: A list of Nix system types (e.g. `["x86_64-linux"]`) supported by this node. This is used by tools to skip nodes that cannot be built on the user's system. Setting this on a non-leaf node allows all the children to be skipped, regardless of the `forSystems` attributes of the children. If this attribute is not set, the node is never skipped.
+
+# Example
+
+Here is a schema that checks that every element of the `nixosConfigurations` flake output evaluates and builds correctly (meaning that it has a `config.system.build.toplevel` attribute that yields a buildable derivation).
+
+```nix
+outputs = {
+  schemas.nixosConfigurations = {
+    version = 1;
+    doc = ''
+      The `nixosConfigurations` flake output defines NixOS system configurations.
+ ''; + inventory = output: { + children = builtins.mapAttrs (configName: machine: + { + what = "NixOS configuration"; + derivation = machine.config.system.build.toplevel; + }) output; + }; + }; +}; +``` diff --git a/flake.lock b/flake.lock index f64e3ea3712..8ea495401db 100644 --- a/flake.lock +++ b/flake.lock @@ -36,6 +36,21 @@ "type": "github" } }, + "flake-schemas": { + "locked": { + "lastModified": 1719857163, + "narHash": "sha256-wM+8JtoKBkahHiKn+EM1ikurMnitwRQrZ91hipJIJK8=", + "owner": "DeterminateSystems", + "repo": "flake-schemas", + "rev": "61a02d7183d4241962025e6c6307a22a0bb72a21", + "type": "github" + }, + "original": { + "owner": "DeterminateSystems", + "repo": "flake-schemas", + "type": "github" + } + }, "flake-utils": { "locked": { "lastModified": 1667395993, @@ -145,6 +160,7 @@ "inputs": { "flake-compat": "flake-compat", "flake-parts": "flake-parts", + "flake-schemas": "flake-schemas", "libgit2": "libgit2", "nixpkgs": "nixpkgs", "nixpkgs-23-11": "nixpkgs-23-11", diff --git a/flake.nix b/flake.nix index d83c2ecad36..256ff66cda7 100644 --- a/flake.nix +++ b/flake.nix @@ -8,6 +8,7 @@ inputs.nixpkgs-23-11.url = "github:NixOS/nixpkgs/a62e6edd6d5e1fa0329b8653c801147986f8d446"; inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; }; inputs.libgit2 = { url = "github:libgit2/libgit2"; flake = false; }; + inputs.flake-schemas.url = "github:DeterminateSystems/flake-schemas"; # dev tooling inputs.flake-parts.url = "github:hercules-ci/flake-parts"; @@ -20,8 +21,7 @@ inputs.pre-commit-hooks.inputs.flake-compat.follows = ""; inputs.pre-commit-hooks.inputs.gitignore.follows = ""; - outputs = inputs@{ self, nixpkgs, nixpkgs-regression, libgit2, ... }: - + outputs = inputs@{ self, nixpkgs, nixpkgs-regression, libgit2, flake-schemas, ... }: let inherit (nixpkgs) lib; @@ -157,6 +157,8 @@ }; in { + schemas = flake-schemas.schemas; + # A Nixpkgs overlay that overrides the 'nix' and # 'nix-perl-bindings' packages. overlays.default = overlayFor (p: p.stdenv); diff --git a/package.nix b/package.nix index c3e565399e8..99ffd5e4062 100644 --- a/package.nix +++ b/package.nix @@ -38,6 +38,8 @@ , busybox-sandbox-shell ? 
null +, flake-schemas + # Configuration Options #: # This probably seems like too many degrees of freedom, but it @@ -260,6 +262,7 @@ in { (lib.enableFeature enableMarkdown "markdown") (lib.enableFeature installUnitTests "install-unit-tests") (lib.withFeatureAs true "readline-flavor" readlineFlavor) + "--with-default-flake-schemas=${flake-schemas}" ] ++ lib.optionals (!forDevShell) [ "--sysconfdir=/etc" ] ++ lib.optionals installUnitTests [ diff --git a/packaging/dependencies.nix b/packaging/dependencies.nix index 34b3449718d..4f7a6daabce 100644 --- a/packaging/dependencies.nix +++ b/packaging/dependencies.nix @@ -114,4 +114,6 @@ scope: { inherit resolvePath filesetToSource; mkMesonDerivation = f: stdenv.mkDerivation (lib.extends localSourceLayer f); + + inherit (inputs) flake-schemas; } diff --git a/packaging/hydra.nix b/packaging/hydra.nix index 4dfaf9bbfaa..d563402318a 100644 --- a/packaging/hydra.nix +++ b/packaging/hydra.nix @@ -28,6 +28,8 @@ let test-daemon = daemon; doBuild = false; + + inherit (inputs) flake-schemas; }; # Technically we could just return `pkgs.nixComponents`, but for Hydra it's diff --git a/src/libcmd/installable-flake.cc b/src/libcmd/installable-flake.cc index d42fa7aaccc..899919550e6 100644 --- a/src/libcmd/installable-flake.cc +++ b/src/libcmd/installable-flake.cc @@ -43,20 +43,6 @@ std::vector InstallableFlake::getActualAttrPaths() return res; } -Value * InstallableFlake::getFlakeOutputs(EvalState & state, const flake::LockedFlake & lockedFlake) -{ - auto vFlake = state.allocValue(); - - callFlake(state, lockedFlake, *vFlake); - - auto aOutputs = vFlake->attrs()->get(state.symbols.create("outputs")); - assert(aOutputs); - - state.forceValue(*aOutputs->value, aOutputs->value->determinePos(noPos)); - - return aOutputs->value; -} - static std::string showAttrPaths(const std::vector & paths) { std::string s; diff --git a/src/libcmd/installable-flake.hh b/src/libcmd/installable-flake.hh index 314918c140d..30240a35ae3 100644 --- a/src/libcmd/installable-flake.hh +++ b/src/libcmd/installable-flake.hh @@ -52,8 +52,6 @@ struct InstallableFlake : InstallableValue std::vector getActualAttrPaths(); - Value * getFlakeOutputs(EvalState & state, const flake::LockedFlake & lockedFlake); - DerivedPathsWithInfo toDerivedPaths() override; std::pair toValue(EvalState & state) override; diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index eb7048d3930..d10df2e54b5 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -444,11 +444,6 @@ ref openEvalCache( : std::nullopt; auto rootLoader = [&state, lockedFlake]() { - /* For testing whether the evaluation cache is - complete. */ - if (getEnv("NIX_ALLOW_EVAL").value_or("1") == "0") - throw Error("not everything is cached, but evaluation is not allowed"); - auto vFlake = state.allocValue(); flake::callFlake(state, *lockedFlake, *vFlake); diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc index 2630c34d563..d43577cfd9a 100644 --- a/src/libexpr/eval-cache.cc +++ b/src/libexpr/eval-cache.cc @@ -368,6 +368,12 @@ Value * EvalCache::getRootValue() { if (!value) { debug("getting root value"); + + /* For testing whether the evaluation cache is + complete. 
*/ + if (getEnv("NIX_ALLOW_EVAL").value_or("1") == "0") + throw Error("not everything is cached, but evaluation is not allowed"); + value = allocRootValue(rootLoader()); } return *value; diff --git a/src/libexpr/eval-cache.hh b/src/libexpr/eval-cache.hh index b1911e3a4f7..a6c8ad011c8 100644 --- a/src/libexpr/eval-cache.hh +++ b/src/libexpr/eval-cache.hh @@ -34,7 +34,11 @@ class EvalCache : public std::enable_shared_from_this friend struct CachedEvalError; std::shared_ptr db; + +public: EvalState & state; + +private: typedef std::function RootLoader; RootLoader rootLoader; RootValue value; @@ -89,7 +93,10 @@ class AttrCursor : public std::enable_shared_from_this friend class EvalCache; friend struct CachedEvalError; +public: ref root; + +private: typedef std::optional, Symbol>> Parent; Parent parent; RootValue _value; diff --git a/src/libflake/flake/flake.cc b/src/libflake/flake/flake.cc index 21acb93eee2..c69c4d66e38 100644 --- a/src/libflake/flake/flake.cc +++ b/src/libflake/flake/flake.cc @@ -204,7 +204,7 @@ static std::map parseFlakeInputs( return inputs; } -static Flake readFlake( +Flake readFlake( EvalState & state, const FlakeRef & originalRef, const FlakeRef & resolvedRef, @@ -336,19 +336,15 @@ static LockFile readLockFile(const SourcePath & lockFilePath) : LockFile(); } -/* Compute an in-memory lock file for the specified top-level flake, - and optionally write it to file, if the flake is writable. */ LockedFlake lockFlake( EvalState & state, const FlakeRef & topRef, - const LockFlags & lockFlags) + const LockFlags & lockFlags, + Flake flake, + FlakeCache & flakeCache) { - FlakeCache flakeCache; - auto useRegistries = lockFlags.useRegistries.value_or(flakeSettings.useRegistries); - auto flake = getFlake(state, topRef, useRegistries, flakeCache); - if (lockFlags.applyNixConfig) { flake.config.apply(); state.store->setOptions(); @@ -738,6 +734,28 @@ LockedFlake lockFlake( } } +LockedFlake lockFlake( + EvalState & state, + const FlakeRef & topRef, + const LockFlags & lockFlags) +{ + FlakeCache flakeCache; + + auto useRegistries = lockFlags.useRegistries.value_or(flakeSettings.useRegistries); + + return lockFlake(state, topRef, lockFlags, getFlake(state, topRef, useRegistries, flakeCache), flakeCache); +} + +LockedFlake lockFlake( + EvalState & state, + const FlakeRef & topRef, + const LockFlags & lockFlags, + Flake flake) +{ + FlakeCache flakeCache; + return lockFlake(state, topRef, lockFlags, std::move(flake), flakeCache); +} + void callFlake(EvalState & state, const LockedFlake & lockedFlake, Value & vRes) diff --git a/src/libflake/flake/flake.hh b/src/libflake/flake/flake.hh index 1ba085f0f46..2ac12b59047 100644 --- a/src/libflake/flake/flake.hh +++ b/src/libflake/flake/flake.hh @@ -193,11 +193,29 @@ struct LockFlags std::set inputUpdates; }; +Flake readFlake( + EvalState & state, + const FlakeRef & originalRef, + const FlakeRef & resolvedRef, + const FlakeRef & lockedRef, + const SourcePath & rootDir, + const InputPath & lockRootPath); + +/** + * Compute an in-memory lock file for the specified top-level flake, + * and optionally write it to file, if the flake is writable. 
+ */ LockedFlake lockFlake( EvalState & state, const FlakeRef & flakeRef, const LockFlags & lockFlags); +LockedFlake lockFlake( + EvalState & state, + const FlakeRef & topRef, + const LockFlags & lockFlags, + Flake flake); + void callFlake( EvalState & state, const LockedFlake & lockedFlake, diff --git a/src/nix/call-flake-schemas.nix b/src/nix/call-flake-schemas.nix new file mode 100644 index 00000000000..cd6d4c3ae53 --- /dev/null +++ b/src/nix/call-flake-schemas.nix @@ -0,0 +1,43 @@ +/* The flake providing default schemas. */ +defaultSchemasFlake: + +/* The flake whose contents we want to extract. */ +flake: + +let + + # Helper functions. + + mapAttrsToList = f: attrs: map (name: f name attrs.${name}) (builtins.attrNames attrs); + +in + +rec { + outputNames = builtins.attrNames flake.outputs; + + allSchemas = (flake.outputs.schemas or defaultSchemasFlake.schemas) // schemaOverrides; + + schemaOverrides = {}; # FIXME + + schemas = + builtins.listToAttrs (builtins.concatLists (mapAttrsToList + (outputName: output: + if allSchemas ? ${outputName} then + [{ name = outputName; value = allSchemas.${outputName}; }] + else + [ ]) + flake.outputs)); + + inventory = + builtins.mapAttrs + (outputName: output: + if schemas ? ${outputName} && schemas.${outputName}.version == 1 + then + { output = schemas.${outputName}.inventory output; + inherit (schemas.${outputName}) doc; + } + else + { unknown = true; } + ) + flake.outputs; +} diff --git a/src/nix/flake-check.md b/src/nix/flake-check.md index c8307f8d85b..71dd916407e 100644 --- a/src/nix/flake-check.md +++ b/src/nix/flake-check.md @@ -18,56 +18,20 @@ R""( # Description This command verifies that the flake specified by flake reference -*flake-url* can be evaluated successfully (as detailed below), and -that the derivations specified by the flake's `checks` output can be -built successfully. +*flake-url* can be evaluated and built successfully according to its +`schemas` flake output. For every flake output that has a schema +definition, `nix flake check` uses the schema to extract the contents +of the output. Then, for every item in the contents: + +* It evaluates the elements of the `evalChecks` attribute set returned + by the schema for that item, printing an error or warning for every + check that fails to evaluate or that evaluates to `false`. + +* It builds `derivation` attribute returned by the schema for that + item, if the item has the `isFlakeCheck` attribute. If the `keep-going` option is set to `true`, Nix will keep evaluating as much as it can and report the errors as it encounters them. Otherwise it will stop at the first error. 
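To make the check flow above concrete, here is a small, hypothetical sketch (not part of this patch) of the kind of leaf node a schema's `inventory` function could return, assuming the node attributes described in `doc/manual/src/protocols/flake-schemas.md`; the `checkNode` helper and the `hasName` check are made up for illustration:

```nix
let
  # Hypothetical helper: wrap a derivation in a leaf node for a `checks`-style schema.
  checkNode = drv: {
    what = "CI check";
    # Built by `nix flake check` because `isFlakeCheck` is true.
    derivation = drv;
    isFlakeCheck = true;
    evalChecks = {
      # `nix flake check` reports an error for every attribute that does not evaluate to `true`.
      hasName = drv ? name;
    };
  };
in
  checkNode
```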
-# Evaluation checks - -The following flake output attributes must be derivations: - -* `checks.`*system*`.`*name* -* `defaultPackage.`*system* -* `devShell.`*system* -* `devShells.`*system*`.`*name* -* `nixosConfigurations.`*name*`.config.system.build.toplevel` -* `packages.`*system*`.`*name* - -The following flake output attributes must be [app -definitions](./nix3-run.md): - -* `apps.`*system*`.`*name* -* `defaultApp.`*system* - -The following flake output attributes must be [template -definitions](./nix3-flake-init.md): - -* `defaultTemplate` -* `templates.`*name* - -The following flake output attributes must be *Nixpkgs overlays*: - -* `overlay` -* `overlays.`*name* - -The following flake output attributes must be *NixOS modules*: - -* `nixosModule` -* `nixosModules.`*name* - -The following flake output attributes must be -[bundlers](./nix3-bundle.md): - -* `bundlers.`*name* -* `defaultBundler` - -In addition, the `hydraJobs` output is evaluated in the same way as -Hydra's `hydra-eval-jobs` (i.e. as a arbitrarily deeply nested -attribute set of derivations). Similarly, the -`legacyPackages`.*system* output is evaluated like `nix-env --query --available `. - )"" diff --git a/src/nix/flake-schemas.cc b/src/nix/flake-schemas.cc new file mode 100644 index 00000000000..b93aaa4a4fe --- /dev/null +++ b/src/nix/flake-schemas.cc @@ -0,0 +1,221 @@ +#include "flake-schemas.hh" +#include "eval-settings.hh" +#include "fetch-to-store.hh" +#include "memory-source-accessor.hh" + +namespace nix::flake_schemas { + +using namespace eval_cache; +using namespace flake; + +static LockedFlake getBuiltinDefaultSchemasFlake(EvalState & state) +{ + auto accessor = make_ref(); + + accessor->setPathDisplay("«builtin-flake-schemas»"); + + accessor->addFile( + CanonPath("flake.nix"), +#include "builtin-flake-schemas.nix.gen.hh" + ); + + // FIXME: remove this when we have lazy trees. + auto storePath = fetchToStore(*state.store, {accessor}, FetchMode::Copy); + state.allowPath(storePath); + + // Construct a dummy flakeref. + auto flakeRef = parseFlakeRef( + fmt("tarball+https://builtin-flake-schemas?narHash=%s", + state.store->queryPathInfo(storePath)->narHash.to_string(HashFormat::SRI, true))); + + auto flake = readFlake(state, flakeRef, flakeRef, flakeRef, state.rootPath(state.store->toRealPath(storePath)), {}); + + return lockFlake(state, flakeRef, {}, flake); +} + +std::tuple, ref> +call(EvalState & state, std::shared_ptr lockedFlake, std::optional defaultSchemasFlake) +{ + auto fingerprint = lockedFlake->getFingerprint(state.store); + + std::string callFlakeSchemasNix = +#include "call-flake-schemas.nix.gen.hh" + ; + + auto lockedDefaultSchemasFlake = + defaultSchemasFlake ? flake::lockFlake(state, *defaultSchemasFlake, {}) : getBuiltinDefaultSchemasFlake(state); + auto lockedDefaultSchemasFlakeFingerprint = lockedDefaultSchemasFlake.getFingerprint(state.store); + + std::optional fingerprint2; + if (fingerprint && lockedDefaultSchemasFlakeFingerprint) + fingerprint2 = hashString( + HashAlgorithm::SHA256, + fmt("app:%s:%s:%s", + hashString(HashAlgorithm::SHA256, callFlakeSchemasNix).to_string(HashFormat::Base16, false), + fingerprint->to_string(HashFormat::Base16, false), + lockedDefaultSchemasFlakeFingerprint->to_string(HashFormat::Base16, false))); + + // FIXME: merge with openEvalCache(). + auto cache = make_ref( + evalSettings.useEvalCache && evalSettings.pureEval ? 
fingerprint2 : std::nullopt, + state, + [&state, lockedFlake, callFlakeSchemasNix, lockedDefaultSchemasFlake]() { + auto vCallFlakeSchemas = state.allocValue(); + state.eval( + state.parseExprFromString(callFlakeSchemasNix, state.rootPath(CanonPath::root)), *vCallFlakeSchemas); + + auto vFlake = state.allocValue(); + flake::callFlake(state, *lockedFlake, *vFlake); + + auto vDefaultSchemasFlake = state.allocValue(); + if (vFlake->type() == nAttrs && vFlake->attrs()->get(state.symbols.create("schemas"))) + vDefaultSchemasFlake->mkNull(); + else + flake::callFlake(state, lockedDefaultSchemasFlake, *vDefaultSchemasFlake); + + auto vRes = state.allocValue(); + Value * args[] = {vDefaultSchemasFlake, vFlake}; + state.callFunction(*vCallFlakeSchemas, 2, args, *vRes, noPos); + + return vRes; + }); + + return {cache, cache->getRoot()->getAttr("inventory")}; +} + +/* Derive the flake output attribute path from the cursor used to + traverse the inventory. We do this so we don't have to maintain a + separate attrpath for that. */ +std::vector toAttrPath(ref cursor) +{ + auto attrPath = cursor->getAttrPath(); + std::vector res; + auto i = attrPath.begin(); + assert(i != attrPath.end()); + ++i; // skip "inventory" + assert(i != attrPath.end()); + res.push_back(*i++); // copy output name + if (i != attrPath.end()) + ++i; // skip "outputs" + while (i != attrPath.end()) { + ++i; // skip "children" + if (i != attrPath.end()) + res.push_back(*i++); + } + return res; +} + +std::string toAttrPathStr(ref cursor) +{ + return concatStringsSep(".", cursor->root->state.symbols.resolve(toAttrPath(cursor))); +} + +void forEachOutput( + ref inventory, + std::function output, const std::string & doc, bool isLast)> f) +{ + // FIXME: handle non-IFD outputs first. + // evalSettings.enableImportFromDerivation.setDefault(false); + + auto outputNames = inventory->getAttrs(); + for (const auto & [i, outputName] : enumerate(outputNames)) { + auto output = inventory->getAttr(outputName); + try { + auto isUnknown = (bool) output->maybeGetAttr("unknown"); + Activity act(*logger, lvlInfo, actUnknown, fmt("evaluating '%s'", toAttrPathStr(output))); + f(outputName, + isUnknown ? std::shared_ptr() : output->getAttr("output"), + isUnknown ? "" : output->getAttr("doc")->getString(), + i + 1 == outputNames.size()); + } catch (Error & e) { + e.addTrace(nullptr, "while evaluating the flake output '%s':", toAttrPathStr(output)); + throw; + } + } +} + +void visit( + std::optional system, + ref node, + std::function leaf)> visitLeaf, + std::function)> visitNonLeaf, + std::function node, const std::vector & systems)> visitFiltered) +{ + Activity act(*logger, lvlInfo, actUnknown, fmt("evaluating '%s'", toAttrPathStr(node))); + + /* Apply the system type filter. */ + if (system) { + if (auto forSystems = node->maybeGetAttr("forSystems")) { + auto systems = forSystems->getListOfStrings(); + if (std::find(systems.begin(), systems.end(), system) == systems.end()) { + visitFiltered(node, systems); + return; + } + } + } + + if (auto children = node->maybeGetAttr("children")) { + visitNonLeaf([&](ForEachChild f) { + auto attrNames = children->getAttrs(); + for (const auto & [i, attrName] : enumerate(attrNames)) { + try { + f(attrName, children->getAttr(attrName), i + 1 == attrNames.size()); + } catch (Error & e) { + // FIXME: make it a flake schema attribute whether to ignore evaluation errors. 
+ if (node->root->state.symbols[toAttrPath(node)[0]] != "legacyPackages") { + e.addTrace(nullptr, "while evaluating the flake output attribute '%s':", toAttrPathStr(node)); + throw; + } + } + } + }); + } + + else + visitLeaf(ref(node)); +} + +std::optional what(ref leaf) +{ + if (auto what = leaf->maybeGetAttr("what")) + return what->getString(); + else + return std::nullopt; +} + +std::optional shortDescription(ref leaf) +{ + if (auto what = leaf->maybeGetAttr("shortDescription")) { + auto s = trim(what->getString()); + if (s != "") + return s; + } + return std::nullopt; +} + +std::shared_ptr derivation(ref leaf) +{ + return leaf->maybeGetAttr("derivation"); +} + +MixFlakeSchemas::MixFlakeSchemas() +{ + addFlag( + {.longName = "default-flake-schemas", + .description = "The URL of the flake providing default flake schema definitions.", + .labels = {"flake-ref"}, + .handler = {&defaultFlakeSchemas}, + .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { + completeFlakeRef(completions, getStore(), prefix); + }}}); +} + +std::optional MixFlakeSchemas::getDefaultFlakeSchemas() +{ + if (!defaultFlakeSchemas) + return std::nullopt; + else + return parseFlakeRef(*defaultFlakeSchemas, absPath(".")); +} + +} diff --git a/src/nix/flake-schemas.hh b/src/nix/flake-schemas.hh new file mode 100644 index 00000000000..9d1ba75a0ed --- /dev/null +++ b/src/nix/flake-schemas.hh @@ -0,0 +1,45 @@ +#include "eval-cache.hh" +#include "flake/flake.hh" +#include "command.hh" + +namespace nix::flake_schemas { + +using namespace eval_cache; + +std::tuple, ref> +call(EvalState & state, std::shared_ptr lockedFlake, std::optional defaultSchemasFlake); + +std::vector toAttrPath(ref cursor); + +std::string toAttrPathStr(ref cursor); + +void forEachOutput( + ref inventory, + std::function output, const std::string & doc, bool isLast)> f); + +typedef std::function attr, bool isLast)> ForEachChild; + +void visit( + std::optional system, + ref node, + std::function leaf)> visitLeaf, + std::function)> visitNonLeaf, + std::function node, const std::vector & systems)> visitFiltered); + +std::optional what(ref leaf); + +std::optional shortDescription(ref leaf); + +std::shared_ptr derivation(ref leaf); + +/* Some helper functions for processing flake schema output. */ +struct MixFlakeSchemas : virtual Args, virtual StoreCommand +{ + std::optional defaultFlakeSchemas; + + MixFlakeSchemas(); + + std::optional getDefaultFlakeSchemas(); +}; + +} diff --git a/src/nix/flake.cc b/src/nix/flake.cc index a86e36206b7..691632e2ef1 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -17,6 +17,7 @@ #include "eval-cache.hh" #include "markdown.hh" #include "users.hh" +#include "flake-schemas.hh" #include #include @@ -164,31 +165,6 @@ struct CmdFlakeLock : FlakeCommand } }; -static void enumerateOutputs(EvalState & state, Value & vFlake, - std::function callback) -{ - auto pos = vFlake.determinePos(noPos); - state.forceAttrs(vFlake, pos, "while evaluating a flake to get its outputs"); - - auto aOutputs = vFlake.attrs()->get(state.symbols.create("outputs")); - assert(aOutputs); - - state.forceAttrs(*aOutputs->value, pos, "while evaluating the outputs of a flake"); - - auto sHydraJobs = state.symbols.create("hydraJobs"); - - /* Hack: ensure that hydraJobs is evaluated before anything - else. This way we can disable IFD for hydraJobs and then enable - it for other outputs. 
*/ - if (auto attr = aOutputs->value->attrs()->get(sHydraJobs)) - callback(state.symbols[attr->name], *attr->value, attr->pos); - - for (auto & attr : *aOutputs->value->attrs()) { - if (attr.name != sHydraJobs) - callback(state.symbols[attr.name], *attr.value, attr.pos); - } -} - struct CmdFlakeMetadata : FlakeCommand, MixJSON { std::string description() override @@ -319,7 +295,7 @@ struct CmdFlakeInfo : CmdFlakeMetadata } }; -struct CmdFlakeCheck : FlakeCommand +struct CmdFlakeCheck : FlakeCommand, flake_schemas::MixFlakeSchemas { bool build = true; bool checkAllSystems = false; @@ -360,16 +336,26 @@ struct CmdFlakeCheck : FlakeCommand auto state = getEvalState(); lockFlags.applyNixConfig = true; - auto flake = lockFlake(); + auto flake = std::make_shared(lockFlake()); auto localSystem = std::string(settings.thisSystem.get()); + auto [cache, inventory] = flake_schemas::call(*state, flake, getDefaultFlakeSchemas()); + + std::vector drvPaths; + + std::set uncheckedOutputs; + std::set omittedSystems; + + std::function node)> visit; + bool hasErrors = false; + auto reportError = [&](const Error & e) { try { throw e; } catch (Error & e) { if (settings.keepGoing) { - ignoreException(); + logError({.msg = e.info().msg}); hasErrors = true; } else @@ -377,428 +363,70 @@ struct CmdFlakeCheck : FlakeCommand } }; - std::set omittedSystems; - - // FIXME: rewrite to use EvalCache. - - auto resolve = [&] (PosIdx p) { - return state->positions[p]; - }; - - auto argHasName = [&] (Symbol arg, std::string_view expected) { - std::string_view name = state->symbols[arg]; - return - name == expected - || name == "_" - || (hasPrefix(name, "_") && name.substr(1) == expected); - }; - - auto checkSystemName = [&](const std::string & system, const PosIdx pos) { - // FIXME: what's the format of "system"? 
- if (system.find('-') == std::string::npos) - reportError(Error("'%s' is not a valid system type, at %s", system, resolve(pos))); - }; - - auto checkSystemType = [&](const std::string & system, const PosIdx pos) { - if (!checkAllSystems && system != localSystem) { - omittedSystems.insert(system); - return false; - } else { - return true; - } - }; - - auto checkDerivation = [&](const std::string & attrPath, Value & v, const PosIdx pos) -> std::optional { - try { - Activity act(*logger, lvlInfo, actUnknown, - fmt("checking derivation %s", attrPath)); - auto packageInfo = getDerivation(*state, v, false); - if (!packageInfo) - throw Error("flake attribute '%s' is not a derivation", attrPath); - else { - // FIXME: check meta attributes - auto storePath = packageInfo->queryDrvPath(); - if (storePath) { - logger->log(lvlInfo, - fmt("derivation evaluated to %s", - store->printStorePath(storePath.value()))); - } - return storePath; - } - } catch (Error & e) { - e.addTrace(resolve(pos), HintFmt("while checking the derivation '%s'", attrPath)); - reportError(e); - } - return std::nullopt; - }; - - std::vector drvPaths; - - auto checkApp = [&](const std::string & attrPath, Value & v, const PosIdx pos) { - try { - #if 0 - // FIXME - auto app = App(*state, v); - for (auto & i : app.context) { - auto [drvPathS, outputName] = NixStringContextElem::parse(i); - store->parseStorePath(drvPathS); - } - #endif - } catch (Error & e) { - e.addTrace(resolve(pos), HintFmt("while checking the app definition '%s'", attrPath)); - reportError(e); - } - }; - - auto checkOverlay = [&](const std::string & attrPath, Value & v, const PosIdx pos) { - try { - Activity act(*logger, lvlInfo, actUnknown, - fmt("checking overlay '%s'", attrPath)); - state->forceValue(v, pos); - if (!v.isLambda()) { - throw Error("overlay is not a function, but %s instead", showType(v)); - } - if (v.payload.lambda.fun->hasFormals() - || !argHasName(v.payload.lambda.fun->arg, "final")) - throw Error("overlay does not take an argument named 'final'"); - // FIXME: if we have a 'nixpkgs' input, use it to - // evaluate the overlay. 
- } catch (Error & e) { - e.addTrace(resolve(pos), HintFmt("while checking the overlay '%s'", attrPath)); - reportError(e); - } - }; - - auto checkModule = [&](const std::string & attrPath, Value & v, const PosIdx pos) { - try { - Activity act(*logger, lvlInfo, actUnknown, - fmt("checking NixOS module '%s'", attrPath)); - state->forceValue(v, pos); - } catch (Error & e) { - e.addTrace(resolve(pos), HintFmt("while checking the NixOS module '%s'", attrPath)); - reportError(e); - } - }; - - std::function checkHydraJobs; - - checkHydraJobs = [&](const std::string & attrPath, Value & v, const PosIdx pos) { - try { - Activity act(*logger, lvlInfo, actUnknown, - fmt("checking Hydra job '%s'", attrPath)); - state->forceAttrs(v, pos, ""); - - if (state->isDerivation(v)) - throw Error("jobset should not be a derivation at top-level"); - - for (auto & attr : *v.attrs()) { - state->forceAttrs(*attr.value, attr.pos, ""); - auto attrPath2 = concatStrings(attrPath, ".", state->symbols[attr.name]); - if (state->isDerivation(*attr.value)) { - Activity act(*logger, lvlInfo, actUnknown, - fmt("checking Hydra job '%s'", attrPath2)); - checkDerivation(attrPath2, *attr.value, attr.pos); - } else - checkHydraJobs(attrPath2, *attr.value, attr.pos); - } - - } catch (Error & e) { - e.addTrace(resolve(pos), HintFmt("while checking the Hydra jobset '%s'", attrPath)); - reportError(e); - } - }; - - auto checkNixOSConfiguration = [&](const std::string & attrPath, Value & v, const PosIdx pos) { - try { - Activity act(*logger, lvlInfo, actUnknown, - fmt("checking NixOS configuration '%s'", attrPath)); - Bindings & bindings(*state->allocBindings(0)); - auto vToplevel = findAlongAttrPath(*state, "config.system.build.toplevel", bindings, v).first; - state->forceValue(*vToplevel, pos); - if (!state->isDerivation(*vToplevel)) - throw Error("attribute 'config.system.build.toplevel' is not a derivation"); - } catch (Error & e) { - e.addTrace(resolve(pos), HintFmt("while checking the NixOS configuration '%s'", attrPath)); - reportError(e); - } - }; - - auto checkTemplate = [&](const std::string & attrPath, Value & v, const PosIdx pos) { - try { - Activity act(*logger, lvlInfo, actUnknown, - fmt("checking template '%s'", attrPath)); - - state->forceAttrs(v, pos, ""); - - if (auto attr = v.attrs()->get(state->symbols.create("path"))) { - if (attr->name == state->symbols.create("path")) { - NixStringContext context; - auto path = state->coerceToPath(attr->pos, *attr->value, context, ""); - if (!path.pathExists()) - throw Error("template '%s' refers to a non-existent path '%s'", attrPath, path); - // TODO: recursively check the flake in 'path'. 
- } - } else - throw Error("template '%s' lacks attribute 'path'", attrPath); - - if (auto attr = v.attrs()->get(state->symbols.create("description"))) - state->forceStringNoCtx(*attr->value, attr->pos, ""); - else - throw Error("template '%s' lacks attribute 'description'", attrPath); - - for (auto & attr : *v.attrs()) { - std::string_view name(state->symbols[attr.name]); - if (name != "path" && name != "description" && name != "welcomeText") - throw Error("template '%s' has unsupported attribute '%s'", attrPath, name); - } - } catch (Error & e) { - e.addTrace(resolve(pos), HintFmt("while checking the template '%s'", attrPath)); - reportError(e); - } - }; - - auto checkBundler = [&](const std::string & attrPath, Value & v, const PosIdx pos) { - try { - Activity act(*logger, lvlInfo, actUnknown, - fmt("checking bundler '%s'", attrPath)); - state->forceValue(v, pos); - if (!v.isLambda()) - throw Error("bundler must be a function"); - // TODO: check types of inputs/outputs? - } catch (Error & e) { - e.addTrace(resolve(pos), HintFmt("while checking the template '%s'", attrPath)); - reportError(e); - } - }; - + visit = [&](ref node) { - Activity act(*logger, lvlInfo, actUnknown, "evaluating flake"); - - auto vFlake = state->allocValue(); - flake::callFlake(*state, flake, *vFlake); - - enumerateOutputs(*state, - *vFlake, - [&](const std::string & name, Value & vOutput, const PosIdx pos) { - Activity act(*logger, lvlInfo, actUnknown, - fmt("checking flake output '%s'", name)); - - try { - evalSettings.enableImportFromDerivation.setDefault(name != "hydraJobs"); - - state->forceValue(vOutput, pos); - - std::string_view replacement = - name == "defaultPackage" ? "packages..default" : - name == "defaultApp" ? "apps..default" : - name == "defaultTemplate" ? "templates.default" : - name == "defaultBundler" ? "bundlers..default" : - name == "overlay" ? "overlays.default" : - name == "devShell" ? "devShells..default" : - name == "nixosModule" ? "nixosModules.default" : - ""; - if (replacement != "") - warn("flake output attribute '%s' is deprecated; use '%s' instead", name, replacement); - - if (name == "checks") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) { - const auto & attr_name = state->symbols[attr.name]; - checkSystemName(attr_name, attr.pos); - if (checkSystemType(attr_name, attr.pos)) { - state->forceAttrs(*attr.value, attr.pos, ""); - for (auto & attr2 : *attr.value->attrs()) { - auto drvPath = checkDerivation( - fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), - *attr2.value, attr2.pos); - if (drvPath && attr_name == settings.thisSystem.get()) { - drvPaths.push_back(DerivedPath::Built { - .drvPath = makeConstantStorePathRef(*drvPath), - .outputs = OutputsSpec::All { }, - }); - } - } - } - } - } - - else if (name == "formatter") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) { - const auto & attr_name = state->symbols[attr.name]; - checkSystemName(attr_name, attr.pos); - if (checkSystemType(attr_name, attr.pos)) { - checkApp( - fmt("%s.%s", name, attr_name), - *attr.value, attr.pos); - }; - } - } + flake_schemas::visit( + checkAllSystems ? 
std::optional() : localSystem, + node, - else if (name == "packages" || name == "devShells") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) { - const auto & attr_name = state->symbols[attr.name]; - checkSystemName(attr_name, attr.pos); - if (checkSystemType(attr_name, attr.pos)) { - state->forceAttrs(*attr.value, attr.pos, ""); - for (auto & attr2 : *attr.value->attrs()) - checkDerivation( - fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), - *attr2.value, attr2.pos); - }; - } - } - - else if (name == "apps") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) { - const auto & attr_name = state->symbols[attr.name]; - checkSystemName(attr_name, attr.pos); - if (checkSystemType(attr_name, attr.pos)) { - state->forceAttrs(*attr.value, attr.pos, ""); - for (auto & attr2 : *attr.value->attrs()) - checkApp( - fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), - *attr2.value, attr2.pos); - }; - } - } - - else if (name == "defaultPackage" || name == "devShell") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) { - const auto & attr_name = state->symbols[attr.name]; - checkSystemName(attr_name, attr.pos); - if (checkSystemType(attr_name, attr.pos)) { - checkDerivation( - fmt("%s.%s", name, attr_name), - *attr.value, attr.pos); - }; - } - } - - else if (name == "defaultApp") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) { - const auto & attr_name = state->symbols[attr.name]; - checkSystemName(attr_name, attr.pos); - if (checkSystemType(attr_name, attr.pos) ) { - checkApp( - fmt("%s.%s", name, attr_name), - *attr.value, attr.pos); - }; - } - } - - else if (name == "legacyPackages") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) { - checkSystemName(state->symbols[attr.name], attr.pos); - checkSystemType(state->symbols[attr.name], attr.pos); - // FIXME: do getDerivations? 
- } - } - - else if (name == "overlay") - checkOverlay(name, vOutput, pos); - - else if (name == "overlays") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) - checkOverlay(fmt("%s.%s", name, state->symbols[attr.name]), - *attr.value, attr.pos); - } - - else if (name == "nixosModule") - checkModule(name, vOutput, pos); - - else if (name == "nixosModules") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) - checkModule(fmt("%s.%s", name, state->symbols[attr.name]), - *attr.value, attr.pos); - } - - else if (name == "nixosConfigurations") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) - checkNixOSConfiguration(fmt("%s.%s", name, state->symbols[attr.name]), - *attr.value, attr.pos); - } - - else if (name == "hydraJobs") - checkHydraJobs(name, vOutput, pos); - - else if (name == "defaultTemplate") - checkTemplate(name, vOutput, pos); - - else if (name == "templates") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) - checkTemplate(fmt("%s.%s", name, state->symbols[attr.name]), - *attr.value, attr.pos); + [&](ref leaf) + { + if (auto evalChecks = leaf->maybeGetAttr("evalChecks")) { + auto checkNames = evalChecks->getAttrs(); + for (auto & checkName : checkNames) { + // FIXME: update activity + auto cursor = evalChecks->getAttr(checkName); + auto b = cursor->getBool(); + if (!b) + reportError(Error("Evaluation check '%s' failed.", flake_schemas::toAttrPathStr(cursor))); } + } - else if (name == "defaultBundler") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) { - const auto & attr_name = state->symbols[attr.name]; - checkSystemName(attr_name, attr.pos); - if (checkSystemType(attr_name, attr.pos)) { - checkBundler( - fmt("%s.%s", name, attr_name), - *attr.value, attr.pos); - }; + if (auto drv = flake_schemas::derivation(leaf)) { + if (auto isFlakeCheck = leaf->maybeGetAttr("isFlakeCheck")) { + if (isFlakeCheck->getBool()) { + auto drvPath = drv->forceDerivation(); + drvPaths.push_back(DerivedPath::Built { + .drvPath = makeConstantStorePathRef(drvPath), + .outputs = OutputsSpec::All { }, + }); } } + } + }, - else if (name == "bundlers") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) { - const auto & attr_name = state->symbols[attr.name]; - checkSystemName(attr_name, attr.pos); - if (checkSystemType(attr_name, attr.pos)) { - state->forceAttrs(*attr.value, attr.pos, ""); - for (auto & attr2 : *attr.value->attrs()) { - checkBundler( - fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), - *attr2.value, attr2.pos); - } - }; - } - } + [&](std::function forEachChild) + { + forEachChild([&](Symbol attrName, ref node, bool isLast) + { + visit(node); + }); + }, + + [&](ref node, const std::vector & systems) { + for (auto & s : systems) + omittedSystems.insert(s); + }); + }; - else if ( - name == "lib" - || name == "darwinConfigurations" - || name == "darwinModules" - || name == "flakeModule" - || name == "flakeModules" - || name == "herculesCI" - || name == "homeConfigurations" - || name == "homeModule" - || name == "homeModules" - || name == "nixopsConfigurations" - ) - // Known but unchecked community attribute - ; - - else - warn("unknown flake output '%s'", name); + flake_schemas::forEachOutput(inventory, [&](Symbol outputName, std::shared_ptr output, const std::string & doc, bool isLast) + { + if (output) { + visit(ref(output)); + } else + uncheckedOutputs.insert(state->symbols[outputName]); + }); - } 
catch (Error & e) { - e.addTrace(resolve(pos), HintFmt("while checking flake output '%s'", name)); - reportError(e); - } - }); - } + if (!uncheckedOutputs.empty()) + warn("The following flake outputs are unchecked: %s.", + concatStringsSep(", ", uncheckedOutputs)); // FIXME: quote if (build && !drvPaths.empty()) { Activity act(*logger, lvlInfo, actUnknown, fmt("running %d flake checks", drvPaths.size())); store->buildPaths(drvPaths); } + if (hasErrors) throw Error("some errors were encountered during the evaluation"); @@ -808,7 +436,7 @@ struct CmdFlakeCheck : FlakeCommand "Use '--all-systems' to check all.", concatStringsSep(", ", omittedSystems) ); - }; + } }; }; @@ -1092,7 +720,7 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun } }; -struct CmdFlakeShow : FlakeCommand, MixJSON +struct CmdFlakeShow : FlakeCommand, MixJSON, flake_schemas::MixFlakeSchemas { bool showLegacy = false; bool showAllSystems = false; @@ -1125,267 +753,158 @@ struct CmdFlakeShow : FlakeCommand, MixJSON void run(nix::ref store) override { - evalSettings.enableImportFromDerivation.setDefault(false); - auto state = getEvalState(); auto flake = std::make_shared(lockFlake()); auto localSystem = std::string(settings.thisSystem.get()); - std::function &attrPath, - const Symbol &attr)> hasContent; - - // For frameworks it's important that structures are as lazy as possible - // to prevent infinite recursions, performance issues and errors that - // aren't related to the thing to evaluate. As a consequence, they have - // to emit more attributes than strictly (sic) necessary. - // However, these attributes with empty values are not useful to the user - // so we omit them. - hasContent = [&]( - eval_cache::AttrCursor & visitor, - const std::vector &attrPath, - const Symbol &attr) -> bool - { - auto attrPath2(attrPath); - attrPath2.push_back(attr); - auto attrPathS = state->symbols.resolve(attrPath2); - const auto & attrName = state->symbols[attr]; - - auto visitor2 = visitor.getAttr(attrName); - - try { - if ((attrPathS[0] == "apps" - || attrPathS[0] == "checks" - || attrPathS[0] == "devShells" - || attrPathS[0] == "legacyPackages" - || attrPathS[0] == "packages") - && (attrPathS.size() == 1 || attrPathS.size() == 2)) { - for (const auto &subAttr : visitor2->getAttrs()) { - if (hasContent(*visitor2, attrPath2, subAttr)) { - return true; - } - } - return false; - } - - if ((attrPathS.size() == 1) - && (attrPathS[0] == "formatter" - || attrPathS[0] == "nixosConfigurations" - || attrPathS[0] == "nixosModules" - || attrPathS[0] == "overlays" - )) { - for (const auto &subAttr : visitor2->getAttrs()) { - if (hasContent(*visitor2, attrPath2, subAttr)) { - return true; - } - } - return false; - } - - // If we don't recognize it, it's probably content - return true; - } catch (EvalError & e) { - // Some attrs may contain errors, e.g. legacyPackages of - // nixpkgs. We still want to recurse into it, instead of - // skipping it at all. 
- return true; - } - }; + auto [cache, inventory] = flake_schemas::call(*state, flake, getDefaultFlakeSchemas()); - std::function & attrPath, - const std::string & headerPrefix, - const std::string & nextPrefix)> visit; - - visit = [&]( - eval_cache::AttrCursor & visitor, - const std::vector & attrPath, - const std::string & headerPrefix, - const std::string & nextPrefix) - -> nlohmann::json - { - auto j = nlohmann::json::object(); - - auto attrPathS = state->symbols.resolve(attrPath); - - Activity act(*logger, lvlInfo, actUnknown, - fmt("evaluating '%s'", concatStringsSep(".", attrPathS))); + if (json) { + std::function node, nlohmann::json & obj)> visit; - try { - auto recurse = [&]() - { - if (!json) - logger->cout("%s", headerPrefix); - std::vector attrs; - for (const auto &attr : visitor.getAttrs()) { - if (hasContent(visitor, attrPath, attr)) - attrs.push_back(attr); - } + visit = [&](ref node, nlohmann::json & obj) + { + flake_schemas::visit( + showAllSystems ? std::optional() : localSystem, + node, + + [&](ref leaf) + { + obj.emplace("leaf", true); + + if (auto what = flake_schemas::what(leaf)) + obj.emplace("what", what); + + if (auto shortDescription = flake_schemas::shortDescription(leaf)) + obj.emplace("shortDescription", shortDescription); + + if (auto drv = flake_schemas::derivation(leaf)) + obj.emplace("derivationName", drv->getAttr(state->sName)->getString()); + + // FIXME: add more stuff + }, + + [&](std::function forEachChild) + { + auto children = nlohmann::json::object(); + forEachChild([&](Symbol attrName, ref node, bool isLast) + { + auto j = nlohmann::json::object(); + try { + visit(node, j); + } catch (EvalError & e) { + // FIXME: make it a flake schema attribute whether to ignore evaluation errors. + if (node->root->state.symbols[flake_schemas::toAttrPath(node)[0]] == "legacyPackages") + j.emplace("failed", true); + else + throw; + } + children.emplace(state->symbols[attrName], std::move(j)); + }); + obj.emplace("children", std::move(children)); + }, + + [&](ref node, const std::vector & systems) + { + obj.emplace("filtered", true); + }); + }; - for (const auto & [i, attr] : enumerate(attrs)) { - const auto & attrName = state->symbols[attr]; - bool last = i + 1 == attrs.size(); - auto visitor2 = visitor.getAttr(attrName); - auto attrPath2(attrPath); - attrPath2.push_back(attr); - auto j2 = visit(*visitor2, attrPath2, - fmt(ANSI_GREEN "%s%s" ANSI_NORMAL ANSI_BOLD "%s" ANSI_NORMAL, nextPrefix, last ? treeLast : treeConn, attrName), - nextPrefix + (last ? treeNull : treeLine)); - if (json) j.emplace(attrName, std::move(j2)); - } - }; + auto res = nlohmann::json::object(); - auto showDerivation = [&]() - { - auto name = visitor.getAttr(state->sName)->getString(); - if (json) { - std::optional description; - if (auto aMeta = visitor.maybeGetAttr(state->sMeta)) { - if (auto aDescription = aMeta->maybeGetAttr(state->sDescription)) - description = aDescription->getString(); - } - j.emplace("type", "derivation"); - j.emplace("name", name); - if (description) - j.emplace("description", *description); - } else { - logger->cout("%s: %s '%s'", - headerPrefix, - attrPath.size() == 2 && attrPathS[0] == "devShell" ? "development environment" : - attrPath.size() >= 2 && attrPathS[0] == "devShells" ? "development environment" : - attrPath.size() == 3 && attrPathS[0] == "checks" ? "derivation" : - attrPath.size() >= 1 && attrPathS[0] == "hydraJobs" ? 
"derivation" : - "package", - name); - } - }; - - if (attrPath.size() == 0 - || (attrPath.size() == 1 && ( - attrPathS[0] == "defaultPackage" - || attrPathS[0] == "devShell" - || attrPathS[0] == "formatter" - || attrPathS[0] == "nixosConfigurations" - || attrPathS[0] == "nixosModules" - || attrPathS[0] == "defaultApp" - || attrPathS[0] == "templates" - || attrPathS[0] == "overlays")) - || ((attrPath.size() == 1 || attrPath.size() == 2) - && (attrPathS[0] == "checks" - || attrPathS[0] == "packages" - || attrPathS[0] == "devShells" - || attrPathS[0] == "apps")) - ) - { - recurse(); - } + flake_schemas::forEachOutput(inventory, [&](Symbol outputName, std::shared_ptr output, const std::string & doc, bool isLast) + { + auto j = nlohmann::json::object(); + + if (!showLegacy && state->symbols[outputName] == "legacyPackages") { + j.emplace("skipped", true); + } else if (output) { + j.emplace("doc", doc); + auto j2 = nlohmann::json::object(); + visit(ref(output), j2); + j.emplace("output", std::move(j2)); + } else + j.emplace("unknown", true); - else if ( - (attrPath.size() == 2 && (attrPathS[0] == "defaultPackage" || attrPathS[0] == "devShell" || attrPathS[0] == "formatter")) - || (attrPath.size() == 3 && (attrPathS[0] == "checks" || attrPathS[0] == "packages" || attrPathS[0] == "devShells")) - ) - { - if (!showAllSystems && std::string(attrPathS[1]) != localSystem) { - if (!json) - logger->cout(fmt("%s " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--all-systems' to show)", headerPrefix)); - else { - logger->warn(fmt("%s omitted (use '--all-systems' to show)", concatStringsSep(".", attrPathS))); - } - } else { - if (visitor.isDerivation()) - showDerivation(); - else - throw Error("expected a derivation"); - } - } + res.emplace(state->symbols[outputName], j); + }); - else if (attrPath.size() > 0 && attrPathS[0] == "hydraJobs") { - if (visitor.isDerivation()) - showDerivation(); - else - recurse(); - } + logger->cout("%s", res.dump()); + } - else if (attrPath.size() > 0 && attrPathS[0] == "legacyPackages") { - if (attrPath.size() == 1) - recurse(); - else if (!showLegacy){ - if (!json) - logger->cout(fmt("%s " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--legacy' to show)", headerPrefix)); - else { - logger->warn(fmt("%s omitted (use '--legacy' to show)", concatStringsSep(".", attrPathS))); - } - } else if (!showAllSystems && std::string(attrPathS[1]) != localSystem) { - if (!json) - logger->cout(fmt("%s " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--all-systems' to show)", headerPrefix)); - else { - logger->warn(fmt("%s omitted (use '--all-systems' to show)", concatStringsSep(".", attrPathS))); - } - } else { - if (visitor.isDerivation()) - showDerivation(); - else if (attrPath.size() <= 2) - // FIXME: handle recurseIntoAttrs - recurse(); - } - } + else { + logger->cout(ANSI_BOLD "%s" ANSI_NORMAL, flake->flake.lockedRef); - else if ( - (attrPath.size() == 2 && attrPathS[0] == "defaultApp") || - (attrPath.size() == 3 && attrPathS[0] == "apps")) - { - auto aType = visitor.maybeGetAttr("type"); - if (!aType || aType->getString() != "app") - state->error("not an app definition").debugThrow(); - if (json) { - j.emplace("type", "app"); - } else { - logger->cout("%s: app", headerPrefix); - } - } + std::function node, + const std::string & headerPrefix, + const std::string & prevPrefix)> visit; - else if ( - (attrPath.size() == 1 && attrPathS[0] == "defaultTemplate") || - (attrPath.size() == 2 && attrPathS[0] == "templates")) - { - auto description = visitor.getAttr("description")->getString(); - if (json) { 
- j.emplace("type", "template"); - j.emplace("description", description); - } else { - logger->cout("%s: template: " ANSI_BOLD "%s" ANSI_NORMAL, headerPrefix, description); - } - } + visit = [&]( + ref node, + const std::string & headerPrefix, + const std::string & prevPrefix) + { + flake_schemas::visit( + showAllSystems ? std::optional() : localSystem, + node, + + [&](ref leaf) + { + auto s = headerPrefix; + + if (auto what = flake_schemas::what(leaf)) + s += fmt(": %s", *what); + + if (auto drv = flake_schemas::derivation(leaf)) + s += fmt(ANSI_ITALIC " [%s]" ANSI_NORMAL, drv->getAttr(state->sName)->getString()); + + logger->cout(s); + }, + + [&](std::function forEachChild) + { + logger->cout(headerPrefix); + forEachChild([&](Symbol attrName, ref node, bool isLast) + { + visit(node, + fmt(ANSI_GREEN "%s%s" ANSI_NORMAL ANSI_BOLD "%s" ANSI_NORMAL, prevPrefix, + isLast ? treeLast : treeConn, state->symbols[attrName]), + prevPrefix + (isLast ? treeNull : treeLine)); + }); + }, + + [&](ref node, const std::vector & systems) + { + logger->cout(fmt("%s " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--all-systems' to show)", headerPrefix)); + }); + }; - else { - auto [type, description] = - (attrPath.size() == 1 && attrPathS[0] == "overlay") - || (attrPath.size() == 2 && attrPathS[0] == "overlays") ? std::make_pair("nixpkgs-overlay", "Nixpkgs overlay") : - attrPath.size() == 2 && attrPathS[0] == "nixosConfigurations" ? std::make_pair("nixos-configuration", "NixOS configuration") : - (attrPath.size() == 1 && attrPathS[0] == "nixosModule") - || (attrPath.size() == 2 && attrPathS[0] == "nixosModules") ? std::make_pair("nixos-module", "NixOS module") : - std::make_pair("unknown", "unknown"); - if (json) { - j.emplace("type", type); - } else { - logger->cout("%s: " ANSI_WARNING "%s" ANSI_NORMAL, headerPrefix, description); - } + flake_schemas::forEachOutput(inventory, [&](Symbol outputName, std::shared_ptr output, const std::string & doc, bool isLast) + { + auto headerPrefix = fmt( + ANSI_GREEN "%s" ANSI_NORMAL ANSI_BOLD "%s" ANSI_NORMAL, + isLast ? treeLast : treeConn, state->symbols[outputName]); + + if (!showLegacy && state->symbols[outputName] == "legacyPackages") { + logger->cout(headerPrefix); + logger->cout( + ANSI_GREEN "%s" "%s" ANSI_NORMAL ANSI_ITALIC "%s" ANSI_NORMAL, + isLast ? treeNull : treeLine, + treeLast, + "(skipped; use '--legacy' to show)"); + } else if (output) { + visit(ref(output), headerPrefix, isLast ? treeNull : treeLine); + } else { + logger->cout(headerPrefix); + logger->cout( + ANSI_GREEN "%s" "%s" ANSI_NORMAL ANSI_ITALIC "%s" ANSI_NORMAL, + isLast ? 
treeNull : treeLine, + treeLast, + "(unknown flake output)"); } - } catch (EvalError & e) { - if (!(attrPath.size() > 0 && attrPathS[0] == "legacyPackages")) - throw; - } - - return j; - }; - - auto cache = openEvalCache(*state, flake); - - auto j = visit(*cache->getRoot(), {}, fmt(ANSI_BOLD "%s" ANSI_NORMAL, flake->flake.lockedRef), ""); - if (json) - logger->cout("%s", j.dump()); + }); + } } }; diff --git a/src/nix/local.mk b/src/nix/local.mk index 28b30b58619..43a22a2afb3 100644 --- a/src/nix/local.mk +++ b/src/nix/local.mk @@ -55,3 +55,9 @@ $(d)/main.cc: \ $(d)/profile.cc: $(d)/profile.md $(d)/profile.md: $(d)/profiles.md.gen.hh + +src/nix/flake.cc: src/nix/call-flake-schemas.nix.gen.hh src/nix/builtin-flake-schemas.nix.gen.hh + +src/nix/builtin-flake-schemas.nix: $(default_flake_schemas)/flake.nix + $(trace-gen) cp $^ $@ + @chmod +w $@ diff --git a/tests/functional/flakes/check.sh b/tests/functional/flakes/check.sh index 3b83dcafe4b..48a0d333aac 100755 --- a/tests/functional/flakes/check.sh +++ b/tests/functional/flakes/check.sh @@ -16,17 +16,6 @@ EOF nix flake check $flakeDir -cat > $flakeDir/flake.nix < $flakeDir/flake.nix < show-output.json nix eval --impure --expr ' let show_output = builtins.fromJSON (builtins.readFile ./show-output.json); in -assert show_output.packages.someOtherSystem.default == {}; -assert show_output.packages.${builtins.currentSystem}.default.name == "simple"; -assert show_output.legacyPackages.${builtins.currentSystem} == {}; +assert show_output.packages.output.children.someOtherSystem.filtered; +assert show_output.packages.output.children.${builtins.currentSystem}.children.default.derivationName == "simple"; +assert show_output.legacyPackages.skipped; true ' @@ -26,8 +26,8 @@ nix flake show --json --all-systems > show-output.json nix eval --impure --expr ' let show_output = builtins.fromJSON (builtins.readFile ./show-output.json); in -assert show_output.packages.someOtherSystem.default.name == "simple"; -assert show_output.legacyPackages.${builtins.currentSystem} == {}; +assert show_output.packages.output.children.someOtherSystem.children.default.derivationName == "simple"; +assert show_output.legacyPackages.skipped; true ' @@ -36,34 +36,7 @@ nix flake show --json --legacy > show-output.json nix eval --impure --expr ' let show_output = builtins.fromJSON (builtins.readFile ./show-output.json); in -assert show_output.legacyPackages.${builtins.currentSystem}.hello.name == "simple"; -true -' - -# Test that attributes are only reported when they have actual content -cat >flake.nix < show-output.json -nix eval --impure --expr ' -let show_output = builtins.fromJSON (builtins.readFile ./show-output.json); -in -assert show_output == { }; +assert show_output.legacyPackages.output.children.${builtins.currentSystem}.children.hello.derivationName == "simple"; true ' @@ -83,7 +56,7 @@ nix flake show --json --legacy --all-systems > show-output.json nix eval --impure --expr ' let show_output = builtins.fromJSON (builtins.readFile ./show-output.json); in -assert show_output.legacyPackages.${builtins.currentSystem}.AAAAAASomeThingsFailToEvaluate == { }; -assert show_output.legacyPackages.${builtins.currentSystem}.simple.name == "simple"; +assert show_output.legacyPackages.output.children.${builtins.currentSystem}.children.AAAAAASomeThingsFailToEvaluate.failed; +assert show_output.legacyPackages.output.children.${builtins.currentSystem}.children.simple.derivationName == "simple"; true ' diff --git a/tests/functional/fmt.sh b/tests/functional/fmt.sh index 
b29fe64d6bc..b0a0b2e5f7b 100755 --- a/tests/functional/fmt.sh +++ b/tests/functional/fmt.sh @@ -32,4 +32,6 @@ cat << EOF > flake.nix EOF nix fmt ./file ./folder | grep 'Formatting: ./file ./folder' nix flake check -nix flake show | grep -P "package 'formatter'" + +clearStore +nix flake show | grep -P "package.*\[formatter\]" From b80c3191d9c147c672f97c5dc4bb0536725af16f Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Thu, 11 Jul 2024 09:49:06 -0700 Subject: [PATCH 027/361] Use Markdown tables for config --- doc/manual/src/protocols/flake-schemas.md | 36 +++++++++++++---------- 1 file changed, 20 insertions(+), 16 deletions(-) diff --git a/doc/manual/src/protocols/flake-schemas.md b/doc/manual/src/protocols/flake-schemas.md index f6cdd6165b8..b1dfa5da6f0 100644 --- a/doc/manual/src/protocols/flake-schemas.md +++ b/doc/manual/src/protocols/flake-schemas.md @@ -8,34 +8,38 @@ every output type that you want to be supported. If a flake does not have a `sch A schema is an attribute set with the following attributes: -* `version`: Should be set to 1. -* `doc`: A string containing documentation about the flake output type in Markdown format. -* `allowIFD` (defaults to `true`): Whether the evaluation of the output attributes of this flake can read from derivation outputs. -* `inventory`: A function that returns the contents of the flake output (described below). +| Attribute | Description | Default | +| :---------- | :---------------------------------------------------------------------------------------------- | :------ | +| `version` | Should be set to 1 | | +| `doc` | A string containing documentation about the flake output type in Markdown format. | | +| `allowIFD` | Whether the evaluation of the output attributes of this flake can read from derivation outputs. | `true` | +| `inventory` | A function that returns the contents of the flake output (described [below](#inventory)). | | # Inventory -The `inventory` function returns a *node* describing the contents of the flake output. A node is either a *leaf node* or a *non-leaf node*. This allows nested flake output attributes to be described (e.g. `x86_64-linux.hello` inside a `packages` output). +The `inventory` function returns a _node_ describing the contents of the flake output. A node is either a _leaf node_ or a _non-leaf node_. This allows nested flake output attributes to be described (e.g. `x86_64-linux.hello` inside a `packages` output). Non-leaf nodes must have the following attribute: -* `children`: An attribute set of nodes. If this attribute is missing, the attribute if a leaf node. +| Attribute | Description | +| :--------- | :------------------------------------------------------------------------------------- | +| `children` | An attribute set of nodes. If this attribute is missing, the attribute is a leaf node. | Leaf nodes can have the following attributes: -* `derivation`: The main derivation of this node, if any. It must evaluate for `nix flake check` and `nix flake show` to succeed. - -* `evalChecks`: An attribute set of Boolean values, used by `nix flake check`. Each attribute must evaluate to `true`. - -* `isFlakeCheck`: Whether `nix flake check` should build the `derivation` attribute of this node. - -* `shortDescription`: A one-sentence description of the node (such as the `meta.description` attribute in Nixpkgs). - -* `what`: A brief human-readable string describing the type of the node, e.g. `"package"` or `"development environment"`. This is used by tools like `nix flake show` to describe the contents of a flake. 
+| Attribute | Description | +| :----------------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `derivation` | The main derivation of this node, if any. It must evaluate for `nix flake check` and `nix flake show` to succeed. | +| `evalChecks` | An attribute set of Boolean values, used by `nix flake check`. Each attribute must evaluate to `true`. | +| `isFlakeCheck` | Whether `nix flake check` should build the `derivation` attribute of this node. | +| `shortDescription` | A one-sentence description of the node (such as the `meta.description` attribute in Nixpkgs). | +| `what` | A brief human-readable string describing the type of the node, e.g. `"package"` or `"development environment"`. This is used by tools like `nix flake show` to describe the contents of a flake. | Both leaf and non-leaf nodes can have the following attributes: -* `forSystems`: A list of Nix system types (e.g. `["x86_64-linux"]`) supported by this node. This is used by tools to skip nodes that cannot be built on the user's system. Setting this on a non-leaf node allows all the children to be skipped, regardless of the `forSystems` attributes of the children. If this attribute is not set, the node is never skipped. +| Attribute | Description | +| :----------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `forSystems` | A list of Nix system types (e.g. `["x86_64-linux"]`) supported by this node. This is used by tools to skip nodes that cannot be built on the user's system. Setting this on a non-leaf node allows all the children to be skipped, regardless of the `forSystems` attributes of the children. If this attribute is not set, the node is never skipped. | # Example From 855e71632421620f86af817b17103a7a562a93a9 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 16 Jul 2024 16:37:57 +0200 Subject: [PATCH 028/361] Fix formatting --- src/nix/flake-schemas.cc | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/nix/flake-schemas.cc b/src/nix/flake-schemas.cc index afd0dd5cf0f..0047f27ce61 100644 --- a/src/nix/flake-schemas.cc +++ b/src/nix/flake-schemas.cc @@ -25,7 +25,8 @@ static LockedFlake getBuiltinDefaultSchemasFlake(EvalState & state) state.allowPath(storePath); // Construct a dummy flakeref. - auto flakeRef = parseFlakeRef(fetchSettings, + auto flakeRef = parseFlakeRef( + fetchSettings, fmt("tarball+https://builtin-flake-schemas?narHash=%s", state.store->queryPathInfo(storePath)->narHash.to_string(HashFormat::SRI, true))); @@ -43,8 +44,9 @@ call(EvalState & state, std::shared_ptr lockedFlake, std::op #include "call-flake-schemas.nix.gen.hh" ; - auto lockedDefaultSchemasFlake = - defaultSchemasFlake ? flake::lockFlake(flakeSettings, state, *defaultSchemasFlake, {}) : getBuiltinDefaultSchemasFlake(state); + auto lockedDefaultSchemasFlake = defaultSchemasFlake + ? 
flake::lockFlake(flakeSettings, state, *defaultSchemasFlake, {}) + : getBuiltinDefaultSchemasFlake(state); auto lockedDefaultSchemasFlakeFingerprint = lockedDefaultSchemasFlake.getFingerprint(state.store); std::optional fingerprint2; From 206e32e2d7c72c940a4348648f5de46122c495c9 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 31 Jul 2024 23:37:43 +0200 Subject: [PATCH 029/361] Mark release --- flake.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/flake.nix b/flake.nix index 9e8592e3a6d..3e3cfdc0435 100644 --- a/flake.nix +++ b/flake.nix @@ -24,7 +24,7 @@ let inherit (nixpkgs) lib; - officialRelease = false; + officialRelease = true; version = lib.fileContents ./.version + versionSuffix; versionSuffix = From 0a167ffd1f57864ce042d83f9d1f17ef5126c442 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 1 Aug 2024 10:41:11 +0200 Subject: [PATCH 030/361] Bump version --- .version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version b/.version index ad2261920c0..0f5dfbe8769 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.24.0 +2.24.1 From fe6a7c805c1882f755c5b5de9bf1c21c55e73254 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 1 Aug 2024 16:51:57 +0200 Subject: [PATCH 031/361] Fix the S3 store It was failing with: error: AWS error fetching 'nix-cache-info': The specified bucket does not exist because `S3BinaryCacheStoreImpl` had a `bucketName` field that shadowed the inherited `bucketName from `S3BinaryCacheStoreConfig`. (cherry picked from commit 9b5b7b796341eca437fe08bb278c49dfbae2deaa) --- src/libstore/s3-binary-cache-store.cc | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 92ab47cd66d..21175b1ebfd 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -220,8 +220,6 @@ std::string S3BinaryCacheStoreConfig::doc() struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual S3BinaryCacheStore { - std::string bucketName; - Stats stats; S3Helper s3Helper; From fa78d7f72fc2f36b9a31d9d37ceedf097583590c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 5 Aug 2024 18:56:02 +0200 Subject: [PATCH 032/361] PathSubstitutionGoal: Fix spurious "failed" count in the progress bar It is not an error if queryPathInfo() indicates that a path does not exist in the substituter. Fixes #11198. This was broken in 846869da0ed0580beb7f827b303fef9a8386de37. (cherry picked from commit 0a00bd07b2421acfb21751a718292affa8c6e837) --- src/libstore/build/substitution-goal.cc | 8 +++++--- src/libstore/build/substitution-goal.hh | 2 +- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc index 7deeb47487d..0152f180828 100644 --- a/src/libstore/build/substitution-goal.cc +++ b/src/libstore/build/substitution-goal.cc @@ -145,8 +145,10 @@ Goal::Co PathSubstitutionGoal::init() /* None left. Terminate this goal and let someone else deal with it. */ - worker.failedSubstitutions++; - worker.updateProgress(); + if (substituterFailed) { + worker.failedSubstitutions++; + worker.updateProgress(); + } /* Hack: don't indicate failure if there were no substituters. 
In that case the calling derivation should just do a @@ -158,7 +160,7 @@ Goal::Co PathSubstitutionGoal::init() } -Goal::Co PathSubstitutionGoal::tryToRun(StorePath subPath, nix::ref sub, std::shared_ptr info, bool& substituterFailed) +Goal::Co PathSubstitutionGoal::tryToRun(StorePath subPath, nix::ref sub, std::shared_ptr info, bool & substituterFailed) { trace("all references realised"); diff --git a/src/libstore/build/substitution-goal.hh b/src/libstore/build/substitution-goal.hh index c1de45379f1..f2cf797e5d2 100644 --- a/src/libstore/build/substitution-goal.hh +++ b/src/libstore/build/substitution-goal.hh @@ -66,7 +66,7 @@ public: */ Co init() override; Co gotInfo(); - Co tryToRun(StorePath subPath, nix::ref sub, std::shared_ptr info, bool& substituterFailed); + Co tryToRun(StorePath subPath, nix::ref sub, std::shared_ptr info, bool & substituterFailed); Co finished(); /** From b1941c9f8a40b6aeb42d0ddc20af85c54a9bd80f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Fri, 2 Aug 2024 11:12:06 +0200 Subject: [PATCH 033/361] allow to c api with older c versions In the FFI world we have many tools that are not gcc/clang and therefore not always support the latest C standard. This fixes support with cffi i.e. used in https://github.com/tweag/python-nix (cherry picked from commit 739418504c4d2f28fb5f45151b1c83707c3571e2) --- src/libexpr-c/nix_api_expr.h | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/src/libexpr-c/nix_api_expr.h b/src/libexpr-c/nix_api_expr.h index adf8b65b1a3..1764b49f321 100644 --- a/src/libexpr-c/nix_api_expr.h +++ b/src/libexpr-c/nix_api_expr.h @@ -14,6 +14,16 @@ #include "nix_api_util.h" #include +#ifndef __has_c_attribute +# define __has_c_attribute(x) 0 +#endif + +#if __has_c_attribute(deprecated) +# define NIX_DEPRECATED(msg) [[deprecated(msg)]] +#else +# define NIX_DEPRECATED(msg) +#endif + #ifdef __cplusplus extern "C" { #endif @@ -45,7 +55,7 @@ typedef struct EvalState EvalState; // nix::EvalState * @see nix_value_incref, nix_value_decref */ typedef struct nix_value nix_value; -[[deprecated("use nix_value instead")]] typedef nix_value Value; +NIX_DEPRECATED("use nix_value instead") typedef nix_value Value; // Function prototypes /** From 4036c3aafb7a6c4c625e68cc14acf6b529be2cb2 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 8 Aug 2024 15:02:48 +0200 Subject: [PATCH 034/361] Bump version --- .version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version b/.version index 0f5dfbe8769..5827d9bfd7b 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.24.1 +2.24.2 From 450252c92c3b5d0e7e71398fdc9f7630cf197326 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 8 Aug 2024 17:21:00 +0200 Subject: [PATCH 035/361] Bump version --- .version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version b/.version index 5827d9bfd7b..29690d10f08 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.24.2 +2.24.3 From 5b62a1dbd60f716b88c9da5a78ae1ea533cc82d9 Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Fri, 16 Aug 2024 07:09:27 -0700 Subject: [PATCH 036/361] flake.lock: Update MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Flake lock file updates: • Updated input 'nixpkgs': 'github:NixOS/nixpkgs/63d37ccd2d178d54e7fb691d7ec76000740ea24a?narHash=sha256-7cCC8%2BTdq1%2B3OPyc3%2BgVo9dzUNkNIQfwSDJ2HSi2u3o%3D' (2024-07-21) → 
'github:NixOS/nixpkgs/c3d4ac725177c030b1e289015989da2ad9d56af0?narHash=sha256-sqLwJcHYeWLOeP/XoLwAtYjr01TISlkOfz%2BNG82pbdg%3D' (2024-08-15) (cherry picked from commit 8866d2cd838902d45782541efe08efc1e1f1a2ab) --- flake.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/flake.lock b/flake.lock index 2ac413a6986..b5d0b881c5c 100644 --- a/flake.lock +++ b/flake.lock @@ -80,11 +80,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1721548954, - "narHash": "sha256-7cCC8+Tdq1+3OPyc3+gVo9dzUNkNIQfwSDJ2HSi2u3o=", + "lastModified": 1723688146, + "narHash": "sha256-sqLwJcHYeWLOeP/XoLwAtYjr01TISlkOfz+NG82pbdg=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "63d37ccd2d178d54e7fb691d7ec76000740ea24a", + "rev": "c3d4ac725177c030b1e289015989da2ad9d56af0", "type": "github" }, "original": { From d550139191cfddb313f431d7f2c68d7873a62991 Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Fri, 16 Aug 2024 07:22:30 -0700 Subject: [PATCH 037/361] ci: check that all outputs for all systems can evaluate (cherry picked from commit aa3d35c1f4145c9532620a20d6727c2214eab054) --- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4eb9cf10dc7..e9397621eac 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -49,6 +49,7 @@ jobs: done ) & - run: nix --experimental-features 'nix-command flakes' flake check -L + - run: nix --experimental-features 'nix-command flakes' flake show --all-systems --json # Steps to test CI automation in your own fork. # Cachix: From 4e707b8e577a9f41f91fc4b6ddb1ac5c3bb47b97 Mon Sep 17 00:00:00 2001 From: Andrew Marshall Date: Thu, 8 Aug 2024 14:29:40 -0400 Subject: [PATCH 038/361] libstore: fix port binding in __darwinAllowLocalNetworking sandbox In d60c3f7f7c83134b5b4470ed84b6d5ed38e28753, this was changed to close a hole in the sandbox. Unfortunately, this was too restrictive such that it made local port binding fail, thus making derivations that needed `__darwinAllowLocalNetworking` gain nearly nothing, and thus largely fail (as the primary use for it is to enable port binding). This unfortunately does mean that a sandboxed build process can, in coordination with an actor outside the sandbox, escape the sandbox by binding a port and connecting to it externally to send data. I do not see a way around this with my experimentation and understanding of the (quite undocumented) macOS sandbox profile API. Notably it seems not possible to use the sandbox to do any of: - Restrict the remote IP of inbound network requests - Restrict the address being bound to As such, the `(local ip "*:*")` here appears to be functionally no different than `(local ip "localhost:*")` (however it *should* be different than removing the filter entirely, as that would make it also apply to non-IP networking). Doing `(allow network-inbound (require-all (local ip "localhost:*") (remote ip "localhost:*")))` causes listening to fail. Note that `network-inbound` implies `network-bind`. 
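For illustration only (not part of this change), a derivation opts into this relaxed sandbox by setting the `__darwinAllowLocalNetworking` attribute mentioned above, which with this fix again permits binding local ports. A minimal sketch, assuming a nixpkgs-style stdenv is in scope:

    stdenv.mkDerivation {
      name = "needs-local-ports";
      # ...
      # Re-enables local networking (e.g. binding and listening on 127.0.0.1)
      # inside the macOS build sandbox.
      __darwinAllowLocalNetworking = true;
    }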
(cherry picked from commit 00f6db36fd72c9e82e923ce89d0ddb7d2e738528) --- src/libstore/unix/build/sandbox-defaults.sb | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libstore/unix/build/sandbox-defaults.sb b/src/libstore/unix/build/sandbox-defaults.sb index 6da01b7356b..15cd6daf5e0 100644 --- a/src/libstore/unix/build/sandbox-defaults.sb +++ b/src/libstore/unix/build/sandbox-defaults.sb @@ -49,6 +49,7 @@ R""( (if (param "_ALLOW_LOCAL_NETWORKING") (begin (allow network* (remote ip "localhost:*")) + (allow network-inbound (local ip "*:*")) ; required to bind and listen ; Allow access to /etc/resolv.conf (which is a symlink to ; /private/var/run/resolv.conf). From 90fb4e8890c393d860521cb13e892a5cd19ab395 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 19 Aug 2024 12:46:17 +0200 Subject: [PATCH 039/361] Bump version --- .version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version b/.version index 29690d10f08..b71a29b1f95 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.24.3 +2.24.4 From 7befd60c01c1593dd2db86fd4c695c3e9f26416e Mon Sep 17 00:00:00 2001 From: Tom Bereknyei Date: Sun, 18 Aug 2024 22:35:54 -0400 Subject: [PATCH 040/361] fix: check to see if there are any lines before (cherry picked from commit 59db8fd62b5300afbbabb1e8a12d547b336a3bdf) --- src/nix-build/nix-build.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 0ce987d8a5c..a5b9e1e548e 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -163,7 +163,7 @@ static void main_nix_build(int argc, char * * argv) script = argv[1]; try { auto lines = tokenizeString(readFile(script), "\n"); - if (std::regex_search(lines.front(), std::regex("^#!"))) { + if (!lines.empty() && std::regex_search(lines.front(), std::regex("^#!"))) { lines.pop_front(); inShebang = true; for (int i = 2; i < argc; ++i) From 30a57328d2b53ec8d140af0065f4875501d5c28d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 23 Aug 2024 13:15:30 +0200 Subject: [PATCH 041/361] Backport https://github.com/NixOS/nix/pull/11152 --- .../src/installation/prerequisites-source.md | 2 - src/libexpr/eval-gc.cc | 134 ------------------ src/libstore/store-api.cc | 16 ++- src/libutil/serialise.cc | 71 +--------- src/libutil/serialise.hh | 23 --- 5 files changed, 15 insertions(+), 231 deletions(-) diff --git a/doc/manual/src/installation/prerequisites-source.md b/doc/manual/src/installation/prerequisites-source.md index 4aafa6d27ab..c346a0a4b27 100644 --- a/doc/manual/src/installation/prerequisites-source.md +++ b/doc/manual/src/installation/prerequisites-source.md @@ -39,8 +39,6 @@ `pkgconfig` and the Boehm garbage collector, and pass the flag `--enable-gc` to `configure`. - For `bdw-gc` <= 8.2.4 Nix needs a [small patch](https://github.com/NixOS/nix/blob/ac4d2e7b857acdfeac35ac8a592bdecee2d29838/boehmgc-traceable_allocator-public.diff) to be applied. - - The `boost` library of version 1.66.0 or higher. It can be obtained from the official web site . diff --git a/src/libexpr/eval-gc.cc b/src/libexpr/eval-gc.cc index 2f0e8c0c90b..07ce05a2c73 100644 --- a/src/libexpr/eval-gc.cc +++ b/src/libexpr/eval-gc.cc @@ -32,122 +32,6 @@ static void * oomHandler(size_t requested) throw std::bad_alloc(); } -class BoehmGCStackAllocator : public StackAllocator -{ - boost::coroutines2::protected_fixedsize_stack stack{ - // We allocate 8 MB, the default max stack size on NixOS. 
- // A smaller stack might be quicker to allocate but reduces the stack - // depth available for source filter expressions etc. - std::max(boost::context::stack_traits::default_size(), static_cast(8 * 1024 * 1024))}; - - // This is specific to boost::coroutines2::protected_fixedsize_stack. - // The stack protection page is included in sctx.size, so we have to - // subtract one page size from the stack size. - std::size_t pfss_usable_stack_size(boost::context::stack_context & sctx) - { - return sctx.size - boost::context::stack_traits::page_size(); - } - -public: - boost::context::stack_context allocate() override - { - auto sctx = stack.allocate(); - - // Stacks generally start at a high address and grow to lower addresses. - // Architectures that do the opposite are rare; in fact so rare that - // boost_routine does not implement it. - // So we subtract the stack size. - GC_add_roots(static_cast(sctx.sp) - pfss_usable_stack_size(sctx), sctx.sp); - return sctx; - } - - void deallocate(boost::context::stack_context sctx) override - { - GC_remove_roots(static_cast(sctx.sp) - pfss_usable_stack_size(sctx), sctx.sp); - stack.deallocate(sctx); - } -}; - -static BoehmGCStackAllocator boehmGCStackAllocator; - -/** - * When a thread goes into a coroutine, we lose its original sp until - * control flow returns to the thread. - * While in the coroutine, the sp points outside the thread stack, - * so we can detect this and push the entire thread stack instead, - * as an approximation. - * The coroutine's stack is covered by `BoehmGCStackAllocator`. - * This is not an optimal solution, because the garbage is scanned when a - * coroutine is active, for both the coroutine and the original thread stack. - * However, the implementation is quite lean, and usually we don't have active - * coroutines during evaluation, so this is acceptable. - */ -void fixupBoehmStackPointer(void ** sp_ptr, void * _pthread_id) -{ - void *& sp = *sp_ptr; - auto pthread_id = reinterpret_cast(_pthread_id); -# ifndef __APPLE__ - pthread_attr_t pattr; -# endif - size_t osStackSize; - // The low address of the stack, which grows down. - void * osStackLimit; - void * osStackBase; - -# ifdef __APPLE__ - osStackSize = pthread_get_stacksize_np(pthread_id); - osStackLimit = pthread_get_stackaddr_np(pthread_id); -# else - if (pthread_attr_init(&pattr)) { - throw Error("fixupBoehmStackPointer: pthread_attr_init failed"); - } -# ifdef HAVE_PTHREAD_GETATTR_NP - if (pthread_getattr_np(pthread_id, &pattr)) { - throw Error("fixupBoehmStackPointer: pthread_getattr_np failed"); - } -# elif HAVE_PTHREAD_ATTR_GET_NP - if (!pthread_attr_init(&pattr)) { - throw Error("fixupBoehmStackPointer: pthread_attr_init failed"); - } - if (!pthread_attr_get_np(pthread_id, &pattr)) { - throw Error("fixupBoehmStackPointer: pthread_attr_get_np failed"); - } -# else -# error "Need one of `pthread_attr_get_np` or `pthread_getattr_np`" -# endif - if (pthread_attr_getstack(&pattr, &osStackLimit, &osStackSize)) { - throw Error("fixupBoehmStackPointer: pthread_attr_getstack failed"); - } - if (pthread_attr_destroy(&pattr)) { - throw Error("fixupBoehmStackPointer: pthread_attr_destroy failed"); - } -# endif - osStackBase = (char *) osStackLimit + osStackSize; - // NOTE: We assume the stack grows down, as it does on all architectures we support. - // Architectures that grow the stack up are rare. - if (sp >= osStackBase || sp < osStackLimit) { // sp is outside the os stack - sp = osStackLimit; - } -} - -/* Disable GC while this object lives. Used by CoroutineContext. 
- * - * Boehm keeps a count of GC_disable() and GC_enable() calls, - * and only enables GC when the count matches. - */ -class BoehmDisableGC -{ -public: - BoehmDisableGC() - { - GC_disable(); - }; - ~BoehmDisableGC() - { - GC_enable(); - }; -}; - static inline void initGCReal() { /* Initialise the Boehm garbage collector. */ @@ -168,24 +52,6 @@ static inline void initGCReal() GC_set_oom_fn(oomHandler); - StackAllocator::defaultAllocator = &boehmGCStackAllocator; - -// TODO: Remove __APPLE__ condition. -// Comment suggests an implementation that works on darwin and windows -// https://github.com/ivmai/bdwgc/issues/362#issuecomment-1936672196 -# if GC_VERSION_MAJOR >= 8 && GC_VERSION_MINOR >= 2 && GC_VERSION_MICRO >= 4 && !defined(__APPLE__) - GC_set_sp_corrector(&fixupBoehmStackPointer); - - if (!GC_get_sp_corrector()) { - printTalkative("BoehmGC on this platform does not support sp_corrector; will disable GC inside coroutines"); - /* Used to disable GC when entering coroutines on macOS */ - create_coro_gc_hook = []() -> std::shared_ptr { return std::make_shared(); }; - } -# else -# warning \ - "BoehmGC version does not support GC while coroutine exists. GC will be disabled inside coroutines. Consider updating bdw-gc to 8.2.4 or later." -# endif - /* Set the initial heap size to something fairly big (25% of physical RAM, up to a maximum of 384 MiB) so that in most cases we don't need to garbage collect at all. (Collection has a diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index b3e5ad014cf..8eef340ccb4 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -210,14 +210,16 @@ StorePath Store::addToStore( fsm = FileSerialisationMethod::NixArchive; break; } - auto source = sinkToSource([&](Sink & sink) { - dumpPath(path, sink, fsm, filter); + std::optional storePath; + auto sink = sourceToSink([&](Source & source) { + LengthSource lengthSource(source); + storePath = addToStoreFromDump(lengthSource, name, fsm, method, hashAlgo, references, repair); + if (lengthSource.total >= settings.warnLargePathThreshold) + warn("copied large path '%s' to the store (%s)", path, renderSize(lengthSource.total)); }); - LengthSource lengthSource(*source); - auto storePath = addToStoreFromDump(lengthSource, name, fsm, method, hashAlgo, references, repair); - if (lengthSource.total >= settings.warnLargePathThreshold) - warn("copied large path '%s' to the store (%s)", path, renderSize(lengthSource.total)); - return storePath; + dumpPath(path, *sink, fsm, filter); + sink->finish(); + return storePath.value(); } void Store::addMultipleToStore( diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index 4899134d7c3..5352a436b44 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -171,55 +171,6 @@ size_t StringSource::read(char * data, size_t len) #error Coroutines are broken in this version of Boost! #endif -/* A concrete datatype allow virtual dispatch of stack allocation methods. */ -struct VirtualStackAllocator { - StackAllocator *allocator = StackAllocator::defaultAllocator; - - boost::context::stack_context allocate() { - return allocator->allocate(); - } - - void deallocate(boost::context::stack_context sctx) { - allocator->deallocate(sctx); - } -}; - - -/* This class reifies the default boost coroutine stack allocation strategy with - a virtual interface. 
*/ -class DefaultStackAllocator : public StackAllocator { - boost::coroutines2::default_stack stack; - - boost::context::stack_context allocate() override { - return stack.allocate(); - } - - void deallocate(boost::context::stack_context sctx) override { - stack.deallocate(sctx); - } -}; - -static DefaultStackAllocator defaultAllocatorSingleton; - -StackAllocator *StackAllocator::defaultAllocator = &defaultAllocatorSingleton; - - -std::shared_ptr (*create_coro_gc_hook)() = []() -> std::shared_ptr { - return {}; -}; - -/* This class is used for entry and exit hooks on coroutines */ -class CoroutineContext { - /* Disable GC when entering the coroutine without the boehm patch, - * since it doesn't find the main thread stack in this case. - * std::shared_ptr performs type-erasure, so it will call the right - * deleter. */ - const std::shared_ptr coro_gc_hook = create_coro_gc_hook(); -public: - CoroutineContext() {}; - ~CoroutineContext() {}; -}; - std::unique_ptr sourceToSink(std::function fun) { struct SourceToSink : FinishSink @@ -241,14 +192,12 @@ std::unique_ptr sourceToSink(std::function fun) cur = in; if (!coro) { - CoroutineContext ctx; - coro = coro_t::push_type(VirtualStackAllocator{}, [&](coro_t::pull_type & yield) { - LambdaSource source([&](char *out, size_t out_len) { + coro = coro_t::push_type([&](coro_t::pull_type & yield) { + LambdaSource source([&](char * out, size_t out_len) { if (cur.empty()) { yield(); - if (yield.get()) { - return (size_t)0; - } + if (yield.get()) + throw EndOfFile("coroutine has finished"); } size_t n = std::min(cur.size(), out_len); @@ -263,20 +212,14 @@ std::unique_ptr sourceToSink(std::function fun) if (!*coro) { unreachable(); } if (!cur.empty()) { - CoroutineContext ctx; (*coro)(false); } } void finish() override { - if (!coro) return; - if (!*coro) unreachable(); - { - CoroutineContext ctx; + if (coro && *coro) (*coro)(true); - } - if (*coro) unreachable(); } }; @@ -307,8 +250,7 @@ std::unique_ptr sinkToSource( size_t read(char * data, size_t len) override { if (!coro) { - CoroutineContext ctx; - coro = coro_t::pull_type(VirtualStackAllocator{}, [&](coro_t::push_type & yield) { + coro = coro_t::pull_type([&](coro_t::push_type & yield) { LambdaSink sink([&](std::string_view data) { if (!data.empty()) yield(std::string(data)); }); @@ -320,7 +262,6 @@ std::unique_ptr sinkToSource( if (pos == cur.size()) { if (!cur.empty()) { - CoroutineContext ctx; (*coro)(); } cur = coro->get(); diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh index c7290dcef9d..e9f3e3a4a2f 100644 --- a/src/libutil/serialise.hh +++ b/src/libutil/serialise.hh @@ -557,27 +557,4 @@ struct FramedSink : nix::BufferedSink }; }; -/** - * Stack allocation strategy for sinkToSource. - * Mutable to avoid a boehm gc dependency in libutil. - * - * boost::context doesn't provide a virtual class, so we define our own. - */ -struct StackAllocator { - virtual boost::context::stack_context allocate() = 0; - virtual void deallocate(boost::context::stack_context sctx) = 0; - - /** - * The stack allocator to use in sinkToSource and potentially elsewhere. - * It is reassigned by the initGC() method in libexpr. - */ - static StackAllocator *defaultAllocator; -}; - -/* Disabling GC when entering a coroutine (without the boehm patch). - mutable to avoid boehm gc dependency in libutil. 
- */ -extern std::shared_ptr (*create_coro_gc_hook)(); - - } From 9d8669b14a402a8fd440fdce0ab3d874319a6984 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 23 Aug 2024 16:15:11 +0200 Subject: [PATCH 042/361] Bump version --- .version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version b/.version index b71a29b1f95..23a93836aed 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.24.4 +2.24.5 From 0c25bea7cca21cc8e56ce9ed5b5391289fd30e04 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 2 Sep 2024 17:28:11 +0200 Subject: [PATCH 043/361] Respect max-substitution-jobs again This broke in #11005. Any number of PathSubstitutionGoals would be woken up by a single build slot becoming available. If there are a lot of substitution goals active, this could lead to us running out of file descriptors (especially on macOS where the default limit is 256). (cherry picked from commit a33cb8af5693af56dd69073dc5dddb4c6900ad7a) --- src/libstore/build/substitution-goal.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc index 0152f180828..a26eea8201f 100644 --- a/src/libstore/build/substitution-goal.cc +++ b/src/libstore/build/substitution-goal.cc @@ -183,7 +183,7 @@ Goal::Co PathSubstitutionGoal::tryToRun(StorePath subPath, nix::ref sub, /* Make sure that we are allowed to start a substitution. Note that even if maxSubstitutionJobs == 0, we still allow a substituter to run. This prevents infinite waiting. */ - if (worker.getNrSubstitutions() >= std::max(1U, (unsigned int) settings.maxSubstitutionJobs)) { + while (worker.getNrSubstitutions() >= std::max(1U, (unsigned int) settings.maxSubstitutionJobs)) { worker.waitForBuildSlot(shared_from_this()); co_await Suspend{}; } From c21f664e82aef1d44d71e1c5cc4e0021b4f8a1b8 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 2 Sep 2024 17:28:55 +0200 Subject: [PATCH 044/361] "unsigned" -> size_t Slight cleanup. (cherry picked from commit b7acd1c4145c7316085f2a12bfa26ef742ac6146) --- src/libstore/build/worker.cc | 4 ++-- src/libstore/build/worker.hh | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc index ab0ba67b521..dbe86f43f6a 100644 --- a/src/libstore/build/worker.cc +++ b/src/libstore/build/worker.cc @@ -184,13 +184,13 @@ void Worker::wakeUp(GoalPtr goal) } -unsigned Worker::getNrLocalBuilds() +size_t Worker::getNrLocalBuilds() { return nrLocalBuilds; } -unsigned Worker::getNrSubstitutions() +size_t Worker::getNrSubstitutions() { return nrSubstitutions; } diff --git a/src/libstore/build/worker.hh b/src/libstore/build/worker.hh index 33a7bf01517..e083dbea6d1 100644 --- a/src/libstore/build/worker.hh +++ b/src/libstore/build/worker.hh @@ -92,12 +92,12 @@ private: * Number of build slots occupied. This includes local builds but does not * include substitutions or remote builds via the build hook. */ - unsigned int nrLocalBuilds; + size_t nrLocalBuilds; /** * Number of substitution slots occupied. */ - unsigned int nrSubstitutions; + size_t nrSubstitutions; /** * Maps used to prevent multiple instantiations of a goal for the @@ -235,12 +235,12 @@ public: * Return the number of local build processes currently running (but not * remote builds via the build hook). */ - unsigned int getNrLocalBuilds(); + size_t getNrLocalBuilds(); /** * Return the number of substitution processes currently running. 
*/ - unsigned int getNrSubstitutions(); + size_t getNrSubstitutions(); /** * Registers a running child process. `inBuildSlot` means that From ea7abb58b59562952262a0ef43e30f9f85639cd4 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 3 Sep 2024 16:51:36 +0200 Subject: [PATCH 045/361] Bump version --- .version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version b/.version index 23a93836aed..c5f92d6f8fd 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.24.5 +2.24.6 From 0679505d8ce991416650504e409d8c2055a8f6bd Mon Sep 17 00:00:00 2001 From: "Travis A. Everett" Date: Tue, 2 Jul 2024 21:02:45 -0500 Subject: [PATCH 046/361] install-darwin: fix _nixbld uids for macOS sequoia Starting in macOS 15 Sequoia, macOS daemon UIDs are encroaching on our default UIDs of 301-332. This commit relocates our range up to avoid clashing with the current UIDs of 301-304 and buy us a little time while still leaving headroom for people installing more than 32 users. (cherry picked from commit df36ff0d1e60f59eb3e0442fa335252421ec8057) --- scripts/bigsur-nixbld-user-migration.sh | 2 +- scripts/install-darwin-multi-user.sh | 12 +++++++++++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/scripts/bigsur-nixbld-user-migration.sh b/scripts/bigsur-nixbld-user-migration.sh index 0eb312e07cd..bc42e02e6b2 100755 --- a/scripts/bigsur-nixbld-user-migration.sh +++ b/scripts/bigsur-nixbld-user-migration.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -((NEW_NIX_FIRST_BUILD_UID=301)) +((NEW_NIX_FIRST_BUILD_UID=350)) id_available(){ dscl . list /Users UniqueID | grep -E '\b'"$1"'\b' >/dev/null diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh index 24c9052f91c..bd1a54ad873 100644 --- a/scripts/install-darwin-multi-user.sh +++ b/scripts/install-darwin-multi-user.sh @@ -4,7 +4,17 @@ set -eu set -o pipefail # System specific settings -export NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-301}" +# Notes: +# - up to macOS Big Sur we used the same GID/UIDs as Linux (30000:30001-32) +# - we changed UID to 301 because Big Sur updates failed into recovery mode +# we're targeting the 200-400 UID range for role users mentioned in the +# usage note for sysadminctl +# - we changed UID to 350 because Sequoia now uses UIDs 300-304 for its own +# daemon users +# - we changed GID to 350 alongside above just because it hides the nixbld +# group from the Users & Groups settings panel :) +export NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-350}" +export NIX_BUILD_GROUP_ID="${NIX_BUILD_GROUP_ID:-350}" export NIX_BUILD_USER_NAME_TEMPLATE="_nixbld%d" readonly NIX_DAEMON_DEST=/Library/LaunchDaemons/org.nixos.nix-daemon.plist From c5a0e624d94505d6544ed2175ecaa08d78cf4b6e Mon Sep 17 00:00:00 2001 From: "Travis A. 
Everett" Date: Tue, 2 Jul 2024 21:22:35 -0500 Subject: [PATCH 047/361] install-darwin: move nixbld gid to match first UID (cherry picked from commit 75567423fb6163559575c38867cda09b754364d7) --- scripts/install-multi-user.sh | 6 ++---- scripts/install-systemd-multi-user.sh | 1 + 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh index 6aee073e3f9..a487d459f40 100644 --- a/scripts/install-multi-user.sh +++ b/scripts/install-multi-user.sh @@ -23,10 +23,10 @@ readonly RED='\033[31m' # installer allows overriding build user count to speed up installation # as creating each user takes non-trivial amount of time on macos readonly NIX_USER_COUNT=${NIX_USER_COUNT:-32} -readonly NIX_BUILD_GROUP_ID="${NIX_BUILD_GROUP_ID:-30000}" readonly NIX_BUILD_GROUP_NAME="nixbld" # each system specific installer must set these: # NIX_FIRST_BUILD_UID +# NIX_BUILD_GROUP_ID # NIX_BUILD_USER_NAME_TEMPLATE # Please don't change this. We don't support it, because the # default shell profile that comes with Nix doesn't support it. @@ -530,9 +530,7 @@ It seems the build group $NIX_BUILD_GROUP_NAME already exists, but with the UID $primary_group_id. This script can't really handle that right now, so I'm going to give up. -You can fix this by editing this script and changing the -NIX_BUILD_GROUP_ID variable near the top to from $NIX_BUILD_GROUP_ID -to $primary_group_id and re-run. +You can export NIX_BUILD_GROUP_ID=$primary_group_id and re-run. EOF else row " Exists" "Yes" diff --git a/scripts/install-systemd-multi-user.sh b/scripts/install-systemd-multi-user.sh index a62ed7e3aa4..a79a699906a 100755 --- a/scripts/install-systemd-multi-user.sh +++ b/scripts/install-systemd-multi-user.sh @@ -5,6 +5,7 @@ set -o pipefail # System specific settings export NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-30001}" +export NIX_BUILD_GROUP_ID="${NIX_BUILD_GROUP_ID:-30000}" export NIX_BUILD_USER_NAME_TEMPLATE="nixbld%d" readonly SERVICE_SRC=/lib/systemd/system/nix-daemon.service From 8d0414d682b18323bab362d31e8f1c43125a63d4 Mon Sep 17 00:00:00 2001 From: Emily Date: Mon, 26 Aug 2024 17:59:58 +0100 Subject: [PATCH 048/361] install-darwin: increment base UID by 1 (#15) (cherry picked from commit 11cf29b15c8ea144035eb6a9d9f31bb05eee2048) --- scripts/bigsur-nixbld-user-migration.sh | 2 +- scripts/install-darwin-multi-user.sh | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/bigsur-nixbld-user-migration.sh b/scripts/bigsur-nixbld-user-migration.sh index bc42e02e6b2..57f65da7212 100755 --- a/scripts/bigsur-nixbld-user-migration.sh +++ b/scripts/bigsur-nixbld-user-migration.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -((NEW_NIX_FIRST_BUILD_UID=350)) +((NEW_NIX_FIRST_BUILD_UID=351)) id_available(){ dscl . 
list /Users UniqueID | grep -E '\b'"$1"'\b' >/dev/null diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh index bd1a54ad873..89c66b8f41c 100644 --- a/scripts/install-darwin-multi-user.sh +++ b/scripts/install-darwin-multi-user.sh @@ -9,11 +9,11 @@ set -o pipefail # - we changed UID to 301 because Big Sur updates failed into recovery mode # we're targeting the 200-400 UID range for role users mentioned in the # usage note for sysadminctl -# - we changed UID to 350 because Sequoia now uses UIDs 300-304 for its own +# - we changed UID to 351 because Sequoia now uses UIDs 300-304 for its own # daemon users # - we changed GID to 350 alongside above just because it hides the nixbld # group from the Users & Groups settings panel :) -export NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-350}" +export NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-351}" export NIX_BUILD_GROUP_ID="${NIX_BUILD_GROUP_ID:-350}" export NIX_BUILD_USER_NAME_TEMPLATE="_nixbld%d" From 437f7a0042a7eb27e379c65557acd492e62c6496 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 12 Aug 2024 15:47:02 +0200 Subject: [PATCH 049/361] fetchers::downloadTarball(): Return a cacheable accessor downloadTarball() is used by `-I foo=` etc. fetchToStore() needs the accessor to have a fingerprint to enable caching. Fixes #11271. (cherry picked from commit 9f6ee93f488c8935b560588ad7ba321d9618f588) --- src/libcmd/common-eval-args.cc | 4 +++- src/libexpr/eval.cc | 4 +++- src/libexpr/primops/fetchTree.cc | 6 +++++- src/libfetchers/tarball.cc | 20 ++++++++++++++++++-- src/libfetchers/tarball.hh | 9 ++++++--- 5 files changed, 35 insertions(+), 8 deletions(-) diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc index fcef92487cb..ae9994a05f6 100644 --- a/src/libcmd/common-eval-args.cc +++ b/src/libcmd/common-eval-args.cc @@ -171,7 +171,9 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * bas { if (EvalSettings::isPseudoUrl(s)) { auto accessor = fetchers::downloadTarball( - EvalSettings::resolvePseudoUrl(s)).accessor; + state.store, + state.fetchSettings, + EvalSettings::resolvePseudoUrl(s)); auto storePath = fetchToStore(*state.store, SourcePath(accessor), FetchMode::Copy); return state.rootPath(CanonPath(state.store->toRealPath(storePath))); } diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index de5d85821ef..0bb1a5ea6cc 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -3083,7 +3083,9 @@ std::optional EvalState::resolveLookupPathPath(const LookupPath::Pa if (EvalSettings::isPseudoUrl(value)) { try { auto accessor = fetchers::downloadTarball( - EvalSettings::resolvePseudoUrl(value)).accessor; + store, + fetchSettings, + EvalSettings::resolvePseudoUrl(value)); auto storePath = fetchToStore(*store, SourcePath(accessor), FetchMode::Copy); return finish(store->toRealPath(storePath)); } catch (Error & e) { diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index f79b6b7b83a..0e49cbc71ad 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -501,7 +501,11 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v // https://github.com/NixOS/nix/issues/4313 auto storePath = unpack - ? fetchToStore(*state.store, fetchers::downloadTarball(*url).accessor, FetchMode::Copy, name) + ? 
fetchToStore( + *state.store, + fetchers::downloadTarball(state.store, state.fetchSettings, *url), + FetchMode::Copy, + name) : fetchers::downloadFile(state.store, *url, name).storePath; if (expectedHash) { diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index 457210542dc..dd4f3b78086 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -102,7 +102,7 @@ DownloadFileResult downloadFile( }; } -DownloadTarballResult downloadTarball( +static DownloadTarballResult downloadTarball_( const std::string & url, const Headers & headers) { @@ -202,6 +202,22 @@ DownloadTarballResult downloadTarball( return attrsToResult(infoAttrs); } +ref downloadTarball( + ref store, + const Settings & settings, + const std::string & url) +{ + /* Go through Input::getAccessor() to ensure that the resulting + accessor has a fingerprint. */ + fetchers::Attrs attrs; + attrs.insert_or_assign("type", "tarball"); + attrs.insert_or_assign("url", url); + + auto input = Input::fromAttrs(settings, std::move(attrs)); + + return input.getAccessor(store).first; +} + // An input scheme corresponding to a curl-downloadable resource. struct CurlInputScheme : InputScheme { @@ -353,7 +369,7 @@ struct TarballInputScheme : CurlInputScheme { auto input(_input); - auto result = downloadTarball(getStrAttr(input.attrs, "url"), {}); + auto result = downloadTarball_(getStrAttr(input.attrs, "url"), {}); result.accessor->setPathDisplay("«" + input.to_string() + "»"); diff --git a/src/libfetchers/tarball.hh b/src/libfetchers/tarball.hh index d9bdd123d58..2042041d5ad 100644 --- a/src/libfetchers/tarball.hh +++ b/src/libfetchers/tarball.hh @@ -14,6 +14,8 @@ struct SourceAccessor; namespace nix::fetchers { +struct Settings; + struct DownloadFileResult { StorePath storePath; @@ -40,8 +42,9 @@ struct DownloadTarballResult * Download and import a tarball into the Git cache. The result is the * Git tree hash of the root directory. */ -DownloadTarballResult downloadTarball( - const std::string & url, - const Headers & headers = {}); +ref downloadTarball( + ref store, + const Settings & settings, + const std::string & url); } From f0cffa7300cec037fd5bf8adb40a2657f3af3bda Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Sat, 17 Aug 2024 08:31:41 +0200 Subject: [PATCH 050/361] replace backport github action with mergify The current backport action cannot automerge because the github action bot does not trigger github CI actions. Mergify instead does not have this limitation and can also use a merge queue. On top we have now a declarative configuration to allow contributers to add new tests to required without having access to the github org. An example pull request and backport can be seen here: https://github.com/Mic92/nix-1/pull/4 and here: https://github.com/Mic92/nix-1/pull/5 To complete the setup the mergify app must be enabled for this repository. It's already installed in the nixos organization for nixos-hardware and other repositories. 
(cherry picked from commit 80f20fa4cb75ad48d74047ca060869bb9138f776) --- .github/workflows/backport.yml | 32 ------------ .mergify.yml | 92 ++++++++++++++++++++++++++++++++++ 2 files changed, 92 insertions(+), 32 deletions(-) delete mode 100644 .github/workflows/backport.yml create mode 100644 .mergify.yml diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml deleted file mode 100644 index dd110de6c2a..00000000000 --- a/.github/workflows/backport.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: Backport -on: - pull_request_target: - types: [closed, labeled] -permissions: - contents: read -jobs: - backport: - name: Backport Pull Request - permissions: - # for zeebe-io/backport-action - contents: write - pull-requests: write - if: github.repository_owner == 'NixOS' && github.event.pull_request.merged == true && (github.event_name != 'labeled' || startsWith('backport', github.event.label.name)) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha }} - # required to find all branches - fetch-depth: 0 - - name: Create backport PRs - # should be kept in sync with `version` - uses: zeebe-io/backport-action@v3.0.2 - with: - # Config README: https://github.com/zeebe-io/backport-action#backport-action - github_token: ${{ secrets.GITHUB_TOKEN }} - github_workspace: ${{ github.workspace }} - pull_description: |- - Automatic backport to `${target_branch}`, triggered by a label in #${pull_number}. - # should be kept in sync with `uses` - version: v0.0.5 diff --git a/.mergify.yml b/.mergify.yml new file mode 100644 index 00000000000..663c45d92f4 --- /dev/null +++ b/.mergify.yml @@ -0,0 +1,92 @@ +queue_rules: + - name: default + # all required tests need to go here + merge_conditions: + - check-success=installer + - check-success=installer_test (macos-latest) + - check-success=installer_test (ubuntu-latest) + - check-success=tests (macos-latest) + - check-success=tests (ubuntu-latest) + - check-success=vm_tests + merge_method: rebase + batch_size: 5 + +pull_request_rules: + - name: merge using the merge queue + conditions: + - base=master + - label~=merge-queue|dependencies + actions: + queue: {} + +# The rules below will first create backport pull requests and put those in a merge queue. 
+ + - name: backport patches to 2.18 + conditions: + - label=backport 2.18-maintenance + actions: + backport: + branches: + - 2.18-maintenance + labels: + - merge-queue + + - name: backport patches to 2.19 + conditions: + - label=backport 2.19-maintenance + actions: + backport: + branches: + - 2.19-maintenance + labels: + - merge-queue + + - name: backport patches to 2.20 + conditions: + - label=backport 2.20-maintenance + actions: + backport: + branches: + - 2.20-maintenance + labels: + - merge-queue + + - name: backport patches to 2.21 + conditions: + - label=backport 2.21-maintenance + actions: + backport: + branches: + - 2.21-maintenance + labels: + - merge-queue + + - name: backport patches to 2.22 + conditions: + - label=backport 2.22-maintenance + actions: + backport: + branches: + - 2.22-maintenance + labels: + - merge-queue + + - name: backport patches to 2.23 + conditions: + - label=backport 2.23-maintenance + actions: + backport: + branches: + - 2.23-maintenance + labels: + - merge-queue + + - name: backport patches to 2.24 + conditions: + - label=backport 2.24-maintenance + actions: + backport: + branches: + - "2.24-maintenance" + labels: + - merge-queue From 12fa019ae558641df0a23a7973d64e687b2d8ba8 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 4 Sep 2024 21:43:59 +0200 Subject: [PATCH 051/361] NAR parser: Fix check for duplicate / incorrectly sorted entries "prevName" was always empty because it was declared in the wrong scope. (cherry picked from commit 495d32e1b8e5d5143f048d1be755a96bea822b19) --- src/libutil/archive.cc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index e2ebcda0c57..35376039836 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -214,11 +214,13 @@ static void parse(FileSystemObjectSink & sink, Source & source, const CanonPath else if (t == "directory") { sink.createDirectory(path); + std::string prevName; + while (1) { s = getString(); if (s == "entry") { - std::string name, prevName; + std::string name; s = getString(); if (s != "(") throw badArchive("expected open tag"); From 6187ee468f1ffd5ff4f931b9e027e718d12f9f20 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 5 Sep 2024 16:41:15 +0200 Subject: [PATCH 052/361] Add test case for NARs with duplicate directory entries This test was made by @puckipedia. (cherry picked from commit 83d5b32803e5b828967a27b1ea93c5728d3a4d0a) --- tests/functional/duplicate.nar | Bin 0 -> 1400 bytes tests/functional/local.mk | 2 +- tests/functional/{case-hack.sh => nars.sh} | 9 +++++---- 3 files changed, 6 insertions(+), 5 deletions(-) create mode 100644 tests/functional/duplicate.nar rename tests/functional/{case-hack.sh => nars.sh} (79%) diff --git a/tests/functional/duplicate.nar b/tests/functional/duplicate.nar new file mode 100644 index 0000000000000000000000000000000000000000..1d0993ed4cab41a6d45907ac0c17026afd5471a2 GIT binary patch literal 1400 zcmdT@+it=z49zZ#4T*h25D#ojRW~kz9 z$BsP}-LYn0DAbktf#N+v9qTBW&+onV;7jX2S0C@V9t<{lr}pt&I-XgF4v29E z3g3EyMu?&G+_E0O>ztu< "$TEST_ROOT/case.nar" cmp case.nar "$TEST_ROOT/case.nar" From f160d3ac68f67497f7f4948fa7a236790c7fee12 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 5 Sep 2024 16:48:43 +0200 Subject: [PATCH 053/361] Test that nix-store --restore fails if the output already exists This restores the behaviour from before the std::filesystem refactorings. 
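As an illustrative aside (not part of the original commit message): the behaviour being restored here hinges on the fact that, unlike `mkdir(2)`, which fails with `EEXIST`, `std::filesystem::create_directory()` merely returns `false` without reporting an error when the target already exists as a directory, so the boolean result has to be checked explicitly. A minimal sketch of that assumption:

```
// Hypothetical standalone example; the real change is in
// src/libutil/fs-sink.cc below.
#include <filesystem>
#include <iostream>

int main()
{
    std::filesystem::create_directory("out");               // creates it, returns true
    bool created = std::filesystem::create_directory("out");
    std::cout << created << "\n";                            // prints 0; no exception is thrown
}
```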
(cherry picked from commit da1ad28912334bb57f923afb4745273fd68f695c) --- src/libutil/fs-sink.cc | 3 ++- tests/functional/nars.sh | 3 +++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/libutil/fs-sink.cc b/src/libutil/fs-sink.cc index f15324d0a9f..696cd17eaf7 100644 --- a/src/libutil/fs-sink.cc +++ b/src/libutil/fs-sink.cc @@ -71,7 +71,8 @@ static GlobalConfig::Register r1(&restoreSinkSettings); void RestoreSink::createDirectory(const CanonPath & path) { - std::filesystem::create_directory(dstPath / path.rel()); + if (!std::filesystem::create_directory(dstPath / path.rel())) + throw Error("path '%s' already exists", (dstPath / path.rel()).string()); }; struct RestoreRegularFile : CreateRegularFileSink { diff --git a/tests/functional/nars.sh b/tests/functional/nars.sh index c58d12cd59d..106bd10fcf1 100755 --- a/tests/functional/nars.sh +++ b/tests/functional/nars.sh @@ -10,6 +10,9 @@ clearStore rm -rf "$TEST_ROOT/out" expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet "NAR directory is not sorted" +# Check that nix-store --restore fails if the output already exists. +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet "path '.*/out/' already exists" + # Check whether restoring and dumping a NAR that contains case # collisions is round-tripping, even on a case-insensitive system. rm -rf "$TEST_ROOT/case" From 0cfc9bf1334a340b2123221e9fead71ab2b3307e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 5 Sep 2024 16:54:12 +0200 Subject: [PATCH 054/361] More tests (cherry picked from commit 77c090cdbd56220895a2447efae79f68ed7861c5) --- tests/functional/nars.sh | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tests/functional/nars.sh b/tests/functional/nars.sh index 106bd10fcf1..b2b6b2b1ae5 100755 --- a/tests/functional/nars.sh +++ b/tests/functional/nars.sh @@ -13,6 +13,17 @@ expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet # Check that nix-store --restore fails if the output already exists. expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet "path '.*/out/' already exists" +rm -rf "$TEST_ROOT/out" +echo foo > "$TEST_ROOT/out" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet "cannot create directory.*File exists" + +rm -rf "$TEST_ROOT/out" +ln -s "$TEST_ROOT/out2" "$TEST_ROOT/out" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet "cannot create directory.*File exists" + +mkdir -p "$TEST_ROOT/out2" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet "path '.*/out/' already exists" + # Check whether restoring and dumping a NAR that contains case # collisions is round-tripping, even on a case-insensitive system. rm -rf "$TEST_ROOT/case" From 12889704966afa417a1c9044755665646f9c2872 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 5 Sep 2024 19:26:10 +0200 Subject: [PATCH 055/361] Detect NAR directory entries that collide with another path after case-hacking The test was made by @puckipedia. 
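To spell the scenario out (an editorial illustration; the entry names come from the test comment added below, and the snippet is not the real parser): a NAR that already contains `Test` and `Test~nix~case~hack~1` can add a third entry `test` whose case-hacked name differs from the existing literal entry only in case, so unpacking on a case-insensitive filesystem would make the two paths clobber each other:

```
#include <iostream>
#include <string>

int main()
{
    const std::string caseHackSuffix = "~nix~case~hack~";
    // "test" collides with the earlier entry "Test", so the case hack renames it ...
    std::string hacked = "test" + caseHackSuffix + "1";
    // ... yielding a name that differs only in case from the literal NAR entry
    // "Test~nix~case~hack~1"; the parser now rejects this combination.
    std::cout << hacked << "\n";   // test~nix~case~hack~1
}
```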
(cherry picked from commit 35575873813f60fff26f27a65e09038986f17cb5) --- src/libutil/archive.cc | 3 +++ tests/functional/case-collision.nar | Bin 0 -> 1928 bytes tests/functional/nars.sh | 6 ++++++ 3 files changed, 9 insertions(+) create mode 100644 tests/functional/case-collision.nar diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index 35376039836..849bfe02246 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -243,6 +243,9 @@ static void parse(FileSystemObjectSink & sink, Source & source, const CanonPath debug("case collision between '%1%' and '%2%'", i->first, name); name += caseHackSuffix; name += std::to_string(++i->second); + auto j = names.find(name); + if (j != names.end()) + throw Error("NAR contains file name '%s' that collides with case-hacked file name '%s'", prevName, j->first); } else names[name] = 0; } diff --git a/tests/functional/case-collision.nar b/tests/functional/case-collision.nar new file mode 100644 index 0000000000000000000000000000000000000000..2eff86901c617be2a830d23074923cb5b3b69aa3 GIT binary patch literal 1928 zcmd^9%}&EG3@&2)Y!WvfAc(_YXsQr5o`XF=mU?TnHklH4TQ7Zf(qMC#G>KJ{av&Gy za}?+EXU7lO&ocTjmrj*>2lMyfx+4Dz*%4W6x6p6LgbVFJp>-|c8?s<9`cB0$vW{^$ z?iYCMuQE2ai07y7GmkrZ&%wH>q|5FJD{C-t@C1MJc_jzOWqdC0M~c()?t*xok{-HJ zs!i9+H#iU9)|ED!?3UuAbZZF8FyEZ~jG6y2J~toM9S7FoQvGmE`2|Vij(PpHA1=*f z7ka8+sd=Qc8V} DaOkrB literal 0 HcmV?d00001 diff --git a/tests/functional/nars.sh b/tests/functional/nars.sh index b2b6b2b1ae5..f2339af88ea 100755 --- a/tests/functional/nars.sh +++ b/tests/functional/nars.sh @@ -37,3 +37,9 @@ cmp case.nar "$TEST_ROOT/case.nar" # removal of the suffix). touch "$TEST_ROOT/case/xt_CONNMARK.h~nix~case~hack~3" (! nix-store "${opts[@]}" --dump "$TEST_ROOT/case" > /dev/null) + +# Detect NARs that have a directory entry that after case-hacking +# collides with another entry (e.g. a directory containing 'Test', +# 'Test~nix~case~hack~1' and 'test'). +rm -rf "$TEST_ROOT/case" +expectStderr 1 nix-store "${opts[@]}" --restore "$TEST_ROOT/case" < case-collision.nar | grepQuiet "NAR contains file name 'test' that collides with case-hacked file name 'Test~nix~case~hack~1'" From a041688133e69016b94110c76719813e11135365 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 5 Sep 2024 20:37:26 +0200 Subject: [PATCH 056/361] Test that deserializing NARs with names with equal Unicode normal forms fails on macOS The test is based on the one by @puckipedia but with the file names swapped to make them sorted. (cherry picked from commit 7a765a6aafa27267659eb7339cf7039990f30caa) --- tests/functional/nars.sh | 11 +++++++++++ tests/functional/unnormalized.nar | Bin 0 -> 1728 bytes 2 files changed, 11 insertions(+) create mode 100644 tests/functional/unnormalized.nar diff --git a/tests/functional/nars.sh b/tests/functional/nars.sh index f2339af88ea..b16650e7e0d 100755 --- a/tests/functional/nars.sh +++ b/tests/functional/nars.sh @@ -43,3 +43,14 @@ touch "$TEST_ROOT/case/xt_CONNMARK.h~nix~case~hack~3" # 'Test~nix~case~hack~1' and 'test'). rm -rf "$TEST_ROOT/case" expectStderr 1 nix-store "${opts[@]}" --restore "$TEST_ROOT/case" < case-collision.nar | grepQuiet "NAR contains file name 'test' that collides with case-hacked file name 'Test~nix~case~hack~1'" + +# Deserializing a NAR that contains file names that Unicode-normalize +# to the same name should fail on macOS but succeed on Linux. 
+rm -rf "$TEST_ROOT/out" +if [[ $(uname) = Darwin ]]; then + expectStderr 1 nix-store --restore "$TEST_ROOT/out" < unnormalized.nar | grepQuiet "cannot create directory.*File exists" +else + nix-store --restore "$TEST_ROOT/out" < unnormalized.nar + [[ -e $TEST_ROOT/out/â ]] + [[ -e $TEST_ROOT/out/â ]] +fi diff --git a/tests/functional/unnormalized.nar b/tests/functional/unnormalized.nar new file mode 100644 index 0000000000000000000000000000000000000000..4b7edb17e0b4a9b75cf2958e9f12cceca22d267c GIT binary patch literal 1728 zcmd^9&2GXl4DNo}ka&koJMc51YTAwW-~mEvXhfQz#07fgQFxVI_fQML(N5J=2`NbQ zV*7LLe6bx5vh%0qe#)&Vu$+Qc|z8XR?vo72w}Ja>8T2af{uR|2^gTKAx4X{4ZTc z-^V~CHIFT~SHUB7Jzi)&Hr6bq0+*^U@tqW~ Date: Thu, 5 Sep 2024 20:55:24 +0200 Subject: [PATCH 057/361] Fix test on macOS (cherry picked from commit 21dcbd7e83929fbf8b6c666d743afa0a9ea73d83) --- tests/functional/nars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/nars.sh b/tests/functional/nars.sh index b16650e7e0d..bd2c49fce5c 100755 --- a/tests/functional/nars.sh +++ b/tests/functional/nars.sh @@ -48,7 +48,7 @@ expectStderr 1 nix-store "${opts[@]}" --restore "$TEST_ROOT/case" < case-collisi # to the same name should fail on macOS but succeed on Linux. rm -rf "$TEST_ROOT/out" if [[ $(uname) = Darwin ]]; then - expectStderr 1 nix-store --restore "$TEST_ROOT/out" < unnormalized.nar | grepQuiet "cannot create directory.*File exists" + expectStderr 1 nix-store --restore "$TEST_ROOT/out" < unnormalized.nar | grepQuiet "path '.*/out/â' already exists" else nix-store --restore "$TEST_ROOT/out" < unnormalized.nar [[ -e $TEST_ROOT/out/â ]] From 25510ba66f31dce539796d0101cfee8c52e2752d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 5 Sep 2024 22:21:53 +0200 Subject: [PATCH 058/361] RestoreSink::createDirectory(): Use append() On macOS, `mkdir("x/')` behaves differently than `mkdir("x")` if `x` is a dangling symlink (the formed succeed while the latter fails). So make sure we always strip the trailing slash. 
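To make the difference concrete (an editorial sketch restating the claim above, not text from the original commit): `std::filesystem::path::operator/` appends a trailing separator when the right-hand operand is empty, which is how the restore sink ended up passing a trailing slash to `mkdir` for the top-level path in the first place:

```
// Assumes only the standard behaviour of std::filesystem::path;
// the actual fix is the append() helper in src/libutil/fs-sink.cc below.
#include <filesystem>
#include <iostream>

int main()
{
    std::filesystem::path dst = "out";
    std::cout << (dst / "").string() << "\n";     // "out/"  (trailing separator appended)
    std::cout << (dst / "sub").string() << "\n";  // "out/sub"
}
```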
(cherry picked from commit 9fcb588dd8a7b3f0d7d103cea449abcf9f736ad6) --- src/libutil/fs-sink.cc | 20 ++++++++++---------- tests/functional/nars.sh | 8 ++++---- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/src/libutil/fs-sink.cc b/src/libutil/fs-sink.cc index 696cd17eaf7..a08cb0a4cf7 100644 --- a/src/libutil/fs-sink.cc +++ b/src/libutil/fs-sink.cc @@ -68,11 +68,19 @@ static RestoreSinkSettings restoreSinkSettings; static GlobalConfig::Register r1(&restoreSinkSettings); +static std::filesystem::path append(const std::filesystem::path & src, const CanonPath & path) +{ + auto dst = src; + if (!path.rel().empty()) + dst /= path.rel(); + return dst; +} void RestoreSink::createDirectory(const CanonPath & path) { - if (!std::filesystem::create_directory(dstPath / path.rel())) - throw Error("path '%s' already exists", (dstPath / path.rel()).string()); + auto p = append(dstPath, path); + if (!std::filesystem::create_directory(p)) + throw Error("path '%s' already exists", p.string()); }; struct RestoreRegularFile : CreateRegularFileSink { @@ -83,14 +91,6 @@ struct RestoreRegularFile : CreateRegularFileSink { void preallocateContents(uint64_t size) override; }; -static std::filesystem::path append(const std::filesystem::path & src, const CanonPath & path) -{ - auto dst = src; - if (!path.rel().empty()) - dst /= path.rel(); - return dst; -} - void RestoreSink::createRegularFile(const CanonPath & path, std::function func) { auto p = append(dstPath, path); diff --git a/tests/functional/nars.sh b/tests/functional/nars.sh index bd2c49fce5c..4f2470ea719 100755 --- a/tests/functional/nars.sh +++ b/tests/functional/nars.sh @@ -11,18 +11,18 @@ rm -rf "$TEST_ROOT/out" expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet "NAR directory is not sorted" # Check that nix-store --restore fails if the output already exists. -expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet "path '.*/out/' already exists" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet "path '.*/out' already exists" rm -rf "$TEST_ROOT/out" echo foo > "$TEST_ROOT/out" -expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet "cannot create directory.*File exists" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet "File exists" rm -rf "$TEST_ROOT/out" ln -s "$TEST_ROOT/out2" "$TEST_ROOT/out" -expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet "cannot create directory.*File exists" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet "File exists" mkdir -p "$TEST_ROOT/out2" -expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet "path '.*/out/' already exists" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet "path '.*/out' already exists" # Check whether restoring and dumping a NAR that contains case # collisions is round-tripping, even on a case-insensitive system. 
From e25410c7886a91167ca0ca2f496bf6bf17ee6510 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 6 Sep 2024 16:28:09 +0200 Subject: [PATCH 059/361] Test that deserializing regular files / symlinks is exclusive (cherry picked from commit 52ba3cc5eac0418218a90c0cddb06688d4c7b5d3) --- tests/functional/nars.sh | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/tests/functional/nars.sh b/tests/functional/nars.sh index 4f2470ea719..ed19637a1bc 100755 --- a/tests/functional/nars.sh +++ b/tests/functional/nars.sh @@ -24,6 +24,44 @@ expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet mkdir -p "$TEST_ROOT/out2" expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet "path '.*/out' already exists" +# The same, but for a regular file. +nix-store --dump ./nars.sh > "$TEST_ROOT/tmp.nar" + +rm -rf "$TEST_ROOT/out" +nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" | grepQuiet "File exists" + +rm -rf "$TEST_ROOT/out" +mkdir -p "$TEST_ROOT/out" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" | grepQuiet "File exists" + +rm -rf "$TEST_ROOT/out" +ln -s "$TEST_ROOT/out2" "$TEST_ROOT/out" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" | grepQuiet "File exists" + +mkdir -p "$TEST_ROOT/out2" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" | grepQuiet "File exists" + +# The same, but for a symlink +ln -sfn foo "$TEST_ROOT/symlink" +nix-store --dump "$TEST_ROOT/symlink" > "$TEST_ROOT/tmp.nar" + +rm -rf "$TEST_ROOT/out" +nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" +[[ -L "$TEST_ROOT/out" ]] +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" | grepQuiet "File exists" + +rm -rf "$TEST_ROOT/out" +mkdir -p "$TEST_ROOT/out" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" | grepQuiet "File exists" + +rm -rf "$TEST_ROOT/out" +ln -s "$TEST_ROOT/out2" "$TEST_ROOT/out" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" | grepQuiet "File exists" + +mkdir -p "$TEST_ROOT/out2" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" | grepQuiet "File exists" + # Check whether restoring and dumping a NAR that contains case # collisions is round-tripping, even on a case-insensitive system. rm -rf "$TEST_ROOT/case" From 2e1cb495c1bf36d59c234d923a139c01a3866ee1 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 9 Sep 2024 14:11:35 +0200 Subject: [PATCH 060/361] Typo (cherry picked from commit 4cfa59fdb32aa4fcc58b735d8843ce308692a652) --- tests/functional/nars.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/nars.sh b/tests/functional/nars.sh index ed19637a1bc..9f5f43dc635 100755 --- a/tests/functional/nars.sh +++ b/tests/functional/nars.sh @@ -42,7 +42,7 @@ expectStderr 1 nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" | gre mkdir -p "$TEST_ROOT/out2" expectStderr 1 nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" | grepQuiet "File exists" -# The same, but for a symlink +# The same, but for a symlink. 
ln -sfn foo "$TEST_ROOT/symlink" nix-store --dump "$TEST_ROOT/symlink" > "$TEST_ROOT/tmp.nar" From a6ad5565ef15a18ea2f60de4d57f75cd0175b167 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 9 Sep 2024 14:29:05 +0200 Subject: [PATCH 061/361] Improve use-case-hack description slightly (cherry picked from commit 5ca2f58798e6f514b5194c16c0fea0d8ec128171) --- src/libutil/archive.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index 849bfe02246..458438cbdd5 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -23,7 +23,7 @@ struct ArchiveSettings : Config false, #endif "use-case-hack", - "Whether to enable a Darwin-specific hack for dealing with file name collisions."}; + "Whether to enable a macOS-specific hack for dealing with file name case collisions."}; }; static ArchiveSettings archiveSettings; From 0f825b38f43df5722be32526476b832b62b98e97 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 10 Sep 2024 13:45:04 +0200 Subject: [PATCH 062/361] Bump version --- .version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version b/.version index c5f92d6f8fd..7ed0e12bccd 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.24.6 +2.24.7 From 40461a8e0e347d457875653a1e08da51dbb1c587 Mon Sep 17 00:00:00 2001 From: Artturin Date: Wed, 11 Sep 2024 00:17:03 +0300 Subject: [PATCH 063/361] Fix making the build directory kept by `keep-failed` readable MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Caused by 1d3696f0fb88d610abc234a60e0d6d424feafdf1 Without this fix the kept build directory is readable only by root ``` $ sudo ls -ld /comp-temp/nix-build-openssh-static-x86_64-unknown-linux-musl-9.8p1.drv-5 drwx------ root root 60 B Wed Sep 11 00:09:48 2024  /comp-temp/nix-build-openssh-static-x86_64-unknown-linux-musl-9.8p1.drv-5/ $ sudo ls -ld /comp-temp/nix-build-openssh-static-x86_64-unknown-linux-musl-9.8p1.drv-5/build drwxr-xr-x nixbld1 nixbld 80 B Wed Sep 11 00:09:58 2024  /comp-temp/nix-build-openssh-static-x86_64-unknown-linux-musl-9.8p1.drv-5/build/ ``` (cherry picked from commit ebebe626ff4ec6da98c0a043c64b35efe1c05bc3) --- src/libstore/unix/build/local-derivation-goal.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libstore/unix/build/local-derivation-goal.cc b/src/libstore/unix/build/local-derivation-goal.cc index d3482df17a1..c9a54bb0ffa 100644 --- a/src/libstore/unix/build/local-derivation-goal.cc +++ b/src/libstore/unix/build/local-derivation-goal.cc @@ -3000,6 +3000,7 @@ void LocalDerivationGoal::deleteTmpDir(bool force) might have privileged stuff (like a copy of netrc). 
*/ if (settings.keepFailed && !force && !drv->isBuiltin()) { printError("note: keeping build directory '%s'", tmpDir); + chmod(topTmpDir.c_str(), 0755); chmod(tmpDir.c_str(), 0755); } else From 97c5ac575277c35c5df09c837c312a5ed8408fa1 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 9 Sep 2024 19:52:21 +0200 Subject: [PATCH 064/361] Git fetcher: Don't update mtime of ref file if fetching by rev This fixes the warning $ nix eval --store /tmp/nix --expr 'builtins.fetchTree { type = "git"; url = "https://github.com/DeterminateSystems/attic"; ref = "fixups-for-magic-nix-cache"; rev = "635753a2069d4b8228e846dc5c09ad361c75cd1a"; }' warning: could not update mtime for file '/home/eelco/.cache/nix/gitv3/09788h9zgba5lbfkaa6ija2dvi004jwsqjf5ln21i2njs07cz766/refs/heads/fixups-for-magic-nix-cache': error: changing modification time of '"/home/eelco/.cache/nix/gitv3/09788h9zgba5lbfkaa6ija2dvi004jwsqjf5ln21i2njs07cz766/refs/heads/fixups-for-magic-nix-cache"': No such file or directory When we're fetching by rev, that file doesn't necessarily exist, and we don't care about it anyway. (cherry picked from commit b80b091bac1eeb6fa64db1ae078de5c6a2e4b1b8) --- src/libfetchers/git.cc | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 076c757c5f5..6c5bda47000 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -584,9 +584,10 @@ struct GitInputScheme : InputScheme } try { - setWriteTime(localRefFile, now, now); + if (!input.getRev()) + setWriteTime(localRefFile, now, now); } catch (Error & e) { - warn("could not update mtime for file '%s': %s", localRefFile, e.msg()); + warn("could not update mtime for file '%s': %s", localRefFile, e.info().msg); } if (!originalRef && !storeCachedHead(repoInfo.url, ref)) warn("could not update cached head '%s' for '%s'", ref, repoInfo.url); From 751907dc8a2cf1af867fbf4877ec64b68c010ed6 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 4 Sep 2024 14:43:43 +0200 Subject: [PATCH 065/361] Git fetcher: Ignore .gitmodules entries that are not submodules Fixes #10739. (cherry picked from commit 9d24080090539c717015add8f2d8ce02d1d84a2d) --- src/libfetchers/git-utils.cc | 18 ++++++++++++------ tests/functional/fetchGitSubmodules.sh | 21 +++++++++++++++++++++ 2 files changed, 33 insertions(+), 6 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 114aa4ec078..0bc930ab28e 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -601,12 +601,16 @@ struct GitSourceAccessor : SourceAccessor return readBlob(path, true); } - Hash getSubmoduleRev(const CanonPath & path) + /** + * If `path` exists and is a submodule, return its + * revision. Otherwise return nothing. + */ + std::optional getSubmoduleRev(const CanonPath & path) { - auto entry = need(path); + auto entry = lookup(path); - if (git_tree_entry_type(entry) != GIT_OBJECT_COMMIT) - throw Error("'%s' is not a submodule", showPath(path)); + if (!entry || git_tree_entry_type(entry) != GIT_OBJECT_COMMIT) + return std::nullopt; return toHash(*git_tree_entry_id(entry)); } @@ -1074,8 +1078,10 @@ std::vector> GitRepoImpl::getSubmodules auto rawAccessor = getRawAccessor(rev); for (auto & submodule : parseSubmodules(pathTemp)) { - auto rev = rawAccessor->getSubmoduleRev(submodule.path); - result.push_back({std::move(submodule), rev}); + /* Filter out .gitmodules entries that don't exist or are not + submodules. 
*/ + if (auto rev = rawAccessor->getSubmoduleRev(submodule.path)) + result.push_back({std::move(submodule), *rev}); } return result; diff --git a/tests/functional/fetchGitSubmodules.sh b/tests/functional/fetchGitSubmodules.sh index 4a3e4c347ec..cd3b51674cf 100755 --- a/tests/functional/fetchGitSubmodules.sh +++ b/tests/functional/fetchGitSubmodules.sh @@ -104,6 +104,27 @@ noSubmoduleRepo=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$subR [[ $noSubmoduleRepoBaseline == $noSubmoduleRepo ]] +# Test .gitmodules with entries that refer to non-existent objects or objects that are not submodules. +cat >> $rootRepo/.gitmodules < $rootRepo/file +git -C $rootRepo add file +git -C $rootRepo commit -a -m "Add bad submodules" + +rev=$(git -C $rootRepo rev-parse HEAD) + +r=$(nix eval --raw --expr "builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = true; }") + +[[ -f $r/file ]] +[[ ! -e $r/missing ]] + # Test relative submodule URLs. rm $TEST_HOME/.cache/nix/fetcher-cache* rm -rf $rootRepo/.git $rootRepo/.gitmodules $rootRepo/sub From cd97688bce63dcc6605486a5a2cc41a5d11b3552 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Mon, 26 Aug 2024 21:14:20 +0200 Subject: [PATCH 066/361] builtins.readDir: fix nix error trace on filesystem errors MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Before: nix-env % ./src/nix/nix eval --impure --expr 'let f = builtins.readDir "/nix/store/hs3yxdq9knimwdm51gvbs4dvncz46f9d-hello-2.12.1/foo"; in f' --show-trace error: filesystem error: directory iterator cannot open directory: No such file or directory [/nix/store/hs3yxdq9knimwdm51gvbs4dvncz46f9d-hello-2.12.1/foo] After: error: … while calling the 'readDir' builtin at «string»:1:9: 1| let f = builtins.readDir "/nix/store/hs3yxdq9knimwdm51gvbs4dvncz46f9d-hello-2.12.1/foo"; in f | ^ error: reading directory '/nix/store/hs3yxdq9knimwdm51gvbs4dvncz46f9d-hello-2.12.1/foo': No such file or directory (cherry picked from commit 22ba4dc78d956020e06e0618f020e11700749823) --- src/libutil/posix-source-accessor.cc | 42 +++++++++++++++------------- 1 file changed, 23 insertions(+), 19 deletions(-) diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc index 2b1a485d55c..d09ea4a87ea 100644 --- a/src/libutil/posix-source-accessor.cc +++ b/src/libutil/posix-source-accessor.cc @@ -132,23 +132,24 @@ SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & { assertNoSymlinks(path); DirEntries res; - for (auto & entry : std::filesystem::directory_iterator{makeAbsPath(path)}) { - checkInterrupt(); - auto type = [&]() -> std::optional { - std::filesystem::file_type nativeType; - try { - nativeType = entry.symlink_status().type(); - } catch (std::filesystem::filesystem_error & e) { - // We cannot always stat the child. (Ideally there is no - // stat because the native directory entry has the type - // already, but this isn't always the case.) - if (e.code() == std::errc::permission_denied || e.code() == std::errc::operation_not_permitted) - return std::nullopt; - else throw; - } - - // cannot exhaustively enumerate because implementation-specific - // additional file types are allowed. 
+ try { + for (auto & entry : std::filesystem::directory_iterator{makeAbsPath(path)}) { + checkInterrupt(); + auto type = [&]() -> std::optional { + std::filesystem::file_type nativeType; + try { + nativeType = entry.symlink_status().type(); + } catch (std::filesystem::filesystem_error & e) { + // We cannot always stat the child. (Ideally there is no + // stat because the native directory entry has the type + // already, but this isn't always the case.) + if (e.code() == std::errc::permission_denied || e.code() == std::errc::operation_not_permitted) + return std::nullopt; + else throw; + } + + // cannot exhaustively enumerate because implementation-specific + // additional file types are allowed. #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wswitch-enum" switch (nativeType) { @@ -158,8 +159,11 @@ SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & default: return tMisc; } #pragma GCC diagnostic pop - }(); - res.emplace(entry.path().filename().string(), type); + }(); + res.emplace(entry.path().filename().string(), type); + } + } catch (std::filesystem::filesystem_error & e) { + throw SysError("reading directory %1%", showPath(path)); } return res; } From c84fc0120f57b117c5cd24dcaa82033a32ce8761 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Thu, 5 Sep 2024 12:59:54 +0200 Subject: [PATCH 067/361] builtins.unpackChannel: wrap filesystem errors and sanitize channelName Otherwise these errors are not caught correctly (cherry picked from commit 70c52d72f4ee93b68b57b12cd7892bba03446067) --- src/libstore/builtins/unpack-channel.cc | 28 +++++++++++++++++++------ 1 file changed, 22 insertions(+), 6 deletions(-) diff --git a/src/libstore/builtins/unpack-channel.cc b/src/libstore/builtins/unpack-channel.cc index a5f2b8e3adf..7f9a520eed3 100644 --- a/src/libstore/builtins/unpack-channel.cc +++ b/src/libstore/builtins/unpack-channel.cc @@ -13,21 +13,37 @@ void builtinUnpackChannel( return i->second; }; - auto out = outputs.at("out"); - auto channelName = getAttr("channelName"); + std::filesystem::path out(outputs.at("out")); + std::filesystem::path channelName(getAttr("channelName")); auto src = getAttr("src"); + if (channelName.filename() != channelName) { + throw Error("channelName is not allowed to contain filesystem seperators, got %1%", channelName); + } + createDirs(out); unpackTarfile(src, out); - auto entries = std::filesystem::directory_iterator{out}; - auto fileName = entries->path().string(); - auto fileCount = std::distance(std::filesystem::begin(entries), std::filesystem::end(entries)); + size_t fileCount; + std::string fileName; + try { + auto entries = std::filesystem::directory_iterator{out}; + fileName = entries->path().string(); + fileCount = std::distance(std::filesystem::begin(entries), std::filesystem::end(entries)); + } catch (std::filesystem::filesystem_error &e) { + throw SysError("failed to read directory %1%", out); + } + if (fileCount != 1) throw Error("channel tarball '%s' contains more than one file", src); - std::filesystem::rename(fileName, (out + "/" + channelName)); + std::filesystem::path target(out / channelName); + try { + std::filesystem::rename(fileName, target); + } catch (std::filesystem::filesystem_error &e) { + throw SysError("failed to rename %1% to %2%", fileName, target); + } } } From 60001b193672074ff205a53940214a8e6abb8b91 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Thu, 5 Sep 2024 14:08:20 +0200 Subject: [PATCH 068/361] add release notes for filesystem fixes Update 
doc/manual/rl-next/filesystem-errors.md Co-authored-by: John Ericson (cherry picked from commit 04ce0e648aeac282b114cf426cea8a078c97e0a8) --- doc/manual/rl-next/filesystem-errors.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 doc/manual/rl-next/filesystem-errors.md diff --git a/doc/manual/rl-next/filesystem-errors.md b/doc/manual/rl-next/filesystem-errors.md new file mode 100644 index 00000000000..2d5b2622860 --- /dev/null +++ b/doc/manual/rl-next/filesystem-errors.md @@ -0,0 +1,14 @@ +--- +synopsis: wrap filesystem exceptions more correctly +issues: [] +prs: [11378] +--- + + +With the switch to `std::filesystem` in different places, Nix started to throw `std::filesystem::filesystem_error` in many places instead of its own exceptions. + +This lead to no longer generating error traces, for example when listing a non-existing directory, and can also lead to crashes inside the Nix REPL. + +This version catches these types of exception correctly and wrap them into Nix's own exeception type. + +Author: [**@Mic92**](https://github.com/Mic92) From 4354d903845ec2329a764d615130decc942f8a19 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 11 Sep 2024 11:59:11 -0400 Subject: [PATCH 069/361] tweak unpack channel built-in, std::filesystem::path for tarball (cherry picked from commit 193dc490971b0435c7de7565b86110a59d515ff2) --- src/libstore/builtins/unpack-channel.cc | 36 ++++++++++++++----------- src/libutil/tarfile.cc | 22 ++++++++------- src/libutil/tarfile.hh | 6 ++--- 3 files changed, 37 insertions(+), 27 deletions(-) diff --git a/src/libstore/builtins/unpack-channel.cc b/src/libstore/builtins/unpack-channel.cc index 7f9a520eed3..d30626a309b 100644 --- a/src/libstore/builtins/unpack-channel.cc +++ b/src/libstore/builtins/unpack-channel.cc @@ -3,46 +3,52 @@ namespace nix { +namespace fs { using namespace std::filesystem; } + void builtinUnpackChannel( const BasicDerivation & drv, const std::map & outputs) { - auto getAttr = [&](const std::string & name) { + auto getAttr = [&](const std::string & name) -> const std::string & { auto i = drv.env.find(name); if (i == drv.env.end()) throw Error("attribute '%s' missing", name); return i->second; }; - std::filesystem::path out(outputs.at("out")); - std::filesystem::path channelName(getAttr("channelName")); - auto src = getAttr("src"); + fs::path out{outputs.at("out")}; + auto & channelName = getAttr("channelName"); + auto & src = getAttr("src"); - if (channelName.filename() != channelName) { + if (fs::path{channelName}.filename().string() != channelName) { throw Error("channelName is not allowed to contain filesystem seperators, got %1%", channelName); } - createDirs(out); + try { + fs::create_directories(out); + } catch (fs::filesystem_error &) { + throw SysError("creating directory '%1%'", out.string()); + } unpackTarfile(src, out); size_t fileCount; std::string fileName; try { - auto entries = std::filesystem::directory_iterator{out}; + auto entries = fs::directory_iterator{out}; fileName = entries->path().string(); - fileCount = std::distance(std::filesystem::begin(entries), std::filesystem::end(entries)); - } catch (std::filesystem::filesystem_error &e) { - throw SysError("failed to read directory %1%", out); + fileCount = std::distance(fs::begin(entries), fs::end(entries)); + } catch (fs::filesystem_error &) { + throw SysError("failed to read directory %1%", out.string()); } - if (fileCount != 1) throw Error("channel tarball '%s' contains more than one file", src); - std::filesystem::path target(out / channelName); 
+ + auto target = out / channelName; try { - std::filesystem::rename(fileName, target); - } catch (std::filesystem::filesystem_error &e) { - throw SysError("failed to rename %1% to %2%", fileName, target); + fs::rename(fileName, target); + } catch (fs::filesystem_error &) { + throw SysError("failed to rename %1% to %2%", fileName, target.string()); } } diff --git a/src/libutil/tarfile.cc b/src/libutil/tarfile.cc index 2e323629512..a8a22d283f8 100644 --- a/src/libutil/tarfile.cc +++ b/src/libutil/tarfile.cc @@ -8,6 +8,10 @@ namespace nix { +namespace fs { +using namespace std::filesystem; +} + namespace { int callback_open(struct archive *, void * self) @@ -102,14 +106,14 @@ TarArchive::TarArchive(Source & source, bool raw, std::optional com "Failed to open archive (%s)"); } -TarArchive::TarArchive(const Path & path) +TarArchive::TarArchive(const fs::path & path) : archive{archive_read_new()} , buffer(defaultBufferSize) { archive_read_support_filter_all(archive); enableSupportedFormats(archive); archive_read_set_option(archive, NULL, "mac-ext", NULL); - check(archive_read_open_filename(archive, path.c_str(), 16384), "failed to open archive: %s"); + check(archive_read_open_filename(archive, path.string().c_str(), 16384), "failed to open archive: %s"); } void TarArchive::close() @@ -123,7 +127,7 @@ TarArchive::~TarArchive() archive_read_free(this->archive); } -static void extract_archive(TarArchive & archive, const Path & destDir) +static void extract_archive(TarArchive & archive, const fs::path & destDir) { int flags = ARCHIVE_EXTRACT_TIME | ARCHIVE_EXTRACT_SECURE_SYMLINKS | ARCHIVE_EXTRACT_SECURE_NODOTDOT; @@ -140,7 +144,7 @@ static void extract_archive(TarArchive & archive, const Path & destDir) else archive.check(r); - archive_entry_copy_pathname(entry, (destDir + "/" + name).c_str()); + archive_entry_copy_pathname(entry, (destDir / name).string().c_str()); // sources can and do contain dirs with no rx bits if (archive_entry_filetype(entry) == AE_IFDIR && (archive_entry_mode(entry) & 0500) != 0500) @@ -149,7 +153,7 @@ static void extract_archive(TarArchive & archive, const Path & destDir) // Patch hardlink path const char * original_hardlink = archive_entry_hardlink(entry); if (original_hardlink) { - archive_entry_copy_hardlink(entry, (destDir + "/" + original_hardlink).c_str()); + archive_entry_copy_hardlink(entry, (destDir / original_hardlink).string().c_str()); } archive.check(archive_read_extract(archive.archive, entry, flags)); @@ -158,19 +162,19 @@ static void extract_archive(TarArchive & archive, const Path & destDir) archive.close(); } -void unpackTarfile(Source & source, const Path & destDir) +void unpackTarfile(Source & source, const fs::path & destDir) { auto archive = TarArchive(source); - createDirs(destDir); + fs::create_directories(destDir); extract_archive(archive, destDir); } -void unpackTarfile(const Path & tarFile, const Path & destDir) +void unpackTarfile(const fs::path & tarFile, const fs::path & destDir) { auto archive = TarArchive(tarFile); - createDirs(destDir); + fs::create_directories(destDir); extract_archive(archive, destDir); } diff --git a/src/libutil/tarfile.hh b/src/libutil/tarfile.hh index 0517177dbe6..5e29c6bbac3 100644 --- a/src/libutil/tarfile.hh +++ b/src/libutil/tarfile.hh @@ -15,7 +15,7 @@ struct TarArchive void check(int err, const std::string & reason = "failed to extract archive (%s)"); - explicit TarArchive(const Path & path); + explicit TarArchive(const std::filesystem::path & path); /// @brief Create a generic archive from source. 
/// @param source - Input byte stream. @@ -37,9 +37,9 @@ struct TarArchive int getArchiveFilterCodeByName(const std::string & method); -void unpackTarfile(Source & source, const Path & destDir); +void unpackTarfile(Source & source, const std::filesystem::path & destDir); -void unpackTarfile(const Path & tarFile, const Path & destDir); +void unpackTarfile(const std::filesystem::path & tarFile, const std::filesystem::path & destDir); time_t unpackTarfileToSink(TarArchive & archive, ExtendedFileSystemObjectSink & parseSink); From 684a690480784c21ad5580735c41af13fff04b6b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Fri, 13 Sep 2024 14:20:34 +0200 Subject: [PATCH 070/361] update filesystem-errors changelog to 2.24 release --- doc/manual/rl-next/filesystem-errors.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/rl-next/filesystem-errors.md b/doc/manual/rl-next/filesystem-errors.md index 2d5b2622860..faa9352b96a 100644 --- a/doc/manual/rl-next/filesystem-errors.md +++ b/doc/manual/rl-next/filesystem-errors.md @@ -7,7 +7,7 @@ prs: [11378] With the switch to `std::filesystem` in different places, Nix started to throw `std::filesystem::filesystem_error` in many places instead of its own exceptions. -This lead to no longer generating error traces, for example when listing a non-existing directory, and can also lead to crashes inside the Nix REPL. +This lead to no longer generating error traces, for example when listing a non-existing directory. This version catches these types of exception correctly and wrap them into Nix's own exeception type. From 1b076b4f84a74a47d4f4eeb14c7d1e485a754c87 Mon Sep 17 00:00:00 2001 From: "mergify[bot]" <37929162+mergify[bot]@users.noreply.github.com> Date: Mon, 16 Sep 2024 16:03:05 +0200 Subject: [PATCH 071/361] doc: add admonitions for macOS 15 Sequoia update (#11487) (#11509) The impending release of macOS 15 Sequoia will break many existing nix installs on macOS, which may lead to an increased number of people who are looking to try to reinstall Nix without noticing the open/pinned issue (#10892) that explains the problem and outlines how to migrate existing installs. These admonitions are a short-term measure until we are over the hump and support volumes dwindle. (cherry picked from commit 48477d4a3e7130c89b2ded4496c00ef74601091f) Co-authored-by: Travis A. Everett --- doc/manual/src/installation/index.md | 8 ++++++++ doc/manual/src/installation/installing-binary.md | 8 ++++++++ doc/manual/src/installation/uninstall.md | 8 ++++++++ 3 files changed, 24 insertions(+) diff --git a/doc/manual/src/installation/index.md b/doc/manual/src/installation/index.md index dafdeb667e5..16a7f485a1d 100644 --- a/doc/manual/src/installation/index.md +++ b/doc/manual/src/installation/index.md @@ -14,6 +14,14 @@ This option requires either: * Linux running systemd, with SELinux disabled * MacOS +> **Updating to macOS 15 Sequoia** +> +> If you recently updated to macOS 15 Sequoia and are getting +> ```console +> error: the user '_nixbld1' in the group 'nixbld' does not exist +> ``` +> when running Nix commands, refer to GitHub issue [NixOS/nix#10892](https://github.com/NixOS/nix/issues/10892) for instructions to fix your installation without reinstalling. 
+ ```console $ bash <(curl -L https://nixos.org/nix/install) --daemon ``` diff --git a/doc/manual/src/installation/installing-binary.md b/doc/manual/src/installation/installing-binary.md index 6a168ff3dfd..6a1a5ddcaff 100644 --- a/doc/manual/src/installation/installing-binary.md +++ b/doc/manual/src/installation/installing-binary.md @@ -1,5 +1,13 @@ # Installing a Binary Distribution +> **Updating to macOS 15 Sequoia** +> +> If you recently updated to macOS 15 Sequoia and are getting +> ```console +> error: the user '_nixbld1' in the group 'nixbld' does not exist +> ``` +> when running Nix commands, refer to GitHub issue [NixOS/nix#10892](https://github.com/NixOS/nix/issues/10892) for instructions to fix your installation without reinstalling. + To install the latest version Nix, run the following command: ```console diff --git a/doc/manual/src/installation/uninstall.md b/doc/manual/src/installation/uninstall.md index 590327fea1b..97590e3db8a 100644 --- a/doc/manual/src/installation/uninstall.md +++ b/doc/manual/src/installation/uninstall.md @@ -43,6 +43,14 @@ which you may remove. ### macOS +> **Updating to macOS 15 Sequoia** +> +> If you recently updated to macOS 15 Sequoia and are getting +> ```console +> error: the user '_nixbld1' in the group 'nixbld' does not exist +> ``` +> when running Nix commands, refer to GitHub issue [NixOS/nix#10892](https://github.com/NixOS/nix/issues/10892) for instructions to fix your installation without reinstalling. + 1. If system-wide shell initialisation files haven't been altered since installing Nix, use the backups made by the installer: ```console From 9941f620c442f0996d7889d948b781304e5fb0f2 Mon Sep 17 00:00:00 2001 From: Brian McGee Date: Mon, 31 Jul 2023 18:40:45 +0100 Subject: [PATCH 072/361] base64Decode: clearer error message when an invalid character is detected Output the offending string in its entirety to provide context. Closes #8479 (cherry picked from commit dc3ccf02bfd4d359228b54f5c24ae2b6caf6428e) --- src/libutil/util.cc | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 698e181a1d1..174e7ce8fab 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -260,8 +260,9 @@ std::string base64Decode(std::string_view s) if (c == '\n') continue; char digit = base64DecodeChars[(unsigned char) c]; - if (digit == npos) - throw Error("invalid character in Base64 string: '%c'", c); + if (digit == npos) { + throw Error("invalid character in Base64 string: '%c' in '%s'", c, s.data()); + } bits += 6; d = d << 6 | digit; From 5b5e1920eb519304833aebf9e061c66a262880cd Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 19 Sep 2024 19:16:31 +0200 Subject: [PATCH 073/361] Fix missing GC root in zipAttrsWith My SNAFU was that I assumed that all the `Value *`s we put in `attrsSeen` are already reachable (which they are), but I forgot about the `elems` pointer in `ListBuilder`. Fixes #11547. 
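As background (an editorial sketch, not part of the original commit message): Boehm GC only scans the stack, registers and memory it allocated itself, so pointers kept in map nodes that a plain `std::allocator` obtained via `malloc` are invisible to the collector. The fix below switches the map to `traceable_allocator`, whose nodes are allocated uncollectable but scanned. The key/value types here are reconstructed stand-ins and may not match the source verbatim:

```
// Hypothetical minimal illustration of the two allocation strategies
// (the header location of gc_allocator.h may vary by installation).
#include <gc/gc_allocator.h>
#include <map>
#include <string>

struct Item { void * list = nullptr; };   // stand-in for the real per-attribute state

// Node memory comes from malloc: the collector never scans it, so a
// GC-managed pointer stored only here can be freed prematurely.
std::map<std::string, Item> invisible;

// Node memory comes from GC_MALLOC_UNCOLLECTABLE: scanned by the collector,
// never collected itself, so stored pointers stay reachable.
std::map<std::string, Item, std::less<std::string>,
         traceable_allocator<std::pair<const std::string, Item>>> visible;

int main() { return 0; }
```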
(cherry picked from commit 0c2fdd2f3c0f04bef4b5c74fbb02a5f8227c07df) --- src/libexpr/primops.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 7ceb84f0e39..50552f6deff 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -3136,7 +3136,7 @@ static void prim_zipAttrsWith(EvalState & state, const PosIdx pos, Value * * arg std::optional list; }; - std::map attrsSeen; + std::map, traceable_allocator>> attrsSeen; state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.zipAttrsWith"); state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.zipAttrsWith"); From ecd83dc155ac770caa5faccb98f045da8d579e29 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 19 Sep 2024 19:52:47 +0200 Subject: [PATCH 074/361] Use HAVE_BOEHMGC Co-authored-by: Robert Hensing (cherry picked from commit 4449b0da744c32cb9cbb06b661a5f5df4444497a) --- src/libexpr/primops.cc | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 50552f6deff..8536eb3597e 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -3136,7 +3136,11 @@ static void prim_zipAttrsWith(EvalState & state, const PosIdx pos, Value * * arg std::optional list; }; +#if HAVE_BOEHMGC std::map, traceable_allocator>> attrsSeen; +#else + std::map attrsSeen; +#endif state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.zipAttrsWith"); state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.zipAttrsWith"); From a7fdef6858dd45b9d7bda7c92324c63faee7f509 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 20 Sep 2024 01:19:15 +0200 Subject: [PATCH 075/361] Bump version --- .version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version b/.version index 7ed0e12bccd..4ee8b99322b 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.24.7 +2.24.8 From 563dedcf64d960e816fcd216f0944638e6677626 Mon Sep 17 00:00:00 2001 From: Alyssa Ross Date: Sat, 31 Aug 2024 15:59:18 +0200 Subject: [PATCH 076/361] Don't refer to public keys as secret keys in error This constructor is used for public keys as well. (cherry picked from commit 9cc550d65252d3ad822cc12496ef71482c47ff7e) --- src/libutil/signature/local-keys.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/signature/local-keys.cc b/src/libutil/signature/local-keys.cc index 858b036f550..00c4543f2be 100644 --- a/src/libutil/signature/local-keys.cc +++ b/src/libutil/signature/local-keys.cc @@ -22,7 +22,7 @@ Key::Key(std::string_view s) key = ss.payload; if (name == "" || key == "") - throw Error("secret key is corrupt"); + throw Error("key is corrupt"); key = base64Decode(key); } From 1e03ea386b75fbdd8bba01203f059694d0e4c139 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 20 Sep 2024 10:41:45 -0400 Subject: [PATCH 077/361] Revert "base64Decode: clearer error message when an invalid character is detected" We have a safer way of doing this. This reverts commit dc3ccf02bfd4d359228b54f5c24ae2b6caf6428e. 
(cherry picked from commit d0c351bf4392e76d81b282aaaafdf2c2e0a64c69) --- src/libutil/util.cc | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 174e7ce8fab..698e181a1d1 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -260,9 +260,8 @@ std::string base64Decode(std::string_view s) if (c == '\n') continue; char digit = base64DecodeChars[(unsigned char) c]; - if (digit == npos) { - throw Error("invalid character in Base64 string: '%c' in '%s'", c, s.data()); - } + if (digit == npos) + throw Error("invalid character in Base64 string: '%c'", c); bits += 6; d = d << 6 | digit; From 082f6bb35d4c3d63afeaead5733e253760d0d344 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 17 Sep 2024 15:25:30 -0400 Subject: [PATCH 078/361] Ensure error messages don't leak private key Since #8766, invalid base64 is rendered in errors, but we don't actually want to show this in the case of an invalid private keys. Co-Authored-By: Eelco Dolstra (cherry picked from commit 2b6b03d8df8811ef85605461c030466af84a8761) --- src/libfetchers/git-utils.cc | 8 +++++++- src/libstore/machines.cc | 5 +++-- src/libstore/ssh.cc | 14 ++++++++++++-- src/libstore/ssh.hh | 3 +++ src/libutil/hash.cc | 7 ++++++- src/libutil/signature/local-keys.cc | 29 +++++++++++++++++++++-------- src/libutil/signature/local-keys.hh | 12 ++++++++---- src/libutil/util.cc | 2 +- src/libutil/util.hh | 6 +++++- tests/unit/libexpr/nix_api_expr.cc | 2 +- 10 files changed, 67 insertions(+), 21 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 0bc930ab28e..79ff6e7cd87 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -460,7 +460,13 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this std::string re = R"(Good "git" signature for \* with .* key SHA256:[)"; for (const fetchers::PublicKey & k : publicKeys){ // Calculate sha256 fingerprint from public key and escape the regex symbol '+' to match the key literally - auto fingerprint = trim(hashString(HashAlgorithm::SHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "="); + std::string keyDecoded; + try { + keyDecoded = base64Decode(k.key); + } catch (Error & e) { + e.addTrace({}, "while decoding public key '%s' used for git signature", k.key); + } + auto fingerprint = trim(hashString(HashAlgorithm::SHA256, keyDecoded).to_string(nix::HashFormat::Base64, false), "="); auto escaped_fingerprint = std::regex_replace(fingerprint, std::regex("\\+"), "\\+" ); re += "(" + escaped_fingerprint + ")"; } diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc index 256cf918892..5e038fb28d3 100644 --- a/src/libstore/machines.cc +++ b/src/libstore/machines.cc @@ -159,8 +159,9 @@ static Machine parseBuilderLine(const std::set & defaultSystems, co const auto & str = tokens[fieldIndex]; try { base64Decode(str); - } catch (const Error & e) { - throw FormatError("bad machine specification: a column #%lu in a row: '%s' is not valid base64 string: %s", fieldIndex, line, e.what()); + } catch (FormatError & e) { + e.addTrace({}, "while parsing machine specification at a column #%lu in a row: '%s'", fieldIndex, line); + throw; } return str; }; diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc index e5d623adf3a..f9cb61778ac 100644 --- a/src/libstore/ssh.cc +++ b/src/libstore/ssh.cc @@ -6,6 +6,16 @@ namespace nix { +static std::string parsePublicHostKey(std::string_view host, std::string_view sshPublicHostKey) +{ + try { + return 
base64Decode(sshPublicHostKey); + } catch (Error & e) { + e.addTrace({}, "while decoding ssh public host key for host '%s'", host); + throw; + } +} + SSHMaster::SSHMaster( std::string_view host, std::string_view keyFile, @@ -14,7 +24,7 @@ SSHMaster::SSHMaster( : host(host) , fakeSSH(host == "localhost") , keyFile(keyFile) - , sshPublicHostKey(sshPublicHostKey) + , sshPublicHostKey(parsePublicHostKey(host, sshPublicHostKey)) , useMaster(useMaster && !fakeSSH) , compress(compress) , logFD(logFD) @@ -38,7 +48,7 @@ void SSHMaster::addCommonSSHOpts(Strings & args) std::filesystem::path fileName = state->tmpDir->path() / "host-key"; auto p = host.rfind("@"); std::string thost = p != std::string::npos ? std::string(host, p + 1) : host; - writeFile(fileName.string(), thost + " " + base64Decode(sshPublicHostKey) + "\n"); + writeFile(fileName.string(), thost + " " + sshPublicHostKey + "\n"); args.insert(args.end(), {"-oUserKnownHostsFile=" + fileName.string()}); } if (compress) diff --git a/src/libstore/ssh.hh b/src/libstore/ssh.hh index 19b30e8838f..4097134d055 100644 --- a/src/libstore/ssh.hh +++ b/src/libstore/ssh.hh @@ -14,6 +14,9 @@ private: const std::string host; bool fakeSSH; const std::string keyFile; + /** + * Raw bytes, not Base64 encoding. + */ const std::string sshPublicHostKey; const bool useMaster; const bool compress; diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index ab2a8695dd4..748176d3370 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -245,7 +245,12 @@ Hash::Hash(std::string_view rest, HashAlgorithm algo, bool isSRI) } else if (isSRI || rest.size() == base64Len()) { - auto d = base64Decode(rest); + std::string d; + try { + d = base64Decode(rest); + } catch (Error & e) { + e.addTrace({}, "While decoding hash '%s'", rest); + } if (d.size() != hashSize) throw BadHash("invalid %s hash '%s'", isSRI ? 
"SRI" : "base-64", rest); assert(hashSize); diff --git a/src/libutil/signature/local-keys.cc b/src/libutil/signature/local-keys.cc index 00c4543f2be..70bcb5f33c2 100644 --- a/src/libutil/signature/local-keys.cc +++ b/src/libutil/signature/local-keys.cc @@ -14,17 +14,25 @@ BorrowedCryptoValue BorrowedCryptoValue::parse(std::string_view s) return {s.substr(0, colon), s.substr(colon + 1)}; } -Key::Key(std::string_view s) +Key::Key(std::string_view s, bool sensitiveValue) { auto ss = BorrowedCryptoValue::parse(s); name = ss.name; key = ss.payload; - if (name == "" || key == "") - throw Error("key is corrupt"); - - key = base64Decode(key); + try { + if (name == "" || key == "") + throw FormatError("key is corrupt"); + + key = base64Decode(key); + } catch (Error & e) { + std::string extra; + if (!sensitiveValue) + extra = fmt(" with raw value '%s'", key); + e.addTrace({}, "while decoding key named '%s'%s", name, extra); + throw; + } } std::string Key::to_string() const @@ -33,7 +41,7 @@ std::string Key::to_string() const } SecretKey::SecretKey(std::string_view s) - : Key(s) + : Key{s, true} { if (key.size() != crypto_sign_SECRETKEYBYTES) throw Error("secret key is not valid"); @@ -66,7 +74,7 @@ SecretKey SecretKey::generate(std::string_view name) } PublicKey::PublicKey(std::string_view s) - : Key(s) + : Key{s, false} { if (key.size() != crypto_sign_PUBLICKEYBYTES) throw Error("public key is not valid"); @@ -83,7 +91,12 @@ bool PublicKey::verifyDetached(std::string_view data, std::string_view sig) cons bool PublicKey::verifyDetachedAnon(std::string_view data, std::string_view sig) const { - auto sig2 = base64Decode(sig); + std::string sig2; + try { + sig2 = base64Decode(sig); + } catch (Error & e) { + e.addTrace({}, "while decoding signature '%s'", sig); + } if (sig2.size() != crypto_sign_BYTES) throw Error("signature is not valid"); diff --git a/src/libutil/signature/local-keys.hh b/src/libutil/signature/local-keys.hh index 4aafc123944..9977f0dac6e 100644 --- a/src/libutil/signature/local-keys.hh +++ b/src/libutil/signature/local-keys.hh @@ -31,15 +31,19 @@ struct Key std::string name; std::string key; + std::string to_string() const; + +protected: + /** * Construct Key from a string in the format * ‘:’. + * + * @param sensitiveValue Avoid displaying the raw Base64 in error + * messages to avoid leaking private keys. */ - Key(std::string_view s); - - std::string to_string() const; + Key(std::string_view s, bool sensitiveValue); -protected: Key(std::string_view name, std::string && key) : name(name), key(std::move(key)) { } }; diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 698e181a1d1..7a79e424982 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -261,7 +261,7 @@ std::string base64Decode(std::string_view s) char digit = base64DecodeChars[(unsigned char) c]; if (digit == npos) - throw Error("invalid character in Base64 string: '%c'", c); + throw FormatError("invalid character in Base64 string: '%c'", c); bits += 6; d = d << 6 | digit; diff --git a/src/libutil/util.hh b/src/libutil/util.hh index 877d1527945..9fbc710cc51 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -210,9 +210,13 @@ constexpr char treeNull[] = " "; /** - * Base64 encoding/decoding. + * Encode arbitrary bytes as Base64. */ std::string base64Encode(std::string_view s); + +/** + * Decode arbitrary bytes to Base64. 
+ */ std::string base64Decode(std::string_view s); diff --git a/tests/unit/libexpr/nix_api_expr.cc b/tests/unit/libexpr/nix_api_expr.cc index 8b97d692345..b37ac44b317 100644 --- a/tests/unit/libexpr/nix_api_expr.cc +++ b/tests/unit/libexpr/nix_api_expr.cc @@ -8,7 +8,7 @@ #include "tests/nix_api_expr.hh" #include "tests/string_callback.hh" -#include "gmock/gmock.h" +#include #include namespace nixC { From d4824c8ff7567e35760f211a52f7766947e52a9f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 23 Sep 2024 15:09:44 +0200 Subject: [PATCH 079/361] builtin:fetchurl: Enable TLS verification This is better for privacy and to avoid leaking netrc credentials in a MITM attack, but also the assumption that we check the hash no longer holds in some cases (in particular for impure derivations). Partially reverts https://github.com/NixOS/nix/commit/5db358d4d78aea7204a8f22c5bf2a309267ee038. (cherry picked from commit c04bc17a5a0fdcb725a11ef6541f94730112e7b6) --- src/libstore/builtins/fetchurl.cc | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index b9dfeba2f8e..f33060c3307 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -38,10 +38,7 @@ void builtinFetchurl( auto source = sinkToSource([&](Sink & sink) { - /* No need to do TLS verification, because we check the hash of - the result anyway. */ FileTransferRequest request(url); - request.verifyTLS = false; request.decompress = false; auto decompressor = makeDecompressionSink( From ee6a5faf4b39978adb3095970ac140a91ec896cc Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 24 Sep 2024 16:13:28 +0200 Subject: [PATCH 080/361] Add a test for builtin:fetchurl cert verification (cherry picked from commit f2f47fa725fc87bfb536de171a2ea81f2789c9fb) # Conflicts: # tests/nixos/default.nix --- tests/nixos/default.nix | 11 ++++++ tests/nixos/fetchurl.nix | 78 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 89 insertions(+) create mode 100644 tests/nixos/fetchurl.nix diff --git a/tests/nixos/default.nix b/tests/nixos/default.nix index c0c7b42fd9b..7612ce5f914 100644 --- a/tests/nixos/default.nix +++ b/tests/nixos/default.nix @@ -146,4 +146,15 @@ in functional_root = runNixOSTestFor "x86_64-linux" ./functional/as-root.nix; user-sandboxing = runNixOSTestFor "x86_64-linux" ./user-sandboxing; +<<<<<<< HEAD +======= + + s3-binary-cache-store = runNixOSTestFor "x86_64-linux" ./s3-binary-cache-store.nix; + + fsync = runNixOSTestFor "x86_64-linux" ./fsync.nix; + + cgroups = runNixOSTestFor "x86_64-linux" ./cgroups; + + fetchurl = runNixOSTestFor "x86_64-linux" ./fetchurl.nix; +>>>>>>> f2f47fa72 (Add a test for builtin:fetchurl cert verification) } diff --git a/tests/nixos/fetchurl.nix b/tests/nixos/fetchurl.nix new file mode 100644 index 00000000000..476f779bcc3 --- /dev/null +++ b/tests/nixos/fetchurl.nix @@ -0,0 +1,78 @@ +# Test whether builtin:fetchurl properly performs TLS certificate +# checks on HTTPS servers. + +{ lib, config, pkgs, ... }: + +let + + makeTlsCert = name: pkgs.runCommand name { + nativeBuildInputs = with pkgs; [ openssl ]; + } '' + mkdir -p $out + openssl req -x509 \ + -subj '/CN=${name}/' -days 49710 \ + -addext 'subjectAltName = DNS:${name}' \ + -keyout "$out/key.pem" -newkey ed25519 \ + -out "$out/cert.pem" -noenc + ''; + + goodCert = makeTlsCert "good"; + badCert = makeTlsCert "bad"; + +in + +{ + name = "nss-preload"; + + nodes = { + machine = { lib, pkgs, ... 
}: { + services.nginx = { + enable = true; + + virtualHosts."good" = { + addSSL = true; + sslCertificate = "${goodCert}/cert.pem"; + sslCertificateKey = "${goodCert}/key.pem"; + root = pkgs.runCommand "nginx-root" {} '' + mkdir "$out" + echo 'hello world' > "$out/index.html" + ''; + }; + + virtualHosts."bad" = { + addSSL = true; + sslCertificate = "${badCert}/cert.pem"; + sslCertificateKey = "${badCert}/key.pem"; + root = pkgs.runCommand "nginx-root" {} '' + mkdir "$out" + echo 'foobar' > "$out/index.html" + ''; + }; + }; + + security.pki.certificateFiles = [ "${goodCert}/cert.pem" ]; + + networking.hosts."127.0.0.1" = [ "good" "bad" ]; + + virtualisation.writableStore = true; + + nix.settings.experimental-features = "nix-command"; + }; + }; + + testScript = { nodes, ... }: '' + machine.wait_for_unit("nginx") + machine.wait_for_open_port(443) + + out = machine.succeed("curl https://good/index.html") + assert out == "hello world\n" + + # Fetching from a server with a trusted cert should work. + machine.succeed("nix build --no-substitute --expr 'import { url = \"https://good/index.html\"; hash = \"sha256-qUiQTy8PR5uPgZdpSzAYSw0u0cHNKh7A+4XSmaGSpEc=\"; }'") + + # Fetching from a server with an untrusted cert should fail. + err = machine.fail("nix build --no-substitute --expr 'import { url = \"https://bad/index.html\"; hash = \"sha256-rsBwZF/lPuOzdjBZN2E08FjMM3JHyXit0Xi2zN+wAZ8=\"; }' 2>&1") + print(err) + assert "SSL certificate problem: self-signed certificate" in err + ''; +} From 345a264a39a40e891587553d41db2989a36e2065 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 25 Sep 2024 22:33:50 +0200 Subject: [PATCH 081/361] Add release note (cherry picked from commit 7b39cd631e0d3c3d238015c6f450c59bbc9cbc5b) --- doc/manual/rl-next/verify-tls.md | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 doc/manual/rl-next/verify-tls.md diff --git a/doc/manual/rl-next/verify-tls.md b/doc/manual/rl-next/verify-tls.md new file mode 100644 index 00000000000..489941d5bc4 --- /dev/null +++ b/doc/manual/rl-next/verify-tls.md @@ -0,0 +1,8 @@ +--- +synopsis: "`` uses TLS verification" +prs: [11585] +--- + +Previously `` did not do TLS verification. This was because the Nix sandbox in the past did not have access to TLS certificates, and Nix checks the hash of the fetched file anyway. However, this can expose authentication data from `netrc` and URLs to man-in-the-middle attackers. In addition, Nix now in some cases (such as when using impure derivations) does *not* check the hash. Therefore we have now enabled TLS verification. This means that downloads by `` will now fail if you're fetching from a HTTPS server that does not have a valid certificate. + +`` is also known as the builtin derivation builder `builtin:fetchurl`. It's not to be confused with the evaluation-time function `builtins.fetchurl`, which was not affected by this issues. From e87be60055fd17895f3d9713f837d73f85bcf48d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 26 Sep 2024 00:15:04 +0200 Subject: [PATCH 082/361] Typo (cherry picked from commit ef8987955be337976ae229c44870cf6adc43bba5) --- doc/manual/rl-next/verify-tls.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/rl-next/verify-tls.md b/doc/manual/rl-next/verify-tls.md index 489941d5bc4..afc689f46a9 100644 --- a/doc/manual/rl-next/verify-tls.md +++ b/doc/manual/rl-next/verify-tls.md @@ -5,4 +5,4 @@ prs: [11585] Previously `` did not do TLS verification. 
This was because the Nix sandbox in the past did not have access to TLS certificates, and Nix checks the hash of the fetched file anyway. However, this can expose authentication data from `netrc` and URLs to man-in-the-middle attackers. In addition, Nix now in some cases (such as when using impure derivations) does *not* check the hash. Therefore we have now enabled TLS verification. This means that downloads by `` will now fail if you're fetching from a HTTPS server that does not have a valid certificate. -`` is also known as the builtin derivation builder `builtin:fetchurl`. It's not to be confused with the evaluation-time function `builtins.fetchurl`, which was not affected by this issues. +`` is also known as the builtin derivation builder `builtin:fetchurl`. It's not to be confused with the evaluation-time function `builtins.fetchurl`, which was not affected by this issue. From ba8159801770df18435de8f1cc63b3b523ab65ec Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 26 Sep 2024 00:17:03 +0200 Subject: [PATCH 083/361] Resolve conflict --- tests/nixos/default.nix | 9 --------- 1 file changed, 9 deletions(-) diff --git a/tests/nixos/default.nix b/tests/nixos/default.nix index 7612ce5f914..313dc2f3cd1 100644 --- a/tests/nixos/default.nix +++ b/tests/nixos/default.nix @@ -146,15 +146,6 @@ in functional_root = runNixOSTestFor "x86_64-linux" ./functional/as-root.nix; user-sandboxing = runNixOSTestFor "x86_64-linux" ./user-sandboxing; -<<<<<<< HEAD -======= - - s3-binary-cache-store = runNixOSTestFor "x86_64-linux" ./s3-binary-cache-store.nix; - - fsync = runNixOSTestFor "x86_64-linux" ./fsync.nix; - - cgroups = runNixOSTestFor "x86_64-linux" ./cgroups; fetchurl = runNixOSTestFor "x86_64-linux" ./fetchurl.nix; ->>>>>>> f2f47fa72 (Add a test for builtin:fetchurl cert verification) } From b23812a59c6854378f042e33f5e006c4d9dc516a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 26 Sep 2024 03:25:40 +0200 Subject: [PATCH 084/361] Bump version --- .version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version b/.version index 4ee8b99322b..358c8e60ec9 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.24.8 +2.24.9 From 34fd00accce3d0f1efe12e89735542a707e6e89d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Tue, 24 Sep 2024 08:02:57 +0200 Subject: [PATCH 085/361] create git caches atomically When working on speeding up the CI, I triggered a race condition in the creation of the tarball cache. This code now instead will ensure that half-initialized repositories are no longer visible to any other nix process. 
This is the error message that I got before: error: opening Git repository '"/Users/runner/.cache/nix/tarball-cache"': could not find repository at '/Users/runner/.cache/nix/tarball-cache' (cherry picked from commit 12d5b2cfa1e77816abc9c7c6989afaead9723bbc) --- src/libfetchers/git-utils.cc | 32 +++++++++++++++++++++++++------- 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 79ff6e7cd87..e45590b801d 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -159,6 +159,27 @@ static Object peelToTreeOrBlob(git_object * obj) return peelObject(obj, GIT_OBJECT_TREE); } +static void initRepoAtomically(std::filesystem::path &path, bool bare) { + if (pathExists(path.string())) return; + + Path tmpDir = createTempDir(std::filesystem::path(path).parent_path()); + AutoDelete delTmpDir(tmpDir, true); + Repository tmpRepo; + + if (git_repository_init(Setter(tmpRepo), tmpDir.c_str(), bare)) + throw Error("creating Git repository %s: %s", path, git_error_last()->message); + try { + std::filesystem::rename(tmpDir, path); + } catch (std::filesystem::filesystem_error & e) { + if (e.code() == std::errc::file_exists) // Someone might race us to create the repository. + return; + else + throw SysError("moving temporary git repository from %s to %s", tmpDir, path); + } + // we successfully moved the repository, so the temporary directory no longer exists. + delTmpDir.cancel(); +} + struct GitRepoImpl : GitRepo, std::enable_shared_from_this { /** Location of the repository on disk. */ @@ -170,13 +191,10 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this { initLibGit2(); - if (pathExists(path.string())) { - if (git_repository_open(Setter(repo), path.string().c_str())) - throw Error("opening Git repository '%s': %s", path, git_error_last()->message); - } else { - if (git_repository_init(Setter(repo), path.string().c_str(), bare)) - throw Error("creating Git repository '%s': %s", path, git_error_last()->message); - } + initRepoAtomically(path, bare); + if (git_repository_open(Setter(repo), path.string().c_str())) + throw Error("opening Git repository '%s': %s", path, git_error_last()->message); + } operator git_repository * () From 15a2b49115f2b8fcb6152afd7209e147d7042685 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 27 Sep 2024 00:16:52 +0200 Subject: [PATCH 086/361] HttpBinaryCacheStore::getFile(): Fix uncaught exception This method is marked as `noexcept`, but `enqueueFileTransfer()` can throw `Interrupted` if the user has hit Ctrl-C or if the `ThreadPool` that the thread is a part of is shutting down. 
(cherry picked from commit 4566854981423ec36c1c7987ea2bcaba619b5d4e) --- src/libstore/http-binary-cache-store.cc | 37 +++++++++++++------------ 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index b15ef4e4cba..fc7ac2deac8 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -169,28 +169,29 @@ class HttpBinaryCacheStore : public virtual HttpBinaryCacheStoreConfig, public v { try { checkEnabled(); + + auto request(makeRequest(path)); + + auto callbackPtr = std::make_shared(std::move(callback)); + + getFileTransfer()->enqueueFileTransfer(request, + {[callbackPtr, this](std::future result) { + try { + (*callbackPtr)(std::move(result.get().data)); + } catch (FileTransferError & e) { + if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden) + return (*callbackPtr)({}); + maybeDisable(); + callbackPtr->rethrow(); + } catch (...) { + callbackPtr->rethrow(); + } + }}); + } catch (...) { callback.rethrow(); return; } - - auto request(makeRequest(path)); - - auto callbackPtr = std::make_shared(std::move(callback)); - - getFileTransfer()->enqueueFileTransfer(request, - {[callbackPtr, this](std::future result) { - try { - (*callbackPtr)(std::move(result.get().data)); - } catch (FileTransferError & e) { - if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden) - return (*callbackPtr)({}); - maybeDisable(); - callbackPtr->rethrow(); - } catch (...) { - callbackPtr->rethrow(); - } - }}); } /** From a1d841bf2c387a805ebdd165f2511aff9f6e63ec Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Sat, 28 Sep 2024 00:05:03 +0200 Subject: [PATCH 087/361] Bump version --- .version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version b/.version index 358c8e60ec9..588b4a3cc9b 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.24.9 +2.24.10 From 742eb0f8159c2b22470ec7b6c5c0e9a99c008349 Mon Sep 17 00:00:00 2001 From: Puck Meerburg Date: Sat, 28 Sep 2024 16:54:39 +0200 Subject: [PATCH 088/361] fix passing CA files into builtins:fetchurl sandbox MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This patch has been manually adapted from https://github.com/lix-project/lix/commit/14dc84ed03f1b7e5a41bb6fdce00916faab32b60 Tested with: $ NIX_SSL_CERT_FILE=$(nix-build '' -A cacert)/etc/ssl/certs/ca-bundle.crt nix-build --store $(mktemp -d) -E 'import { url = https://google.com; }' Finished at 16:57:50 after 1s warning: found empty hash, assuming 'sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=' this derivation will be built: nix-output-monitor error: DerivationReadError /nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv: openFile: does not exist (No such file or directory) /nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv nix-output-monitor error: DerivationReadError /nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv: openFile: does not exist (No such file or directory) nix-output-monitor error: DerivationReadError /nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv: openFile: does not exist (No such file or directory) nix-output-monitor error: DerivationReadError /nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv: openFile: does not exist (No such file or directory) google.com> building '/nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv' nix-output-monitor error: DerivationReadError 
/nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv: openFile: does not exist (No such file or directory) google.com> error: nix-output-monitor error: DerivationReadError /nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv: openFile: does not exist (No such file or directory) google.com> … writing file '/nix/store/0zynn4n8yx59bczy1mgh1lq2rnprvvrc-google.com' nix-output-monitor error: DerivationReadError /nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv: openFile: does not exist (No such file or directory) google.com> nix-output-monitor error: DerivationReadError /nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv: openFile: does not exist (No such file or directory) google.com> error: unable to download 'https://google.com': Problem with the SSL CA cert (path? access rights?) (77) error setting certificate file: /nix/store/nlgbippbbgn38hynjkp1ghiybcq1dqhx-nss-cacert-3.101.1/etc/ssl/certs/ca-bundle.crt nix-output-monitor error: DerivationReadError /nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv: openFile: does not exist (No such file or directory) nix-output-monitor error: DerivationReadError /nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv: openFile: does not exist (No such file or directory) error: builder for '/nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv' failed with exit code 1 Now returns: nix-env % NIX_SSL_CERT_FILE=$(nix-build '' -A cacert)/etc/ssl/certs/ca-bundle.crt nix-build --store $(mktemp -d) -E 'import { url = https://google.com; }' Finished at 17:05:48 after 0s warning: found empty hash, assuming 'sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=' this derivation will be built: nix-output-monitor error: DerivationReadError /nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv: openFile: does not exist (No such file or directory) /nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv nix-output-monitor error: DerivationReadError /nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv: openFile: does not exist (No such file or directory) nix-output-monitor error: DerivationReadError /nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv: openFile: does not exist (No such file or directory) nix-output-monitor error: DerivationReadError /nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv: openFile: does not exist (No such file or directory) google.com> building '/nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv' nix-output-monitor error: DerivationReadError /nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv: openFile: does not exist (No such file or directory) nix-output-monitor error: DerivationReadError /nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv: openFile: does not exist (No such file or directory) error: hash mismatch in fixed-output derivation '/nix/store/4qljhy0jj2b0abjzpsbyarpia1bqylwc-google.com.drv': specified: sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= (cherry picked from commit c1ecf0bee973e620c9282bd71ddf1a5710968249) --- src/libstore/builtins.hh | 3 ++- src/libstore/builtins/fetchurl.cc | 6 +++++- .../unix/build/local-derivation-goal.cc | 21 ++++++++++++------- tests/nixos/fetchurl.nix | 6 ++++++ 4 files changed, 27 insertions(+), 9 deletions(-) diff --git a/src/libstore/builtins.hh b/src/libstore/builtins.hh index 93558b49e23..091946e013a 100644 --- a/src/libstore/builtins.hh +++ b/src/libstore/builtins.hh @@ -9,7 +9,8 @@ namespace nix { void builtinFetchurl( const BasicDerivation & drv, const std::map & outputs, - const std::string & 
netrcData); + const std::string & netrcData, + const std::string & caFileData); void builtinUnpackChannel( const BasicDerivation & drv, diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index f33060c3307..90e58dfdb3d 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -9,7 +9,8 @@ namespace nix { void builtinFetchurl( const BasicDerivation & drv, const std::map & outputs, - const std::string & netrcData) + const std::string & netrcData, + const std::string & caFileData) { /* Make the host's netrc data available. Too bad curl requires this to be stored in a file. It would be nice if we could just @@ -19,6 +20,9 @@ void builtinFetchurl( writeFile(settings.netrcFile, netrcData, 0600); } + settings.caFile = "ca-certificates.crt"; + writeFile(settings.caFile, caFileData, 0600); + auto out = get(drv.outputs, "out"); if (!out) throw Error("'builtin:fetchurl' requires an 'out' output"); diff --git a/src/libstore/unix/build/local-derivation-goal.cc b/src/libstore/unix/build/local-derivation-goal.cc index c9a54bb0ffa..54ca69580fa 100644 --- a/src/libstore/unix/build/local-derivation-goal.cc +++ b/src/libstore/unix/build/local-derivation-goal.cc @@ -1746,13 +1746,20 @@ void LocalDerivationGoal::runChild() bool setUser = true; - /* Make the contents of netrc available to builtin:fetchurl - (which may run under a different uid and/or in a sandbox). */ + /* Make the contents of netrc and the CA certificate bundle + available to builtin:fetchurl (which may run under a + different uid and/or in a sandbox). */ std::string netrcData; - try { - if (drv->isBuiltin() && drv->builder == "builtin:fetchurl") - netrcData = readFile(settings.netrcFile); - } catch (SystemError &) { } + std::string caFileData; + if (drv->isBuiltin() && drv->builder == "builtin:fetchurl") { + try { + netrcData = readFile(settings.netrcFile); + } catch (SystemError &) { } + + try { + caFileData = readFile(settings.caFile); + } catch (SystemError &) { } + } #if __linux__ if (useChroot) { @@ -2191,7 +2198,7 @@ void LocalDerivationGoal::runChild() worker.store.printStorePath(scratchOutputs.at(e.first))); if (drv->builder == "builtin:fetchurl") - builtinFetchurl(*drv, outputs, netrcData); + builtinFetchurl(*drv, outputs, netrcData, caFileData); else if (drv->builder == "builtin:buildenv") builtinBuildenv(*drv, outputs); else if (drv->builder == "builtin:unpack-channel") diff --git a/tests/nixos/fetchurl.nix b/tests/nixos/fetchurl.nix index 476f779bcc3..f873bf4b56f 100644 --- a/tests/nixos/fetchurl.nix +++ b/tests/nixos/fetchurl.nix @@ -67,6 +67,9 @@ in out = machine.succeed("curl https://good/index.html") assert out == "hello world\n" + out = machine.succeed("cat ${badCert}/cert.pem > /tmp/cafile.pem; curl --cacert /tmp/cafile.pem https://bad/index.html") + assert out == "foobar\n" + # Fetching from a server with a trusted cert should work. machine.succeed("nix build --no-substitute --expr 'import { url = \"https://good/index.html\"; hash = \"sha256-qUiQTy8PR5uPgZdpSzAYSw0u0cHNKh7A+4XSmaGSpEc=\"; }'") @@ -74,5 +77,8 @@ in err = machine.fail("nix build --no-substitute --expr 'import { url = \"https://bad/index.html\"; hash = \"sha256-rsBwZF/lPuOzdjBZN2E08FjMM3JHyXit0Xi2zN+wAZ8=\"; }' 2>&1") print(err) assert "SSL certificate problem: self-signed certificate" in err + + # Fetching from a server with a trusted cert should work via environment variable override. 
+ machine.succeed("NIX_SSL_CERT_FILE=/tmp/cafile.pem nix build --no-substitute --expr 'import { url = \"https://bad/index.html\"; hash = \"sha256-rsBwZF/lPuOzdjBZN2E08FjMM3JHyXit0Xi2zN+wAZ8=\"; }'") ''; } From 5f1b132187651dddfc9435c5e0a83737d016c780 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Sat, 28 Sep 2024 17:06:10 +0200 Subject: [PATCH 089/361] tests/nixos/fetchurl: drop unused variables (cherry picked from commit 410853ddcf91910bd4db7421b3df756e25a4fbbd) --- tests/nixos/fetchurl.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/nixos/fetchurl.nix b/tests/nixos/fetchurl.nix index f873bf4b56f..243c0cacc6e 100644 --- a/tests/nixos/fetchurl.nix +++ b/tests/nixos/fetchurl.nix @@ -1,7 +1,7 @@ # Test whether builtin:fetchurl properly performs TLS certificate # checks on HTTPS servers. -{ lib, config, pkgs, ... }: +{ pkgs, ... }: let @@ -25,7 +25,7 @@ in name = "nss-preload"; nodes = { - machine = { lib, pkgs, ... }: { + machine = { pkgs, ... }: { services.nginx = { enable = true; @@ -60,7 +60,7 @@ in }; }; - testScript = { nodes, ... }: '' + testScript = '' machine.wait_for_unit("nginx") machine.wait_for_open_port(443) From d80bf54e3b61b296a8944e2c95088c37661b0deb Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 5 Aug 2024 11:38:38 +0200 Subject: [PATCH 090/361] Add a VM test for S3BinaryCacheStore Fixes #11238. (cherry picked from commit 2950f9e18af1bd57b566b8c0b4df71022edb3b80) --- tests/nixos/default.nix | 2 + tests/nixos/nix-copy-closure.nix | 2 +- tests/nixos/s3-binary-cache-store.nix | 63 +++++++++++++++++++++++++++ 3 files changed, 66 insertions(+), 1 deletion(-) create mode 100644 tests/nixos/s3-binary-cache-store.nix diff --git a/tests/nixos/default.nix b/tests/nixos/default.nix index 313dc2f3cd1..e79bb59b8de 100644 --- a/tests/nixos/default.nix +++ b/tests/nixos/default.nix @@ -148,4 +148,6 @@ in user-sandboxing = runNixOSTestFor "x86_64-linux" ./user-sandboxing; fetchurl = runNixOSTestFor "x86_64-linux" ./fetchurl.nix; + + s3-binary-cache-store = runNixOSTestFor "x86_64-linux" ./s3-binary-cache-store.nix; } diff --git a/tests/nixos/nix-copy-closure.nix b/tests/nixos/nix-copy-closure.nix index 66cbfb0338d..b9daa0a1f90 100644 --- a/tests/nixos/nix-copy-closure.nix +++ b/tests/nixos/nix-copy-closure.nix @@ -1,6 +1,6 @@ # Test ‘nix-copy-closure’. -{ lib, config, nixpkgs, hostPkgs, ... }: +{ lib, config, nixpkgs, ... }: let pkgs = config.nodes.client.nixpkgs.pkgs; diff --git a/tests/nixos/s3-binary-cache-store.nix b/tests/nixos/s3-binary-cache-store.nix new file mode 100644 index 00000000000..0154579680e --- /dev/null +++ b/tests/nixos/s3-binary-cache-store.nix @@ -0,0 +1,63 @@ +{ lib, config, nixpkgs, ... }: + +let + pkgs = config.nodes.client.nixpkgs.pkgs; + + pkgA = pkgs.cowsay; + + accessKey = "BKIKJAA5BMMU2RHO6IBB"; + secretKey = "V7f1CwQqAcwo80UEIJEjc5gVQUSSx5ohQ9GSrr12"; + env = "AWS_ACCESS_KEY_ID=${accessKey} AWS_SECRET_ACCESS_KEY=${secretKey}"; + + storeUrl = "s3://my-cache?endpoint=http://server:9000®ion=eu-west-1"; + +in { + name = "nix-copy-closure"; + + nodes = + { server = + { config, lib, pkgs, ... 
}: + { virtualisation.writableStore = true; + virtualisation.additionalPaths = [ pkgA ]; + environment.systemPackages = [ pkgs.minio-client ]; + nix.extraOptions = "experimental-features = nix-command"; + services.minio = { + enable = true; + region = "eu-west-1"; + rootCredentialsFile = pkgs.writeText "minio-credentials-full" '' + MINIO_ROOT_USER=${accessKey} + MINIO_ROOT_PASSWORD=${secretKey} + ''; + }; + networking.firewall.allowedTCPPorts = [ 9000 ]; + }; + + client = + { config, pkgs, ... }: + { virtualisation.writableStore = true; + nix.extraOptions = "experimental-features = nix-command"; + }; + }; + + testScript = { nodes }: '' + # fmt: off + start_all() + + # Create a binary cache. + server.wait_for_unit("minio") + + server.succeed("mc config host add minio http://localhost:9000 ${accessKey} ${secretKey} --api s3v4") + server.succeed("mc mb minio/my-cache") + + server.succeed("${env} nix copy --to '${storeUrl}' ${pkgA}") + + # Copy a package from the binary cache. + client.fail("nix path-info ${pkgA}") + + client.succeed("${env} nix store info --store '${storeUrl}' >&2") + + client.succeed("${env} nix copy --no-check-sigs --from '${storeUrl}' ${pkgA}") + + client.succeed("nix path-info ${pkgA}") + ''; +} From 4912a9e7fdd69b9b66437a94a86eb04789f2fd12 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 11 Oct 2024 14:31:15 +0200 Subject: [PATCH 091/361] builtins.fetchurl: Fix segfault on s3:// URLs Also, add an activity to show that we're downloading an s3:// file. Fixes #11674. (cherry picked from commit 0500fba56a02c3c8458d257b6ea24af1c81c8b9e) --- src/libstore/filetransfer.cc | 5 +++++ tests/nixos/s3-binary-cache-store.nix | 3 +++ 2 files changed, 8 insertions(+) diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index 5ea8b6f962c..b8421080538 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -754,12 +754,17 @@ struct curlFileTransfer : public FileTransfer S3Helper s3Helper(profile, region, scheme, endpoint); + Activity act(*logger, lvlTalkative, actFileTransfer, + fmt("downloading '%s'", request.uri), + {request.uri}, request.parentAct); + // FIXME: implement ETag auto s3Res = s3Helper.getObject(bucketName, key); FileTransferResult res; if (!s3Res.data) throw FileTransferError(NotFound, "S3 object '%s' does not exist", request.uri); res.data = std::move(*s3Res.data); + res.urls.push_back(request.uri); callback(std::move(res)); #else throw nix::Error("cannot download '%s' because Nix is not built with S3 support", request.uri); diff --git a/tests/nixos/s3-binary-cache-store.nix b/tests/nixos/s3-binary-cache-store.nix index 0154579680e..6ae2e357295 100644 --- a/tests/nixos/s3-binary-cache-store.nix +++ b/tests/nixos/s3-binary-cache-store.nix @@ -51,6 +51,9 @@ in { server.succeed("${env} nix copy --to '${storeUrl}' ${pkgA}") + # Test fetchurl on s3:// URLs while we're at it. + client.succeed("${env} nix eval --impure --expr 'builtins.fetchurl { name = \"foo\"; url = \"s3://my-cache/nix-cache-info?endpoint=http://server:9000®ion=eu-west-1\"; }'") + # Copy a package from the binary cache. 
client.fail("nix path-info ${pkgA}") From 339236d32ef337cdc5fb3e1e964f7ee92d7141f6 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 11 Oct 2024 14:55:22 +0200 Subject: [PATCH 092/361] Make S3 downloads slightly more interruptable (cherry picked from commit d38f62f64d389cb4e9a582d89aa3f8a50fb3c074) --- src/libstore/s3-binary-cache-store.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 21175b1ebfd..bcbf0b55ebc 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -9,6 +9,7 @@ #include "globals.hh" #include "compression.hh" #include "filetransfer.hh" +#include "signals.hh" #include #include @@ -117,6 +118,7 @@ class RetryStrategy : public Aws::Client::DefaultRetryStrategy { bool ShouldRetry(const Aws::Client::AWSError& error, long attemptedRetries) const override { + checkInterrupt(); auto retry = Aws::Client::DefaultRetryStrategy::ShouldRetry(error, attemptedRetries); if (retry) printError("AWS error '%s' (%s), will retry in %d ms", From 1294442c6cc6a2ee883f9dd932ad5139f5b35a92 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 14 Oct 2024 13:15:55 +0200 Subject: [PATCH 093/361] Add assert (cherry picked from commit d2f4d076195f048146fa64916283a524f6820380) --- src/libfetchers/tarball.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index dd4f3b78086..52ba73f6235 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -90,6 +90,7 @@ DownloadFileResult downloadFile( /* Cache metadata for all URLs in the redirect chain. */ for (auto & url : res.urls) { key.second.insert_or_assign("url", url); + assert(!res.urls.empty()); infoAttrs.insert_or_assign("url", *res.urls.rbegin()); getCache()->upsert(key, *store, infoAttrs, *storePath); } From 9da1300617891a5f71e7ec5d8380aaa1e4cf2240 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 14 Oct 2024 13:53:54 +0200 Subject: [PATCH 094/361] Handle tarballs where directory entries are not contiguous I.e. when not all entries underneath a directory X follow eachother, but there is some entry Y that isn't a child of X in between. Fixes #11656. (cherry picked from commit 4012954b596b725dd61d49668691a69d491120c3) --- src/libfetchers/git-utils.cc | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index e45590b801d..6efb453ec13 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -855,8 +855,24 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink void pushBuilder(std::string name) { + const git_tree_entry * entry; + Tree prevTree = nullptr; + + if (!pendingDirs.empty() && + (entry = git_treebuilder_get(pendingDirs.back().builder.get(), name.c_str()))) + { + /* Clone a tree that we've already finished. This happens + if a tarball has directory entries that are not + contiguous. 
*/ + if (git_tree_entry_type(entry) != GIT_OBJECT_TREE) + throw Error("parent of '%s' is not a directory", name); + + if (git_tree_entry_to_object((git_object * *) (git_tree * *) Setter(prevTree), *repo, entry)) + throw Error("looking up parent of '%s': %s", name, git_error_last()->message); + } + git_treebuilder * b; - if (git_treebuilder_new(&b, *repo, nullptr)) + if (git_treebuilder_new(&b, *repo, prevTree.get())) throw Error("creating a tree builder: %s", git_error_last()->message); pendingDirs.push_back({ .name = std::move(name), .builder = TreeBuilder(b) }); }; From 57ace600af864f2d06bdf7391de316a26827047a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 14 Oct 2024 14:10:36 +0200 Subject: [PATCH 095/361] Add a test (cherry picked from commit a7b9877da9d1bdafcc9b2f4681ecb3a1b83de7fc) --- tests/functional/tarball.sh | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/tests/functional/tarball.sh b/tests/functional/tarball.sh index 4d89456255f..a1e0f9cb054 100755 --- a/tests/functional/tarball.sh +++ b/tests/functional/tarball.sh @@ -100,3 +100,17 @@ chmod +x "$TEST_ROOT/tar_root/foo" tar cvf "$TEST_ROOT/tar.tar" -C "$TEST_ROOT/tar_root" . path="$(nix flake prefetch --refresh --json "tarball+file://$TEST_ROOT/tar.tar" | jq -r .storePath)" [[ $(cat "$path/foo") = bar ]] + +# Test a tarball with non-contiguous directory entries. +rm -rf "$TEST_ROOT/tar_root" +mkdir -p "$TEST_ROOT/tar_root/a/b" +echo foo > "$TEST_ROOT/tar_root/a/b/foo" +echo bla > "$TEST_ROOT/tar_root/bla" +tar cvf "$TEST_ROOT/tar.tar" -C "$TEST_ROOT/tar_root" . +echo abc > "$TEST_ROOT/tar_root/bla" +echo xyzzy > "$TEST_ROOT/tar_root/a/b/xyzzy" +tar rvf "$TEST_ROOT/tar.tar" -C "$TEST_ROOT/tar_root" ./a/b/xyzzy ./bla +path="$(nix flake prefetch --refresh --json "tarball+file://$TEST_ROOT/tar.tar" | jq -r .storePath)" +[[ $(cat "$path/a/b/xyzzy") = xyzzy ]] +[[ $(cat "$path/a/b/foo") = foo ]] +[[ $(cat "$path/bla") = abc ]] From 0e9b04a66ed4ea5f097a6ba0489a01d9f08e891a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Fri, 18 Oct 2024 12:03:33 +0300 Subject: [PATCH 096/361] fix env-vars beeing written to `/tmp` This overall seems like insecure tmp file handling to me. Because other users could replace files in /tmp with a symlink and make the nix-shell override other files. fixes https://github.com/NixOS/nix/issues/11470 (cherry picked from commit 2105574702b582578c43b551cfe8905715211f03) --- src/nix-build/nix-build.cc | 17 +++++------------ tests/functional/nix-shell.sh | 9 +++++++++ 2 files changed, 14 insertions(+), 12 deletions(-) diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index a5b9e1e548e..5346641ebdc 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -526,8 +526,6 @@ static void main_nix_build(int argc, char * * argv) // Set the environment. 
auto env = getEnv(); - auto tmp = getEnvNonEmpty("TMPDIR").value_or("/tmp"); - if (pure) { decltype(env) newEnv; for (auto & i : env) @@ -538,18 +536,16 @@ static void main_nix_build(int argc, char * * argv) env["__ETC_PROFILE_SOURCED"] = "1"; } - env["NIX_BUILD_TOP"] = env["TMPDIR"] = env["TEMPDIR"] = env["TMP"] = env["TEMP"] = tmp; + env["NIX_BUILD_TOP"] = env["TMPDIR"] = env["TEMPDIR"] = env["TMP"] = env["TEMP"] = tmpDir.path(); env["NIX_STORE"] = store->storeDir; env["NIX_BUILD_CORES"] = std::to_string(settings.buildCores); auto passAsFile = tokenizeString(getOr(drv.env, "passAsFile", "")); - bool keepTmp = false; int fileNr = 0; for (auto & var : drv.env) if (passAsFile.count(var.first)) { - keepTmp = true; auto fn = ".attr-" + std::to_string(fileNr++); Path p = (tmpDir.path() / fn).string(); writeFile(p, var.second); @@ -591,7 +587,6 @@ static void main_nix_build(int argc, char * * argv) env["NIX_ATTRS_SH_FILE"] = attrsSH; env["NIX_ATTRS_JSON_FILE"] = attrsJSON; - keepTmp = true; } } @@ -601,12 +596,10 @@ static void main_nix_build(int argc, char * * argv) lose the current $PATH directories. */ auto rcfile = (tmpDir.path() / "rc").string(); std::string rc = fmt( - R"(_nix_shell_clean_tmpdir() { command rm -rf %1%; }; )"s + - (keepTmp ? - "trap _nix_shell_clean_tmpdir EXIT; " - "exitHooks+=(_nix_shell_clean_tmpdir); " - "failureHooks+=(_nix_shell_clean_tmpdir); ": - "_nix_shell_clean_tmpdir; ") + + (R"(_nix_shell_clean_tmpdir() { command rm -rf %1%; };)"s + "trap _nix_shell_clean_tmpdir EXIT; " + "exitHooks+=(_nix_shell_clean_tmpdir); " + "failureHooks+=(_nix_shell_clean_tmpdir); ") + (pure ? "" : "[ -n \"$PS1\" ] && [ -e ~/.bashrc ] && source ~/.bashrc;") + "%2%" // always clear PATH. diff --git a/tests/functional/nix-shell.sh b/tests/functional/nix-shell.sh index b9625eb666f..b14e3dc6a2d 100755 --- a/tests/functional/nix-shell.sh +++ b/tests/functional/nix-shell.sh @@ -31,6 +31,15 @@ output=$(nix-shell --pure --keep SELECTED_IMPURE_VAR "$shellDotNix" -A shellDrv [ "$output" = " - foo - bar - baz" ] +# test NIX_BUILD_TOP +testTmpDir=$(pwd)/nix-shell +mkdir -p "$testTmpDir" +output=$(TMPDIR="$testTmpDir" nix-shell --pure "$shellDotNix" -A shellDrv --run 'echo $NIX_BUILD_TOP') +[[ "$output" =~ ${testTmpDir}.* ]] || { + echo "expected $output =~ ${testTmpDir}.*" >&2 + exit 1 +} + # Test nix-shell on a .drv [[ $(nix-shell --pure $(nix-instantiate "$shellDotNix" -A shellDrv) --run \ 'echo "$IMPURE_VAR - $VAR_FROM_STDENV_SETUP - $VAR_FROM_NIX - $TEST_inNixShell"') = " - foo - bar - false" ]] From 170242cf0ca3e9fadbad2004126793634d56623e Mon Sep 17 00:00:00 2001 From: Puck Meerburg Date: Fri, 1 Mar 2024 11:42:24 -0500 Subject: [PATCH 097/361] fix: Run all derivation builders inside the sandbox on macOS --- configure.ac | 6 +- package.nix | 2 + .../unix/build/local-derivation-goal.cc | 223 +++++++++--------- 3 files changed, 116 insertions(+), 115 deletions(-) diff --git a/configure.ac b/configure.ac index 5c22ed17636..dff35981bec 100644 --- a/configure.ac +++ b/configure.ac @@ -62,12 +62,16 @@ AC_CHECK_TOOL([AR], [ar]) AC_SYS_LARGEFILE -# Solaris-specific stuff. +# OS-specific stuff. case "$host_os" in solaris*) # Solaris requires -lsocket -lnsl for network functions LDFLAGS="-lsocket -lnsl $LDFLAGS" ;; + darwin*) + # Need to link to libsandbox. 
+ LDFLAGS="-lsandbox $LDFLAGS" + ;; esac diff --git a/package.nix b/package.nix index a7c8923e8b4..fcd1e189843 100644 --- a/package.nix +++ b/package.nix @@ -23,6 +23,7 @@ , libseccomp , libsodium , man +, darwin , lowdown , mdbook , mdbook-linkcheck @@ -235,6 +236,7 @@ in { gtest rapidcheck ] ++ lib.optional stdenv.isLinux libseccomp + ++ lib.optional stdenv.hostPlatform.isDarwin darwin.apple_sdk.libs.sandbox ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid # There have been issues building these dependencies ++ lib.optional (stdenv.hostPlatform == stdenv.buildPlatform && (stdenv.isLinux || stdenv.isDarwin)) diff --git a/src/libstore/unix/build/local-derivation-goal.cc b/src/libstore/unix/build/local-derivation-goal.cc index 54ca69580fa..7ce2661224b 100644 --- a/src/libstore/unix/build/local-derivation-goal.cc +++ b/src/libstore/unix/build/local-derivation-goal.cc @@ -58,6 +58,10 @@ #if __APPLE__ #include #include +#include + +/* This definition is undocumented but depended upon by all major browsers. */ +extern "C" int sandbox_init_with_parameters(const char *profile, uint64_t flags, const char *const parameters[], char **errorbuf); #endif #include @@ -2039,141 +2043,132 @@ void LocalDerivationGoal::runChild() std::string builder = "invalid"; - if (drv->isBuiltin()) { - ; - } #if __APPLE__ - else { - /* This has to appear before import statements. */ - std::string sandboxProfile = "(version 1)\n"; - - if (useChroot) { - - /* Lots and lots and lots of file functions freak out if they can't stat their full ancestry */ - PathSet ancestry; - - /* We build the ancestry before adding all inputPaths to the store because we know they'll - all have the same parents (the store), and there might be lots of inputs. This isn't - particularly efficient... I doubt it'll be a bottleneck in practice */ - for (auto & i : pathsInChroot) { - Path cur = i.first; - while (cur.compare("/") != 0) { - cur = dirOf(cur); - ancestry.insert(cur); - } - } + /* This has to appear before import statements. */ + std::string sandboxProfile = "(version 1)\n"; + + if (useChroot) { - /* And we want the store in there regardless of how empty pathsInChroot. We include the innermost - path component this time, since it's typically /nix/store and we care about that. */ - Path cur = worker.store.storeDir; + /* Lots and lots and lots of file functions freak out if they can't stat their full ancestry */ + PathSet ancestry; + + /* We build the ancestry before adding all inputPaths to the store because we know they'll + all have the same parents (the store), and there might be lots of inputs. This isn't + particularly efficient... I doubt it'll be a bottleneck in practice */ + for (auto & i : pathsInChroot) { + Path cur = i.first; while (cur.compare("/") != 0) { - ancestry.insert(cur); cur = dirOf(cur); + ancestry.insert(cur); } + } - /* Add all our input paths to the chroot */ - for (auto & i : inputPaths) { - auto p = worker.store.printStorePath(i); - pathsInChroot[p] = p; - } - - /* Violations will go to the syslog if you set this. Unfortunately the destination does not appear to be configurable */ - if (settings.darwinLogSandboxViolations) { - sandboxProfile += "(deny default)\n"; - } else { - sandboxProfile += "(deny default (with no-log))\n"; - } + /* And we want the store in there regardless of how empty pathsInChroot. We include the innermost + path component this time, since it's typically /nix/store and we care about that. 
*/ + Path cur = worker.store.storeDir; + while (cur.compare("/") != 0) { + ancestry.insert(cur); + cur = dirOf(cur); + } - sandboxProfile += - #include "sandbox-defaults.sb" - ; + /* Add all our input paths to the chroot */ + for (auto & i : inputPaths) { + auto p = worker.store.printStorePath(i); + pathsInChroot[p] = p; + } - if (!derivationType->isSandboxed()) - sandboxProfile += - #include "sandbox-network.sb" - ; - - /* Add the output paths we'll use at build-time to the chroot */ - sandboxProfile += "(allow file-read* file-write* process-exec\n"; - for (auto & [_, path] : scratchOutputs) - sandboxProfile += fmt("\t(subpath \"%s\")\n", worker.store.printStorePath(path)); - - sandboxProfile += ")\n"; - - /* Our inputs (transitive dependencies and any impurities computed above) - - without file-write* allowed, access() incorrectly returns EPERM - */ - sandboxProfile += "(allow file-read* file-write* process-exec\n"; - for (auto & i : pathsInChroot) { - if (i.first != i.second.source) - throw Error( - "can't map '%1%' to '%2%': mismatched impure paths not supported on Darwin", - i.first, i.second.source); - - std::string path = i.first; - auto optSt = maybeLstat(path.c_str()); - if (!optSt) { - if (i.second.optional) - continue; - throw SysError("getting attributes of required path '%s", path); - } - if (S_ISDIR(optSt->st_mode)) - sandboxProfile += fmt("\t(subpath \"%s\")\n", path); - else - sandboxProfile += fmt("\t(literal \"%s\")\n", path); - } - sandboxProfile += ")\n"; + /* Violations will go to the syslog if you set this. Unfortunately the destination does not appear to be configurable */ + if (settings.darwinLogSandboxViolations) { + sandboxProfile += "(deny default)\n"; + } else { + sandboxProfile += "(deny default (with no-log))\n"; + } - /* Allow file-read* on full directory hierarchy to self. Allows realpath() */ - sandboxProfile += "(allow file-read*\n"; - for (auto & i : ancestry) { - sandboxProfile += fmt("\t(literal \"%s\")\n", i); - } - sandboxProfile += ")\n"; + sandboxProfile += + #include "sandbox-defaults.sb" + ; - sandboxProfile += additionalSandboxProfile; - } else + if (!derivationType->isSandboxed()) sandboxProfile += - #include "sandbox-minimal.sb" + #include "sandbox-network.sb" ; - debug("Generated sandbox profile:"); - debug(sandboxProfile); - - Path sandboxFile = tmpDir + "/.sandbox.sb"; + /* Add the output paths we'll use at build-time to the chroot */ + sandboxProfile += "(allow file-read* file-write* process-exec\n"; + for (auto & [_, path] : scratchOutputs) + sandboxProfile += fmt("\t(subpath \"%s\")\n", worker.store.printStorePath(path)); - writeFile(sandboxFile, sandboxProfile); + sandboxProfile += ")\n"; - bool allowLocalNetworking = parsedDrv->getBoolAttr("__darwinAllowLocalNetworking"); + /* Our inputs (transitive dependencies and any impurities computed above) - /* The tmpDir in scope points at the temporary build directory for our derivation. Some packages try different mechanisms - to find temporary directories, so we want to open up a broader place for them to put their files, if needed. 
*/ - Path globalTmpDir = canonPath(defaultTempDir(), true); + without file-write* allowed, access() incorrectly returns EPERM + */ + sandboxProfile += "(allow file-read* file-write* process-exec\n"; + for (auto & i : pathsInChroot) { + if (i.first != i.second.source) + throw Error( + "can't map '%1%' to '%2%': mismatched impure paths not supported on Darwin", + i.first, i.second.source); + + std::string path = i.first; + auto optSt = maybeLstat(path.c_str()); + if (!optSt) { + if (i.second.optional) + continue; + throw SysError("getting attributes of required path '%s", path); + } + if (S_ISDIR(optSt->st_mode)) + sandboxProfile += fmt("\t(subpath \"%s\")\n", path); + else + sandboxProfile += fmt("\t(literal \"%s\")\n", path); + } + sandboxProfile += ")\n"; - /* They don't like trailing slashes on subpath directives */ - while (!globalTmpDir.empty() && globalTmpDir.back() == '/') - globalTmpDir.pop_back(); + /* Allow file-read* on full directory hierarchy to self. Allows realpath() */ + sandboxProfile += "(allow file-read*\n"; + for (auto & i : ancestry) { + sandboxProfile += fmt("\t(literal \"%s\")\n", i); + } + sandboxProfile += ")\n"; - if (getEnv("_NIX_TEST_NO_SANDBOX") != "1") { - builder = "/usr/bin/sandbox-exec"; - args.push_back("sandbox-exec"); - args.push_back("-f"); - args.push_back(sandboxFile); - args.push_back("-D"); - args.push_back("_GLOBAL_TMP_DIR=" + globalTmpDir); - if (allowLocalNetworking) { - args.push_back("-D"); - args.push_back(std::string("_ALLOW_LOCAL_NETWORKING=1")); - } - args.push_back(drv->builder); - } else { - builder = drv->builder; - args.push_back(std::string(baseNameOf(drv->builder))); + sandboxProfile += additionalSandboxProfile; + } else + sandboxProfile += + #include "sandbox-minimal.sb" + ; + + debug("Generated sandbox profile:"); + debug(sandboxProfile); + + bool allowLocalNetworking = parsedDrv->getBoolAttr("__darwinAllowLocalNetworking"); + + /* The tmpDir in scope points at the temporary build directory for our derivation. Some packages try different mechanisms + to find temporary directories, so we want to open up a broader place for them to put their files, if needed. 
*/ + Path globalTmpDir = canonPath(defaultTempDir(), true); + + /* They don't like trailing slashes on subpath directives */ + while (!globalTmpDir.empty() && globalTmpDir.back() == '/') + globalTmpDir.pop_back(); + + if (getEnv("_NIX_TEST_NO_SANDBOX") != "1") { + Strings sandboxArgs; + sandboxArgs.push_back("_GLOBAL_TMP_DIR"); + sandboxArgs.push_back(globalTmpDir); + if (allowLocalNetworking) { + sandboxArgs.push_back("_ALLOW_LOCAL_NETWORKING"); + sandboxArgs.push_back("1"); + } + if (sandbox_init_with_parameters(sandboxProfile.c_str(), 0, stringsToCharPtrs(sandboxArgs).data(), NULL)) { + writeFull(STDERR_FILENO, "failed to configure sandbox\n"); + _exit(1); } } + + builder = drv->builder; + args.push_back(std::string(baseNameOf(drv->builder))); #else - else { + if (!drv->isBuiltin()) { builder = drv->builder; args.push_back(std::string(baseNameOf(drv->builder))); } From f8a1a149c73113e01c44b73ce9e1005575d52a9a Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 3 Oct 2024 12:23:17 +0200 Subject: [PATCH 098/361] packaging: Add darwin -lsandbox in meson --- src/libstore/meson.build | 5 +++++ src/libstore/package.nix | 2 ++ 2 files changed, 7 insertions(+) diff --git a/src/libstore/meson.build b/src/libstore/meson.build index 50b15e15dc7..b23c85061ee 100644 --- a/src/libstore/meson.build +++ b/src/libstore/meson.build @@ -68,6 +68,11 @@ has_acl_support = cxx.has_header('sys/xattr.h') \ and cxx.has_function('lremovexattr') configdata.set('HAVE_ACL_SUPPORT', has_acl_support.to_int()) +if host_machine.system() == 'darwin' + sandbox = cxx.find_library('sandbox') + deps_other += [sandbox] +endif + subdir('build-utils-meson/threads') boost = dependency( diff --git a/src/libstore/package.nix b/src/libstore/package.nix index 4582ba0d2b0..d98bac16d33 100644 --- a/src/libstore/package.nix +++ b/src/libstore/package.nix @@ -7,6 +7,7 @@ , ninja , pkg-config , unixtools +, darwin , nix-util , boost @@ -65,6 +66,7 @@ mkMesonDerivation (finalAttrs: { sqlite ] ++ lib.optional stdenv.hostPlatform.isLinux libseccomp # There have been issues building these dependencies + ++ lib.optional stdenv.hostPlatform.isDarwin darwin.apple_sdk.libs.sandbox ++ lib.optional (stdenv.hostPlatform == stdenv.buildPlatform && (stdenv.isLinux || stdenv.isDarwin)) aws-sdk-cpp ; From ae7a2ea74136363c2f6ac6e624ea95da7abfafcc Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 3 Oct 2024 12:44:12 +0200 Subject: [PATCH 099/361] local-derivation-goal: Print sandbox error detail on darwin MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-Authored-By: Théophane Hufschmitt --- src/libstore/unix/build/local-derivation-goal.cc | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/libstore/unix/build/local-derivation-goal.cc b/src/libstore/unix/build/local-derivation-goal.cc index 7ce2661224b..706771e8e26 100644 --- a/src/libstore/unix/build/local-derivation-goal.cc +++ b/src/libstore/unix/build/local-derivation-goal.cc @@ -2159,8 +2159,9 @@ void LocalDerivationGoal::runChild() sandboxArgs.push_back("_ALLOW_LOCAL_NETWORKING"); sandboxArgs.push_back("1"); } - if (sandbox_init_with_parameters(sandboxProfile.c_str(), 0, stringsToCharPtrs(sandboxArgs).data(), NULL)) { - writeFull(STDERR_FILENO, "failed to configure sandbox\n"); + char * sandbox_errbuf = nullptr; + if (sandbox_init_with_parameters(sandboxProfile.c_str(), 0, stringsToCharPtrs(sandboxArgs).data(), &sandbox_errbuf)) { + writeFull(STDERR_FILENO, fmt("failed to configure sandbox: %s\n", sandbox_errbuf ? 
sandbox_errbuf : "(null)")); _exit(1); } } From 047ee50db2f660eb3f50fab8f7543ce95e814b7c Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 3 Oct 2024 12:50:27 +0200 Subject: [PATCH 100/361] local-derivation-goal: Refactor MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This works because the `builder` and `args` variables are only used in the non-builtin code path. Co-Authored-By: Théophane Hufschmitt --- src/libstore/unix/build/local-derivation-goal.cc | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/libstore/unix/build/local-derivation-goal.cc b/src/libstore/unix/build/local-derivation-goal.cc index 706771e8e26..d9738a1eae9 100644 --- a/src/libstore/unix/build/local-derivation-goal.cc +++ b/src/libstore/unix/build/local-derivation-goal.cc @@ -2165,15 +2165,12 @@ void LocalDerivationGoal::runChild() _exit(1); } } +#endif - builder = drv->builder; - args.push_back(std::string(baseNameOf(drv->builder))); -#else if (!drv->isBuiltin()) { builder = drv->builder; args.push_back(std::string(baseNameOf(drv->builder))); } -#endif for (auto & i : drv->args) args.push_back(rewriteStrings(i, inputRewrites)); From 50f83e4bbd9107576399f94449ac9cb4e80d575e Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 3 Oct 2024 12:57:00 +0200 Subject: [PATCH 101/361] local-derivation-goal: Move builder preparation to non-builtin code path --- .../unix/build/local-derivation-goal.cc | 25 ++++++++----------- 1 file changed, 10 insertions(+), 15 deletions(-) diff --git a/src/libstore/unix/build/local-derivation-goal.cc b/src/libstore/unix/build/local-derivation-goal.cc index d9738a1eae9..2a09e3dd42d 100644 --- a/src/libstore/unix/build/local-derivation-goal.cc +++ b/src/libstore/unix/build/local-derivation-goal.cc @@ -2038,11 +2038,6 @@ void LocalDerivationGoal::runChild() throw SysError("setuid failed"); } - /* Fill in the arguments. */ - Strings args; - - std::string builder = "invalid"; - #if __APPLE__ /* This has to appear before import statements. */ std::string sandboxProfile = "(version 1)\n"; @@ -2167,14 +2162,6 @@ void LocalDerivationGoal::runChild() } #endif - if (!drv->isBuiltin()) { - builder = drv->builder; - args.push_back(std::string(baseNameOf(drv->builder))); - } - - for (auto & i : drv->args) - args.push_back(rewriteStrings(i, inputRewrites)); - /* Indicate that we managed to set up the build environment. 
*/ writeFull(STDERR_FILENO, std::string("\2\n")); @@ -2205,6 +2192,14 @@ void LocalDerivationGoal::runChild() } } + // Now builder is not builtin + + Strings args; + args.push_back(std::string(baseNameOf(drv->builder))); + + for (auto & i : drv->args) + args.push_back(rewriteStrings(i, inputRewrites)); + #if __APPLE__ posix_spawnattr_t attrp; @@ -2226,9 +2221,9 @@ void LocalDerivationGoal::runChild() posix_spawnattr_setbinpref_np(&attrp, 1, &cpu, NULL); } - posix_spawn(NULL, builder.c_str(), NULL, &attrp, stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data()); + posix_spawn(NULL, drv->builder.c_str(), NULL, &attrp, stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data()); #else - execve(builder.c_str(), stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data()); + execve(drv->builder.c_str(), stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data()); #endif throw SysError("executing '%1%'", drv->builder); From d6ece7e94aa4253f8c32e81707d87f4280587e6d Mon Sep 17 00:00:00 2001 From: Artemis Tosini Date: Thu, 24 Oct 2024 21:24:47 +0000 Subject: [PATCH 102/361] Fix OpenBSD build with Makefiles OpenBSD dynamic libraries never link to libc directly. Instead, they have undefined symbols for all libc functions they use that ld.so resolves to the libc referred to in the main executable. Thus, disallowing undefined symbols will always fail (cherry picked from commit c49bff2434971d693b03525622082a81b5ed75eb) --- mk/libraries.mk | 4 +++- mk/platform.mk | 4 ++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/mk/libraries.mk b/mk/libraries.mk index b99ba278210..a7848ba358d 100644 --- a/mk/libraries.mk +++ b/mk/libraries.mk @@ -86,7 +86,9 @@ define build-library else ifndef HOST_DARWIN ifndef HOST_WINDOWS - $(1)_LDFLAGS += -Wl,-z,defs + ifndef HOST_OPENBSD + $(1)_LDFLAGS += -Wl,-z,defs + endif endif endif endif diff --git a/mk/platform.mk b/mk/platform.mk index 22c114a2077..3c4fff78036 100644 --- a/mk/platform.mk +++ b/mk/platform.mk @@ -21,6 +21,10 @@ ifdef HOST_OS HOST_NETBSD = 1 HOST_UNIX = 1 endif + ifeq ($(patsubst openbsd%,,$(HOST_KERNEL)),) + HOST_OPENBSD = 1 + HOST_UNIX = 1 + endif ifeq ($(HOST_KERNEL), linux) HOST_LINUX = 1 HOST_UNIX = 1 From 0ae90918db12f7cf20f40216460c8eba91004a78 Mon Sep 17 00:00:00 2001 From: Artemis Tosini Date: Sat, 26 Oct 2024 16:46:32 +0000 Subject: [PATCH 103/361] package.nix: Disable GC on OpenBSD Nix fails to build on OpenBSD with a linking error due to a non-found symbol in boehm-gc. Just disable the GC until we can find a proper workaround. (cherry picked from commit fecc1ca2055ee590d8b957830f70512fcecbfe4b) --- package.nix | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/package.nix b/package.nix index a7c8923e8b4..e1b4aebb05d 100644 --- a/package.nix +++ b/package.nix @@ -75,7 +75,9 @@ # # Temporarily disabled on Windows because the `GC_throw_bad_alloc` # symbol is missing during linking. -, enableGC ? !stdenv.hostPlatform.isWindows +# +# Disabled on OpenBSD because of missing `_data_start` symbol while linking +, enableGC ? !stdenv.hostPlatform.isWindows && !stdenv.hostPlatform.isOpenBSD # Whether to enable Markdown rendering in the Nix binary. , enableMarkdown ? !stdenv.hostPlatform.isWindows From 803943fce4c9b4825d1b962d9b338ddf7e30074d Mon Sep 17 00:00:00 2001 From: Artemis Tosini Date: Sat, 26 Oct 2024 17:12:06 +0000 Subject: [PATCH 104/361] Add support for `utimensat` as an alternative to `lutimes` OpenBSD doesn't support `lutimes`, but does support `utimensat` which subsumes it. 
In fact, all the BSDs, Linux, and newer macOS all support it. So let's make this our first choice for the implementation. In addition, let's get rid of the `lutimes` `ENOSYS` special case. The Linux manpage says > ENOSYS > > The kernel does not support this call; Linux 2.6.22 or later is > required. which I think is the origin of this check, but that's a very old version of Linux at this point. The code can be simplified a lot if we drop support for it here (as we've done elsewhere, anyways). Co-Authored-By: John Ericson (cherry picked from commit d0232028111ce4f5a066d9a302fec142ebe91037) --- configure.ac | 7 ++-- src/libutil/file-system.cc | 68 +++++++++++++++++++------------------- src/libutil/meson.build | 4 +++ 3 files changed, 42 insertions(+), 37 deletions(-) diff --git a/configure.ac b/configure.ac index 5c22ed17636..dd33dbe110e 100644 --- a/configure.ac +++ b/configure.ac @@ -89,9 +89,10 @@ AC_LANG_POP(C++) AC_CHECK_FUNCS([statvfs pipe2]) -# Check for lutimes, optionally used for changing the mtime of -# symlinks. -AC_CHECK_FUNCS([lutimes]) +# Check for lutimes and utimensat, optionally used for changing the +# mtime of symlinks. +AC_CHECK_DECLS([AT_SYMLINK_NOFOLLOW], [], [], [[#include ]]) +AC_CHECK_FUNCS([lutimes utimensat]) # Check whether the store optimiser can optimise symlinks. diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc index 060a806fbc5..04e4369fab4 100644 --- a/src/libutil/file-system.cc +++ b/src/libutil/file-system.cc @@ -574,7 +574,28 @@ void setWriteTime( time_t modificationTime, std::optional optIsSymlink) { -#ifndef _WIN32 +#ifdef _WIN32 + // FIXME use `fs::last_write_time`. + // + // Would be nice to use std::filesystem unconditionally, but + // doesn't support access time just modification time. + // + // System clock vs File clock issues also make that annoying. + warn("Changing file times is not yet implemented on Windows, path is '%s'", path); +#elif HAVE_UTIMENSAT && HAVE_DECL_AT_SYMLINK_NOFOLLOW + struct timespec times[2] = { + { + .tv_sec = accessedTime, + .tv_nsec = 0, + }, + { + .tv_sec = modificationTime, + .tv_nsec = 0, + }, + }; + if (utimensat(AT_FDCWD, path.c_str(), times, AT_SYMLINK_NOFOLLOW) == -1) + throw SysError("changing modification time of '%s' (using `utimensat`)", path); +#else struct timeval times[2] = { { .tv_sec = accessedTime, @@ -585,42 +606,21 @@ void setWriteTime( .tv_usec = 0, }, }; -#endif - - auto nonSymlink = [&]{ - bool isSymlink = optIsSymlink - ? *optIsSymlink - : fs::is_symlink(path); - - if (!isSymlink) { -#ifdef _WIN32 - // FIXME use `fs::last_write_time`. - // - // Would be nice to use std::filesystem unconditionally, but - // doesn't support access time just modification time. - // - // System clock vs File clock issues also make that annoying. - warn("Changing file times is not yet implemented on Windows, path is '%s'", path); +#else - if (utimes(path.c_str(), times) == -1) { - - throw SysError("changing modification time of '%s' (not a symlink)", path); - } -#endif - } else { - throw Error("Cannot modification time of symlink '%s'", path); - } - }; + bool isSymlink = optIsSymlink + ?
*optIsSymlink + : fs::is_symlink(path); -#if HAVE_LUTIMES - if (lutimes(path.c_str(), times) == -1) { - if (errno == ENOSYS) - nonSymlink(); - else - throw SysError("changing modification time of '%s'", path); + if (!isSymlink) { + if (utimes(path.c_str(), times) == -1) + throw SysError("changing modification time of '%s' (not a symlink)", path); + } else { + throw Error("Cannot modification time of symlink '%s'", path); } -#else - nonSymlink(); +#endif #endif } diff --git a/src/libutil/meson.build b/src/libutil/meson.build index 8552c4c9dad..cba5a5288ed 100644 --- a/src/libutil/meson.build +++ b/src/libutil/meson.build @@ -41,6 +41,8 @@ check_funcs = [ # Optionally used to try to close more file descriptors (e.g. before # forking) on Unix. 'sysconf', + # Optionally used for changing the mtime of files and symlinks. + 'utimensat', ] foreach funcspec : check_funcs define_name = 'HAVE_' + funcspec.underscorify().to_upper() @@ -48,6 +50,8 @@ foreach funcspec : check_funcs configdata.set(define_name, define_value) endforeach +configdata.set('HAVE_DECL_AT_SYMLINK_NOFOLLOW', cxx.has_header_symbol('fcntl.h', 'AT_SYMLINK_NOFOLLOW').to_int()) + subdir('build-utils-meson/threads') if host_machine.system() == 'windows' From ffcc42faf467d692e685697ffb205bdbf3926979 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 29 Oct 2024 15:18:48 +0100 Subject: [PATCH 105/361] Revert flake-schemas for now --- Makefile.config.in | 1 - configure.ac | 6 - doc/manual/src/SUMMARY.md.in | 1 - doc/manual/src/protocols/flake-schemas.md | 64 -- flake.lock | 29 +- flake.nix | 8 +- package.nix | 3 - packaging/dependencies.nix | 13 +- packaging/hydra.nix | 2 - src/libcmd/installable-flake.cc | 14 + src/libcmd/installable-flake.hh | 2 + src/libcmd/installables.cc | 5 + src/libexpr/eval-cache.cc | 6 - src/libexpr/eval-cache.hh | 7 - src/libflake/flake/flake.cc | 36 +- src/libflake/flake/flake.hh | 19 - src/nix/call-flake-schemas.nix | 43 -- src/nix/flake-check.md | 58 +- src/nix/flake-schemas.cc | 224 ------ src/nix/flake-schemas.hh | 45 -- src/nix/flake.cc | 885 +++++++++++++++++----- src/nix/local.mk | 6 - tests/functional/flakes/check.sh | 11 + tests/functional/flakes/show.sh | 43 +- tests/functional/fmt.sh | 4 +- 25 files changed, 816 insertions(+), 719 deletions(-) delete mode 100644 doc/manual/src/protocols/flake-schemas.md delete mode 100644 src/nix/call-flake-schemas.nix delete mode 100644 src/nix/flake-schemas.cc delete mode 100644 src/nix/flake-schemas.hh diff --git a/Makefile.config.in b/Makefile.config.in index 2ed716b5e7b..3100d207365 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -37,7 +37,6 @@ checkbindir = @checkbindir@ checklibdir = @checklibdir@ datadir = @datadir@ datarootdir = @datarootdir@ -default_flake_schemas = @default_flake_schemas@ docdir = @docdir@ embedded_sandbox_shell = @embedded_sandbox_shell@ exec_prefix = @exec_prefix@ diff --git a/configure.ac b/configure.ac index cd931b87d55..5c22ed17636 100644 --- a/configure.ac +++ b/configure.ac @@ -428,12 +428,6 @@ if test "$embedded_sandbox_shell" = yes; then AC_DEFINE(HAVE_EMBEDDED_SANDBOX_SHELL, 1, [Include the sandbox shell in the Nix binary.]) fi - -AC_ARG_WITH(default-flake-schemas, AS_HELP_STRING([--with-default-flake-schemas=PATH],[path of the default flake schemas flake]), - default_flake_schemas=$withval, - [AC_MSG_FAILURE([--with-default-flake-schemas is missing])]) -AC_SUBST(default_flake_schemas) - ]) diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index b6d5b3c440c..8739599a03e 100644 --- 
a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -114,7 +114,6 @@ - [Store Path Specification](protocols/store-path.md) - [Nix Archive (NAR) Format](protocols/nix-archive.md) - [Derivation "ATerm" file format](protocols/derivation-aterm.md) - - [Flake Schemas](protocols/flake-schemas.md) - [C API](c-api.md) - [Glossary](glossary.md) - [Development](development/index.md) diff --git a/doc/manual/src/protocols/flake-schemas.md b/doc/manual/src/protocols/flake-schemas.md deleted file mode 100644 index b1dfa5da6f0..00000000000 --- a/doc/manual/src/protocols/flake-schemas.md +++ /dev/null @@ -1,64 +0,0 @@ -# Flake Schemas - -Flake schemas are a mechanism to allow tools like `nix flake show` and `nix flake check` to enumerate and check the contents of a flake -in a generic way, without requiring built-in knowledge of specific flake output types like `packages` or `nixosConfigurations`. - -A flake can define schemas for its outputs by defining a `schemas` output. `schemas` should be an attribute set with an attribute for -every output type that you want to be supported. If a flake does not have a `schemas` attribute, Nix uses a built-in set of schemas (namely https://github.com/DeterminateSystems/flake-schemas). - -A schema is an attribute set with the following attributes: - -| Attribute | Description | Default | -| :---------- | :---------------------------------------------------------------------------------------------- | :------ | -| `version` | Should be set to 1 | | -| `doc` | A string containing documentation about the flake output type in Markdown format. | | -| `allowIFD` | Whether the evaluation of the output attributes of this flake can read from derivation outputs. | `true` | -| `inventory` | A function that returns the contents of the flake output (described [below](#inventory)). | | - -# Inventory - -The `inventory` function returns a _node_ describing the contents of the flake output. A node is either a _leaf node_ or a _non-leaf node_. This allows nested flake output attributes to be described (e.g. `x86_64-linux.hello` inside a `packages` output). - -Non-leaf nodes must have the following attribute: - -| Attribute | Description | -| :--------- | :------------------------------------------------------------------------------------- | -| `children` | An attribute set of nodes. If this attribute is missing, the attribute is a leaf node. | - -Leaf nodes can have the following attributes: - -| Attribute | Description | -| :----------------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `derivation` | The main derivation of this node, if any. It must evaluate for `nix flake check` and `nix flake show` to succeed. | -| `evalChecks` | An attribute set of Boolean values, used by `nix flake check`. Each attribute must evaluate to `true`. | -| `isFlakeCheck` | Whether `nix flake check` should build the `derivation` attribute of this node. | -| `shortDescription` | A one-sentence description of the node (such as the `meta.description` attribute in Nixpkgs). | -| `what` | A brief human-readable string describing the type of the node, e.g. `"package"` or `"development environment"`. This is used by tools like `nix flake show` to describe the contents of a flake. 
| - -Both leaf and non-leaf nodes can have the following attributes: - -| Attribute | Description | -| :----------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `forSystems` | A list of Nix system types (e.g. `["x86_64-linux"]`) supported by this node. This is used by tools to skip nodes that cannot be built on the user's system. Setting this on a non-leaf node allows all the children to be skipped, regardless of the `forSystems` attributes of the children. If this attribute is not set, the node is never skipped. | - -# Example - -Here is a schema that checks that every element of the `nixosConfigurations` flake output evaluates and builds correctly (meaning that it has a `config.system.build.toplevel` attribute that yields a buildable derivation). - -```nix -outputs = { - schemas.nixosConfigurations = { - version = 1; - doc = '' - The `nixosConfigurations` flake output defines NixOS system configurations. - ''; - inventory = output: { - children = builtins.mapAttrs (configName: machine: - { - what = "NixOS configuration"; - derivation = machine.config.system.build.toplevel; - }) output; - }; - }; -}; -``` diff --git a/flake.lock b/flake.lock index 1a6e4f7a1da..bb1114734e7 100644 --- a/flake.lock +++ b/flake.lock @@ -36,21 +36,6 @@ "type": "github" } }, - "flake-schemas": { - "locked": { - "lastModified": 1719857163, - "narHash": "sha256-wM+8JtoKBkahHiKn+EM1ikurMnitwRQrZ91hipJIJK8=", - "owner": "DeterminateSystems", - "repo": "flake-schemas", - "rev": "61a02d7183d4241962025e6c6307a22a0bb72a21", - "type": "github" - }, - "original": { - "owner": "DeterminateSystems", - "repo": "flake-schemas", - "type": "github" - } - }, "git-hooks-nix": { "inputs": { "flake-compat": [], @@ -63,11 +48,11 @@ ] }, "locked": { - "lastModified": 1721042469, - "narHash": "sha256-6FPUl7HVtvRHCCBQne7Ylp4p+dpP3P/OYuzjztZ4s70=", + "lastModified": 1729104314, + "narHash": "sha256-pZRZsq5oCdJt3upZIU4aslS9XwFJ+/nVtALHIciX/BI=", "owner": "cachix", "repo": "git-hooks.nix", - "rev": "f451c19376071a90d8c58ab1a953c6e9840527fd", + "rev": "3c3e88f0f544d6bb54329832616af7eb971b6be6", "type": "github" }, "original": { @@ -79,16 +64,15 @@ "libgit2": { "flake": false, "locked": { - "lastModified": 1715853528, - "narHash": "sha256-J2rCxTecyLbbDdsyBWn9w7r3pbKRMkI9E7RvRgAqBdY=", + "lastModified": 1730025633, + "narHash": "sha256-HcL9fW5crHeLpP7C7vShO+j5fwY8z95Plr1c+hIwFRQ=", "owner": "libgit2", "repo": "libgit2", - "rev": "36f7e21ad757a3dacc58cf7944329da6bc1d6e96", + "rev": "b363ea4b9e761fed7942eef4bbc735ccf16f9fed", "type": "github" }, "original": { "owner": "libgit2", - "ref": "v1.8.1", "repo": "libgit2", "type": "github" } @@ -145,7 +129,6 @@ "inputs": { "flake-compat": "flake-compat", "flake-parts": "flake-parts", - "flake-schemas": "flake-schemas", "git-hooks-nix": "git-hooks-nix", "libgit2": "libgit2", "nixpkgs": "nixpkgs", diff --git a/flake.nix b/flake.nix index fe5907f77b8..848d04a7ae8 100644 --- a/flake.nix +++ b/flake.nix @@ -5,8 +5,7 @@ inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2"; inputs.nixpkgs-23-11.url = "github:NixOS/nixpkgs/a62e6edd6d5e1fa0329b8653c801147986f8d446"; inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; }; - inputs.libgit2 = { 
url = "github:libgit2/libgit2/v1.8.1"; flake = false; }; - inputs.flake-schemas.url = "github:DeterminateSystems/flake-schemas"; + inputs.libgit2 = { url = "github:libgit2/libgit2"; flake = false; }; # dev tooling inputs.flake-parts.url = "github:hercules-ci/flake-parts"; @@ -19,7 +18,8 @@ inputs.git-hooks-nix.inputs.flake-compat.follows = ""; inputs.git-hooks-nix.inputs.gitignore.follows = ""; - outputs = inputs@{ self, nixpkgs, nixpkgs-regression, libgit2, flake-schemas, ... }: + outputs = inputs@{ self, nixpkgs, nixpkgs-regression, libgit2, ... }: + let inherit (nixpkgs) lib; @@ -156,8 +156,6 @@ }; in { - schemas = flake-schemas.schemas; - # A Nixpkgs overlay that overrides the 'nix' and # 'nix-perl-bindings' packages. overlays.default = overlayFor (p: p.stdenv); diff --git a/package.nix b/package.nix index 4f18eb8bb20..a7c8923e8b4 100644 --- a/package.nix +++ b/package.nix @@ -38,8 +38,6 @@ , busybox-sandbox-shell ? null -, flake-schemas - # Configuration Options #: # This probably seems like too many degrees of freedom, but it @@ -261,7 +259,6 @@ in { (lib.enableFeature enableMarkdown "markdown") (lib.enableFeature installUnitTests "install-unit-tests") (lib.withFeatureAs true "readline-flavor" readlineFlavor) - "--with-default-flake-schemas=${flake-schemas}" ] ++ lib.optionals (!forDevShell) [ "--sysconfdir=/etc" ] ++ lib.optionals installUnitTests [ diff --git a/packaging/dependencies.nix b/packaging/dependencies.nix index 5a0981bfb56..2b34720fe4d 100644 --- a/packaging/dependencies.nix +++ b/packaging/dependencies.nix @@ -152,16 +152,5 @@ scope: { inherit resolvePath filesetToSource; - mkMesonDerivation = f: let - exts = [ - miscGoodPractice - bsdNoLinkAsNeeded - localSourceLayer - ]; - in stdenv.mkDerivation - (lib.extends - (lib.foldr lib.composeExtensions (_: _: {}) exts) - f); - - inherit (inputs) flake-schemas; + mkMesonDerivation = f: stdenv.mkDerivation (lib.extends localSourceLayer f); } diff --git a/packaging/hydra.nix b/packaging/hydra.nix index 24c614e6746..dbe99247675 100644 --- a/packaging/hydra.nix +++ b/packaging/hydra.nix @@ -28,8 +28,6 @@ let test-daemon = daemon; doBuild = false; - - inherit (inputs) flake-schemas; }; # Technically we could just return `pkgs.nixComponents`, but for Hydra it's diff --git a/src/libcmd/installable-flake.cc b/src/libcmd/installable-flake.cc index 8796ad5ba79..852a5618efd 100644 --- a/src/libcmd/installable-flake.cc +++ b/src/libcmd/installable-flake.cc @@ -43,6 +43,20 @@ std::vector InstallableFlake::getActualAttrPaths() return res; } +Value * InstallableFlake::getFlakeOutputs(EvalState & state, const flake::LockedFlake & lockedFlake) +{ + auto vFlake = state.allocValue(); + + callFlake(state, lockedFlake, *vFlake); + + auto aOutputs = vFlake->attrs()->get(state.symbols.create("outputs")); + assert(aOutputs); + + state.forceValue(*aOutputs->value, aOutputs->value->determinePos(noPos)); + + return aOutputs->value; +} + static std::string showAttrPaths(const std::vector & paths) { std::string s; diff --git a/src/libcmd/installable-flake.hh b/src/libcmd/installable-flake.hh index 8e0a232ef8a..b0d6f5afca6 100644 --- a/src/libcmd/installable-flake.hh +++ b/src/libcmd/installable-flake.hh @@ -53,6 +53,8 @@ struct InstallableFlake : InstallableValue std::vector getActualAttrPaths(); + Value * getFlakeOutputs(EvalState & state, const flake::LockedFlake & lockedFlake); + DerivedPathsWithInfo toDerivedPaths() override; std::pair toValue(EvalState & state) override; diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index 
3c29ad9c8f9..e4ca6e455d8 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -449,6 +449,11 @@ ref openEvalCache( : std::nullopt; auto rootLoader = [&state, lockedFlake]() { + /* For testing whether the evaluation cache is + complete. */ + if (getEnv("NIX_ALLOW_EVAL").value_or("1") == "0") + throw Error("not everything is cached, but evaluation is not allowed"); + auto vFlake = state.allocValue(); flake::callFlake(state, *lockedFlake, *vFlake); diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc index 043ec23d313..26352187e72 100644 --- a/src/libexpr/eval-cache.cc +++ b/src/libexpr/eval-cache.cc @@ -368,12 +368,6 @@ Value * EvalCache::getRootValue() { if (!value) { debug("getting root value"); - - /* For testing whether the evaluation cache is - complete. */ - if (getEnv("NIX_ALLOW_EVAL").value_or("1") == "0") - throw Error("not everything is cached, but evaluation is not allowed"); - value = allocRootValue(rootLoader()); } return *value; diff --git a/src/libexpr/eval-cache.hh b/src/libexpr/eval-cache.hh index a6c8ad011c8..b1911e3a4f7 100644 --- a/src/libexpr/eval-cache.hh +++ b/src/libexpr/eval-cache.hh @@ -34,11 +34,7 @@ class EvalCache : public std::enable_shared_from_this friend struct CachedEvalError; std::shared_ptr db; - -public: EvalState & state; - -private: typedef std::function RootLoader; RootLoader rootLoader; RootValue value; @@ -93,10 +89,7 @@ class AttrCursor : public std::enable_shared_from_this friend class EvalCache; friend struct CachedEvalError; -public: ref root; - -private: typedef std::optional, Symbol>> Parent; Parent parent; RootValue _value; diff --git a/src/libflake/flake/flake.cc b/src/libflake/flake/flake.cc index 38efbd37fe1..ceb840c746d 100644 --- a/src/libflake/flake/flake.cc +++ b/src/libflake/flake/flake.cc @@ -204,7 +204,7 @@ static std::map parseFlakeInputs( return inputs; } -Flake readFlake( +static Flake readFlake( EvalState & state, const FlakeRef & originalRef, const FlakeRef & resolvedRef, @@ -338,16 +338,20 @@ static LockFile readLockFile( : LockFile(); } +/* Compute an in-memory lock file for the specified top-level flake, + and optionally write it to file, if the flake is writable. 
*/ LockedFlake lockFlake( const Settings & settings, EvalState & state, const FlakeRef & topRef, - const LockFlags & lockFlags, - Flake flake, - FlakeCache & flakeCache) + const LockFlags & lockFlags) { + FlakeCache flakeCache; + auto useRegistries = lockFlags.useRegistries.value_or(settings.useRegistries); + auto flake = getFlake(state, topRef, useRegistries, flakeCache); + if (lockFlags.applyNixConfig) { flake.config.apply(settings); state.store->setOptions(); @@ -738,30 +742,6 @@ LockedFlake lockFlake( } } -LockedFlake lockFlake( - const Settings & settings, - EvalState & state, - const FlakeRef & topRef, - const LockFlags & lockFlags) -{ - FlakeCache flakeCache; - - auto useRegistries = lockFlags.useRegistries.value_or(settings.useRegistries); - - return lockFlake(settings, state, topRef, lockFlags, getFlake(state, topRef, useRegistries, flakeCache), flakeCache); -} - -LockedFlake lockFlake( - const Settings & settings, - EvalState & state, - const FlakeRef & topRef, - const LockFlags & lockFlags, - Flake flake) -{ - FlakeCache flakeCache; - return lockFlake(settings, state, topRef, lockFlags, std::move(flake), flakeCache); -} - void callFlake(EvalState & state, const LockedFlake & lockedFlake, Value & vRes) diff --git a/src/libflake/flake/flake.hh b/src/libflake/flake/flake.hh index 69744efb313..cce17009ce3 100644 --- a/src/libflake/flake/flake.hh +++ b/src/libflake/flake/flake.hh @@ -203,31 +203,12 @@ struct LockFlags std::set inputUpdates; }; -Flake readFlake( - EvalState & state, - const FlakeRef & originalRef, - const FlakeRef & resolvedRef, - const FlakeRef & lockedRef, - const SourcePath & rootDir, - const InputPath & lockRootPath); - -/** - * Compute an in-memory lock file for the specified top-level flake, - * and optionally write it to file, if the flake is writable. - */ LockedFlake lockFlake( const Settings & settings, EvalState & state, const FlakeRef & flakeRef, const LockFlags & lockFlags); -LockedFlake lockFlake( - const Settings & settings, - EvalState & state, - const FlakeRef & topRef, - const LockFlags & lockFlags, - Flake flake); - void callFlake( EvalState & state, const LockedFlake & lockedFlake, diff --git a/src/nix/call-flake-schemas.nix b/src/nix/call-flake-schemas.nix deleted file mode 100644 index cd6d4c3ae53..00000000000 --- a/src/nix/call-flake-schemas.nix +++ /dev/null @@ -1,43 +0,0 @@ -/* The flake providing default schemas. */ -defaultSchemasFlake: - -/* The flake whose contents we want to extract. */ -flake: - -let - - # Helper functions. - - mapAttrsToList = f: attrs: map (name: f name attrs.${name}) (builtins.attrNames attrs); - -in - -rec { - outputNames = builtins.attrNames flake.outputs; - - allSchemas = (flake.outputs.schemas or defaultSchemasFlake.schemas) // schemaOverrides; - - schemaOverrides = {}; # FIXME - - schemas = - builtins.listToAttrs (builtins.concatLists (mapAttrsToList - (outputName: output: - if allSchemas ? ${outputName} then - [{ name = outputName; value = allSchemas.${outputName}; }] - else - [ ]) - flake.outputs)); - - inventory = - builtins.mapAttrs - (outputName: output: - if schemas ? 
${outputName} && schemas.${outputName}.version == 1 - then - { output = schemas.${outputName}.inventory output; - inherit (schemas.${outputName}) doc; - } - else - { unknown = true; } - ) - flake.outputs; -} diff --git a/src/nix/flake-check.md b/src/nix/flake-check.md index 71dd916407e..c8307f8d85b 100644 --- a/src/nix/flake-check.md +++ b/src/nix/flake-check.md @@ -18,20 +18,56 @@ R""( # Description This command verifies that the flake specified by flake reference -*flake-url* can be evaluated and built successfully according to its -`schemas` flake output. For every flake output that has a schema -definition, `nix flake check` uses the schema to extract the contents -of the output. Then, for every item in the contents: - -* It evaluates the elements of the `evalChecks` attribute set returned - by the schema for that item, printing an error or warning for every - check that fails to evaluate or that evaluates to `false`. - -* It builds `derivation` attribute returned by the schema for that - item, if the item has the `isFlakeCheck` attribute. +*flake-url* can be evaluated successfully (as detailed below), and +that the derivations specified by the flake's `checks` output can be +built successfully. If the `keep-going` option is set to `true`, Nix will keep evaluating as much as it can and report the errors as it encounters them. Otherwise it will stop at the first error. +# Evaluation checks + +The following flake output attributes must be derivations: + +* `checks.`*system*`.`*name* +* `defaultPackage.`*system* +* `devShell.`*system* +* `devShells.`*system*`.`*name* +* `nixosConfigurations.`*name*`.config.system.build.toplevel` +* `packages.`*system*`.`*name* + +The following flake output attributes must be [app +definitions](./nix3-run.md): + +* `apps.`*system*`.`*name* +* `defaultApp.`*system* + +The following flake output attributes must be [template +definitions](./nix3-flake-init.md): + +* `defaultTemplate` +* `templates.`*name* + +The following flake output attributes must be *Nixpkgs overlays*: + +* `overlay` +* `overlays.`*name* + +The following flake output attributes must be *NixOS modules*: + +* `nixosModule` +* `nixosModules.`*name* + +The following flake output attributes must be +[bundlers](./nix3-bundle.md): + +* `bundlers.`*name* +* `defaultBundler` + +In addition, the `hydraJobs` output is evaluated in the same way as +Hydra's `hydra-eval-jobs` (i.e. as a arbitrarily deeply nested +attribute set of derivations). Similarly, the +`legacyPackages`.*system* output is evaluated like `nix-env --query --available `. + )"" diff --git a/src/nix/flake-schemas.cc b/src/nix/flake-schemas.cc deleted file mode 100644 index 0047f27ce61..00000000000 --- a/src/nix/flake-schemas.cc +++ /dev/null @@ -1,224 +0,0 @@ -#include "flake-schemas.hh" -#include "eval-settings.hh" -#include "fetch-to-store.hh" -#include "memory-source-accessor.hh" -#include "strings-inline.hh" - -namespace nix::flake_schemas { - -using namespace eval_cache; -using namespace flake; - -static LockedFlake getBuiltinDefaultSchemasFlake(EvalState & state) -{ - auto accessor = make_ref(); - - accessor->setPathDisplay("«builtin-flake-schemas»"); - - accessor->addFile( - CanonPath("flake.nix"), -#include "builtin-flake-schemas.nix.gen.hh" - ); - - // FIXME: remove this when we have lazy trees. - auto storePath = fetchToStore(*state.store, {accessor}, FetchMode::Copy); - state.allowPath(storePath); - - // Construct a dummy flakeref. 
- auto flakeRef = parseFlakeRef( - fetchSettings, - fmt("tarball+https://builtin-flake-schemas?narHash=%s", - state.store->queryPathInfo(storePath)->narHash.to_string(HashFormat::SRI, true))); - - auto flake = readFlake(state, flakeRef, flakeRef, flakeRef, state.rootPath(state.store->toRealPath(storePath)), {}); - - return lockFlake(flakeSettings, state, flakeRef, {}, flake); -} - -std::tuple, ref> -call(EvalState & state, std::shared_ptr lockedFlake, std::optional defaultSchemasFlake) -{ - auto fingerprint = lockedFlake->getFingerprint(state.store); - - std::string callFlakeSchemasNix = -#include "call-flake-schemas.nix.gen.hh" - ; - - auto lockedDefaultSchemasFlake = defaultSchemasFlake - ? flake::lockFlake(flakeSettings, state, *defaultSchemasFlake, {}) - : getBuiltinDefaultSchemasFlake(state); - auto lockedDefaultSchemasFlakeFingerprint = lockedDefaultSchemasFlake.getFingerprint(state.store); - - std::optional fingerprint2; - if (fingerprint && lockedDefaultSchemasFlakeFingerprint) - fingerprint2 = hashString( - HashAlgorithm::SHA256, - fmt("app:%s:%s:%s", - hashString(HashAlgorithm::SHA256, callFlakeSchemasNix).to_string(HashFormat::Base16, false), - fingerprint->to_string(HashFormat::Base16, false), - lockedDefaultSchemasFlakeFingerprint->to_string(HashFormat::Base16, false))); - - // FIXME: merge with openEvalCache(). - auto cache = make_ref( - evalSettings.useEvalCache && evalSettings.pureEval ? fingerprint2 : std::nullopt, - state, - [&state, lockedFlake, callFlakeSchemasNix, lockedDefaultSchemasFlake]() { - auto vCallFlakeSchemas = state.allocValue(); - state.eval( - state.parseExprFromString(callFlakeSchemasNix, state.rootPath(CanonPath::root)), *vCallFlakeSchemas); - - auto vFlake = state.allocValue(); - flake::callFlake(state, *lockedFlake, *vFlake); - - auto vDefaultSchemasFlake = state.allocValue(); - if (vFlake->type() == nAttrs && vFlake->attrs()->get(state.symbols.create("schemas"))) - vDefaultSchemasFlake->mkNull(); - else - flake::callFlake(state, lockedDefaultSchemasFlake, *vDefaultSchemasFlake); - - auto vRes = state.allocValue(); - Value * args[] = {vDefaultSchemasFlake, vFlake}; - state.callFunction(*vCallFlakeSchemas, 2, args, *vRes, noPos); - - return vRes; - }); - - return {cache, cache->getRoot()->getAttr("inventory")}; -} - -/* Derive the flake output attribute path from the cursor used to - traverse the inventory. We do this so we don't have to maintain a - separate attrpath for that. */ -std::vector toAttrPath(ref cursor) -{ - auto attrPath = cursor->getAttrPath(); - std::vector res; - auto i = attrPath.begin(); - assert(i != attrPath.end()); - ++i; // skip "inventory" - assert(i != attrPath.end()); - res.push_back(*i++); // copy output name - if (i != attrPath.end()) - ++i; // skip "outputs" - while (i != attrPath.end()) { - ++i; // skip "children" - if (i != attrPath.end()) - res.push_back(*i++); - } - return res; -} - -std::string toAttrPathStr(ref cursor) -{ - return concatStringsSep(".", cursor->root->state.symbols.resolve(toAttrPath(cursor))); -} - -void forEachOutput( - ref inventory, - std::function output, const std::string & doc, bool isLast)> f) -{ - // FIXME: handle non-IFD outputs first. 
- // evalSettings.enableImportFromDerivation.setDefault(false); - - auto outputNames = inventory->getAttrs(); - for (const auto & [i, outputName] : enumerate(outputNames)) { - auto output = inventory->getAttr(outputName); - try { - auto isUnknown = (bool) output->maybeGetAttr("unknown"); - Activity act(*logger, lvlInfo, actUnknown, fmt("evaluating '%s'", toAttrPathStr(output))); - f(outputName, - isUnknown ? std::shared_ptr() : output->getAttr("output"), - isUnknown ? "" : output->getAttr("doc")->getString(), - i + 1 == outputNames.size()); - } catch (Error & e) { - e.addTrace(nullptr, "while evaluating the flake output '%s':", toAttrPathStr(output)); - throw; - } - } -} - -void visit( - std::optional system, - ref node, - std::function leaf)> visitLeaf, - std::function)> visitNonLeaf, - std::function node, const std::vector & systems)> visitFiltered) -{ - Activity act(*logger, lvlInfo, actUnknown, fmt("evaluating '%s'", toAttrPathStr(node))); - - /* Apply the system type filter. */ - if (system) { - if (auto forSystems = node->maybeGetAttr("forSystems")) { - auto systems = forSystems->getListOfStrings(); - if (std::find(systems.begin(), systems.end(), system) == systems.end()) { - visitFiltered(node, systems); - return; - } - } - } - - if (auto children = node->maybeGetAttr("children")) { - visitNonLeaf([&](ForEachChild f) { - auto attrNames = children->getAttrs(); - for (const auto & [i, attrName] : enumerate(attrNames)) { - try { - f(attrName, children->getAttr(attrName), i + 1 == attrNames.size()); - } catch (Error & e) { - // FIXME: make it a flake schema attribute whether to ignore evaluation errors. - if (node->root->state.symbols[toAttrPath(node)[0]] != "legacyPackages") { - e.addTrace(nullptr, "while evaluating the flake output attribute '%s':", toAttrPathStr(node)); - throw; - } - } - } - }); - } - - else - visitLeaf(ref(node)); -} - -std::optional what(ref leaf) -{ - if (auto what = leaf->maybeGetAttr("what")) - return what->getString(); - else - return std::nullopt; -} - -std::optional shortDescription(ref leaf) -{ - if (auto what = leaf->maybeGetAttr("shortDescription")) { - auto s = trim(what->getString()); - if (s != "") - return s; - } - return std::nullopt; -} - -std::shared_ptr derivation(ref leaf) -{ - return leaf->maybeGetAttr("derivation"); -} - -MixFlakeSchemas::MixFlakeSchemas() -{ - addFlag( - {.longName = "default-flake-schemas", - .description = "The URL of the flake providing default flake schema definitions.", - .labels = {"flake-ref"}, - .handler = {&defaultFlakeSchemas}, - .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { - completeFlakeRef(completions, getStore(), prefix); - }}}); -} - -std::optional MixFlakeSchemas::getDefaultFlakeSchemas() -{ - if (!defaultFlakeSchemas) - return std::nullopt; - else - return parseFlakeRef(fetchSettings, *defaultFlakeSchemas, absPath(".")); -} - -} diff --git a/src/nix/flake-schemas.hh b/src/nix/flake-schemas.hh deleted file mode 100644 index 9d1ba75a0ed..00000000000 --- a/src/nix/flake-schemas.hh +++ /dev/null @@ -1,45 +0,0 @@ -#include "eval-cache.hh" -#include "flake/flake.hh" -#include "command.hh" - -namespace nix::flake_schemas { - -using namespace eval_cache; - -std::tuple, ref> -call(EvalState & state, std::shared_ptr lockedFlake, std::optional defaultSchemasFlake); - -std::vector toAttrPath(ref cursor); - -std::string toAttrPathStr(ref cursor); - -void forEachOutput( - ref inventory, - std::function output, const std::string & doc, bool isLast)> f); - -typedef std::function attr, 
bool isLast)> ForEachChild; - -void visit( - std::optional system, - ref node, - std::function leaf)> visitLeaf, - std::function)> visitNonLeaf, - std::function node, const std::vector & systems)> visitFiltered); - -std::optional what(ref leaf); - -std::optional shortDescription(ref leaf); - -std::shared_ptr derivation(ref leaf); - -/* Some helper functions for processing flake schema output. */ -struct MixFlakeSchemas : virtual Args, virtual StoreCommand -{ - std::optional defaultFlakeSchemas; - - MixFlakeSchemas(); - - std::optional getDefaultFlakeSchemas(); -}; - -} diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 7ecaa272805..4465479695f 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -17,7 +17,6 @@ #include "eval-cache.hh" #include "markdown.hh" #include "users.hh" -#include "flake-schemas.hh" #include #include @@ -166,6 +165,31 @@ struct CmdFlakeLock : FlakeCommand } }; +static void enumerateOutputs(EvalState & state, Value & vFlake, + std::function callback) +{ + auto pos = vFlake.determinePos(noPos); + state.forceAttrs(vFlake, pos, "while evaluating a flake to get its outputs"); + + auto aOutputs = vFlake.attrs()->get(state.symbols.create("outputs")); + assert(aOutputs); + + state.forceAttrs(*aOutputs->value, pos, "while evaluating the outputs of a flake"); + + auto sHydraJobs = state.symbols.create("hydraJobs"); + + /* Hack: ensure that hydraJobs is evaluated before anything + else. This way we can disable IFD for hydraJobs and then enable + it for other outputs. */ + if (auto attr = aOutputs->value->attrs()->get(sHydraJobs)) + callback(state.symbols[attr->name], *attr->value, attr->pos); + + for (auto & attr : *aOutputs->value->attrs()) { + if (attr.name != sHydraJobs) + callback(state.symbols[attr.name], *attr.value, attr.pos); + } +} + struct CmdFlakeMetadata : FlakeCommand, MixJSON { std::string description() override @@ -296,7 +320,7 @@ struct CmdFlakeInfo : CmdFlakeMetadata } }; -struct CmdFlakeCheck : FlakeCommand, flake_schemas::MixFlakeSchemas +struct CmdFlakeCheck : FlakeCommand { bool build = true; bool checkAllSystems = false; @@ -337,26 +361,16 @@ struct CmdFlakeCheck : FlakeCommand, flake_schemas::MixFlakeSchemas auto state = getEvalState(); lockFlags.applyNixConfig = true; - auto flake = std::make_shared(lockFlake()); + auto flake = lockFlake(); auto localSystem = std::string(settings.thisSystem.get()); - auto [cache, inventory] = flake_schemas::call(*state, flake, getDefaultFlakeSchemas()); - - std::vector drvPaths; - - std::set uncheckedOutputs; - std::set omittedSystems; - - std::function node)> visit; - bool hasErrors = false; - auto reportError = [&](const Error & e) { try { throw e; } catch (Error & e) { if (settings.keepGoing) { - logError({.msg = e.info().msg}); + ignoreException(); hasErrors = true; } else @@ -364,70 +378,428 @@ struct CmdFlakeCheck : FlakeCommand, flake_schemas::MixFlakeSchemas } }; - visit = [&](ref node) - { - flake_schemas::visit( - checkAllSystems ? std::optional() : localSystem, - node, + std::set omittedSystems; - [&](ref leaf) - { - if (auto evalChecks = leaf->maybeGetAttr("evalChecks")) { - auto checkNames = evalChecks->getAttrs(); - for (auto & checkName : checkNames) { - // FIXME: update activity - auto cursor = evalChecks->getAttr(checkName); - auto b = cursor->getBool(); - if (!b) - reportError(Error("Evaluation check '%s' failed.", flake_schemas::toAttrPathStr(cursor))); - } + // FIXME: rewrite to use EvalCache. 
+ + auto resolve = [&] (PosIdx p) { + return state->positions[p]; + }; + + auto argHasName = [&] (Symbol arg, std::string_view expected) { + std::string_view name = state->symbols[arg]; + return + name == expected + || name == "_" + || (hasPrefix(name, "_") && name.substr(1) == expected); + }; + + auto checkSystemName = [&](std::string_view system, const PosIdx pos) { + // FIXME: what's the format of "system"? + if (system.find('-') == std::string::npos) + reportError(Error("'%s' is not a valid system type, at %s", system, resolve(pos))); + }; + + auto checkSystemType = [&](std::string_view system, const PosIdx pos) { + if (!checkAllSystems && system != localSystem) { + omittedSystems.insert(std::string(system)); + return false; + } else { + return true; + } + }; + + auto checkDerivation = [&](const std::string & attrPath, Value & v, const PosIdx pos) -> std::optional { + try { + Activity act(*logger, lvlInfo, actUnknown, + fmt("checking derivation %s", attrPath)); + auto packageInfo = getDerivation(*state, v, false); + if (!packageInfo) + throw Error("flake attribute '%s' is not a derivation", attrPath); + else { + // FIXME: check meta attributes + auto storePath = packageInfo->queryDrvPath(); + if (storePath) { + logger->log(lvlInfo, + fmt("derivation evaluated to %s", + store->printStorePath(storePath.value()))); } + return storePath; + } + } catch (Error & e) { + e.addTrace(resolve(pos), HintFmt("while checking the derivation '%s'", attrPath)); + reportError(e); + } + return std::nullopt; + }; - if (auto drv = flake_schemas::derivation(leaf)) { - if (auto isFlakeCheck = leaf->maybeGetAttr("isFlakeCheck")) { - if (isFlakeCheck->getBool()) { - auto drvPath = drv->forceDerivation(); - drvPaths.push_back(DerivedPath::Built { - .drvPath = makeConstantStorePathRef(drvPath), - .outputs = OutputsSpec::All { }, - }); - } - } + std::vector drvPaths; + + auto checkApp = [&](const std::string & attrPath, Value & v, const PosIdx pos) { + try { + #if 0 + // FIXME + auto app = App(*state, v); + for (auto & i : app.context) { + auto [drvPathS, outputName] = NixStringContextElem::parse(i); + store->parseStorePath(drvPathS); + } + #endif + } catch (Error & e) { + e.addTrace(resolve(pos), HintFmt("while checking the app definition '%s'", attrPath)); + reportError(e); + } + }; + + auto checkOverlay = [&](std::string_view attrPath, Value & v, const PosIdx pos) { + try { + Activity act(*logger, lvlInfo, actUnknown, + fmt("checking overlay '%s'", attrPath)); + state->forceValue(v, pos); + if (!v.isLambda()) { + throw Error("overlay is not a function, but %s instead", showType(v)); + } + if (v.payload.lambda.fun->hasFormals() + || !argHasName(v.payload.lambda.fun->arg, "final")) + throw Error("overlay does not take an argument named 'final'"); + // FIXME: if we have a 'nixpkgs' input, use it to + // evaluate the overlay. 
+ } catch (Error & e) { + e.addTrace(resolve(pos), HintFmt("while checking the overlay '%s'", attrPath)); + reportError(e); + } + }; + + auto checkModule = [&](std::string_view attrPath, Value & v, const PosIdx pos) { + try { + Activity act(*logger, lvlInfo, actUnknown, + fmt("checking NixOS module '%s'", attrPath)); + state->forceValue(v, pos); + } catch (Error & e) { + e.addTrace(resolve(pos), HintFmt("while checking the NixOS module '%s'", attrPath)); + reportError(e); + } + }; + + std::function checkHydraJobs; + + checkHydraJobs = [&](std::string_view attrPath, Value & v, const PosIdx pos) { + try { + Activity act(*logger, lvlInfo, actUnknown, + fmt("checking Hydra job '%s'", attrPath)); + state->forceAttrs(v, pos, ""); + + if (state->isDerivation(v)) + throw Error("jobset should not be a derivation at top-level"); + + for (auto & attr : *v.attrs()) { + state->forceAttrs(*attr.value, attr.pos, ""); + auto attrPath2 = concatStrings(attrPath, ".", state->symbols[attr.name]); + if (state->isDerivation(*attr.value)) { + Activity act(*logger, lvlInfo, actUnknown, + fmt("checking Hydra job '%s'", attrPath2)); + checkDerivation(attrPath2, *attr.value, attr.pos); + } else + checkHydraJobs(attrPath2, *attr.value, attr.pos); + } + + } catch (Error & e) { + e.addTrace(resolve(pos), HintFmt("while checking the Hydra jobset '%s'", attrPath)); + reportError(e); + } + }; + + auto checkNixOSConfiguration = [&](const std::string & attrPath, Value & v, const PosIdx pos) { + try { + Activity act(*logger, lvlInfo, actUnknown, + fmt("checking NixOS configuration '%s'", attrPath)); + Bindings & bindings(*state->allocBindings(0)); + auto vToplevel = findAlongAttrPath(*state, "config.system.build.toplevel", bindings, v).first; + state->forceValue(*vToplevel, pos); + if (!state->isDerivation(*vToplevel)) + throw Error("attribute 'config.system.build.toplevel' is not a derivation"); + } catch (Error & e) { + e.addTrace(resolve(pos), HintFmt("while checking the NixOS configuration '%s'", attrPath)); + reportError(e); + } + }; + + auto checkTemplate = [&](std::string_view attrPath, Value & v, const PosIdx pos) { + try { + Activity act(*logger, lvlInfo, actUnknown, + fmt("checking template '%s'", attrPath)); + + state->forceAttrs(v, pos, ""); + + if (auto attr = v.attrs()->get(state->symbols.create("path"))) { + if (attr->name == state->symbols.create("path")) { + NixStringContext context; + auto path = state->coerceToPath(attr->pos, *attr->value, context, ""); + if (!path.pathExists()) + throw Error("template '%s' refers to a non-existent path '%s'", attrPath, path); + // TODO: recursively check the flake in 'path'. 
} - }, + } else + throw Error("template '%s' lacks attribute 'path'", attrPath); - [&](std::function forEachChild) - { - forEachChild([&](Symbol attrName, ref node, bool isLast) - { - visit(node); - }); - }, - - [&](ref node, const std::vector & systems) { - for (auto & s : systems) - omittedSystems.insert(s); - }); + if (auto attr = v.attrs()->get(state->symbols.create("description"))) + state->forceStringNoCtx(*attr->value, attr->pos, ""); + else + throw Error("template '%s' lacks attribute 'description'", attrPath); + + for (auto & attr : *v.attrs()) { + std::string_view name(state->symbols[attr.name]); + if (name != "path" && name != "description" && name != "welcomeText") + throw Error("template '%s' has unsupported attribute '%s'", attrPath, name); + } + } catch (Error & e) { + e.addTrace(resolve(pos), HintFmt("while checking the template '%s'", attrPath)); + reportError(e); + } + }; + + auto checkBundler = [&](const std::string & attrPath, Value & v, const PosIdx pos) { + try { + Activity act(*logger, lvlInfo, actUnknown, + fmt("checking bundler '%s'", attrPath)); + state->forceValue(v, pos); + if (!v.isLambda()) + throw Error("bundler must be a function"); + // TODO: check types of inputs/outputs? + } catch (Error & e) { + e.addTrace(resolve(pos), HintFmt("while checking the template '%s'", attrPath)); + reportError(e); + } }; - flake_schemas::forEachOutput(inventory, [&](Symbol outputName, std::shared_ptr output, const std::string & doc, bool isLast) { - if (output) { - visit(ref(output)); - } else - uncheckedOutputs.insert(std::string(state->symbols[outputName])); - }); + Activity act(*logger, lvlInfo, actUnknown, "evaluating flake"); + + auto vFlake = state->allocValue(); + flake::callFlake(*state, flake, *vFlake); + + enumerateOutputs(*state, + *vFlake, + [&](std::string_view name, Value & vOutput, const PosIdx pos) { + Activity act(*logger, lvlInfo, actUnknown, + fmt("checking flake output '%s'", name)); + + try { + evalSettings.enableImportFromDerivation.setDefault(name != "hydraJobs"); + + state->forceValue(vOutput, pos); + + std::string_view replacement = + name == "defaultPackage" ? "packages..default" : + name == "defaultApp" ? "apps..default" : + name == "defaultTemplate" ? "templates.default" : + name == "defaultBundler" ? "bundlers..default" : + name == "overlay" ? "overlays.default" : + name == "devShell" ? "devShells..default" : + name == "nixosModule" ? 
"nixosModules.default" : + ""; + if (replacement != "") + warn("flake output attribute '%s' is deprecated; use '%s' instead", name, replacement); + + if (name == "checks") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) { + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); + if (checkSystemType(attr_name, attr.pos)) { + state->forceAttrs(*attr.value, attr.pos, ""); + for (auto & attr2 : *attr.value->attrs()) { + auto drvPath = checkDerivation( + fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), + *attr2.value, attr2.pos); + if (drvPath && attr_name == settings.thisSystem.get()) { + drvPaths.push_back(DerivedPath::Built { + .drvPath = makeConstantStorePathRef(*drvPath), + .outputs = OutputsSpec::All { }, + }); + } + } + } + } + } - if (!uncheckedOutputs.empty()) - warn("The following flake outputs are unchecked: %s.", - concatStringsSep(", ", uncheckedOutputs)); // FIXME: quote + else if (name == "formatter") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) { + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); + if (checkSystemType(attr_name, attr.pos)) { + checkApp( + fmt("%s.%s", name, attr_name), + *attr.value, attr.pos); + }; + } + } + + else if (name == "packages" || name == "devShells") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) { + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); + if (checkSystemType(attr_name, attr.pos)) { + state->forceAttrs(*attr.value, attr.pos, ""); + for (auto & attr2 : *attr.value->attrs()) + checkDerivation( + fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), + *attr2.value, attr2.pos); + }; + } + } + + else if (name == "apps") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) { + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); + if (checkSystemType(attr_name, attr.pos)) { + state->forceAttrs(*attr.value, attr.pos, ""); + for (auto & attr2 : *attr.value->attrs()) + checkApp( + fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), + *attr2.value, attr2.pos); + }; + } + } + + else if (name == "defaultPackage" || name == "devShell") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) { + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); + if (checkSystemType(attr_name, attr.pos)) { + checkDerivation( + fmt("%s.%s", name, attr_name), + *attr.value, attr.pos); + }; + } + } + + else if (name == "defaultApp") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) { + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); + if (checkSystemType(attr_name, attr.pos) ) { + checkApp( + fmt("%s.%s", name, attr_name), + *attr.value, attr.pos); + }; + } + } + + else if (name == "legacyPackages") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) { + checkSystemName(state->symbols[attr.name], attr.pos); + checkSystemType(state->symbols[attr.name], attr.pos); + // FIXME: do getDerivations? 
+ } + } + + else if (name == "overlay") + checkOverlay(name, vOutput, pos); + + else if (name == "overlays") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) + checkOverlay(fmt("%s.%s", name, state->symbols[attr.name]), + *attr.value, attr.pos); + } + + else if (name == "nixosModule") + checkModule(name, vOutput, pos); + + else if (name == "nixosModules") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) + checkModule(fmt("%s.%s", name, state->symbols[attr.name]), + *attr.value, attr.pos); + } + + else if (name == "nixosConfigurations") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) + checkNixOSConfiguration(fmt("%s.%s", name, state->symbols[attr.name]), + *attr.value, attr.pos); + } + + else if (name == "hydraJobs") + checkHydraJobs(name, vOutput, pos); + + else if (name == "defaultTemplate") + checkTemplate(name, vOutput, pos); + + else if (name == "templates") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) + checkTemplate(fmt("%s.%s", name, state->symbols[attr.name]), + *attr.value, attr.pos); + } + + else if (name == "defaultBundler") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) { + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); + if (checkSystemType(attr_name, attr.pos)) { + checkBundler( + fmt("%s.%s", name, attr_name), + *attr.value, attr.pos); + }; + } + } + + else if (name == "bundlers") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) { + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); + if (checkSystemType(attr_name, attr.pos)) { + state->forceAttrs(*attr.value, attr.pos, ""); + for (auto & attr2 : *attr.value->attrs()) { + checkBundler( + fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), + *attr2.value, attr2.pos); + } + }; + } + } + + else if ( + name == "lib" + || name == "darwinConfigurations" + || name == "darwinModules" + || name == "flakeModule" + || name == "flakeModules" + || name == "herculesCI" + || name == "homeConfigurations" + || name == "homeModule" + || name == "homeModules" + || name == "nixopsConfigurations" + ) + // Known but unchecked community attribute + ; + + else + warn("unknown flake output '%s'", name); + + } catch (Error & e) { + e.addTrace(resolve(pos), HintFmt("while checking flake output '%s'", name)); + reportError(e); + } + }); + } if (build && !drvPaths.empty()) { Activity act(*logger, lvlInfo, actUnknown, fmt("running %d flake checks", drvPaths.size())); store->buildPaths(drvPaths); } - if (hasErrors) throw Error("some errors were encountered during the evaluation"); @@ -438,7 +810,7 @@ struct CmdFlakeCheck : FlakeCommand, flake_schemas::MixFlakeSchemas "Use '--all-systems' to check all.", concatStringsSep(", ", omittedSystems) ); - } + }; }; }; @@ -723,7 +1095,7 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun } }; -struct CmdFlakeShow : FlakeCommand, MixJSON, flake_schemas::MixFlakeSchemas +struct CmdFlakeShow : FlakeCommand, MixJSON { bool showLegacy = false; bool showAllSystems = false; @@ -756,158 +1128,267 @@ struct CmdFlakeShow : FlakeCommand, MixJSON, flake_schemas::MixFlakeSchemas void run(nix::ref store) override { + evalSettings.enableImportFromDerivation.setDefault(false); + auto state = getEvalState(); auto flake = std::make_shared(lockFlake()); auto localSystem = std::string(settings.thisSystem.get()); - auto [cache, inventory] = 
flake_schemas::call(*state, flake, getDefaultFlakeSchemas()); + std::function &attrPath, + const Symbol &attr)> hasContent; + + // For frameworks it's important that structures are as lazy as possible + // to prevent infinite recursions, performance issues and errors that + // aren't related to the thing to evaluate. As a consequence, they have + // to emit more attributes than strictly (sic) necessary. + // However, these attributes with empty values are not useful to the user + // so we omit them. + hasContent = [&]( + eval_cache::AttrCursor & visitor, + const std::vector &attrPath, + const Symbol &attr) -> bool + { + auto attrPath2(attrPath); + attrPath2.push_back(attr); + auto attrPathS = state->symbols.resolve(attrPath2); + const auto & attrName = state->symbols[attr]; - if (json) { - std::function node, nlohmann::json & obj)> visit; + auto visitor2 = visitor.getAttr(attrName); - visit = [&](ref node, nlohmann::json & obj) - { - flake_schemas::visit( - showAllSystems ? std::optional() : localSystem, - node, - - [&](ref leaf) - { - obj.emplace("leaf", true); - - if (auto what = flake_schemas::what(leaf)) - obj.emplace("what", what); - - if (auto shortDescription = flake_schemas::shortDescription(leaf)) - obj.emplace("shortDescription", shortDescription); - - if (auto drv = flake_schemas::derivation(leaf)) - obj.emplace("derivationName", drv->getAttr(state->sName)->getString()); - - // FIXME: add more stuff - }, - - [&](std::function forEachChild) - { - auto children = nlohmann::json::object(); - forEachChild([&](Symbol attrName, ref node, bool isLast) - { - auto j = nlohmann::json::object(); - try { - visit(node, j); - } catch (EvalError & e) { - // FIXME: make it a flake schema attribute whether to ignore evaluation errors. - if (node->root->state.symbols[flake_schemas::toAttrPath(node)[0]] == "legacyPackages") - j.emplace("failed", true); - else - throw; - } - children.emplace(state->symbols[attrName], std::move(j)); - }); - obj.emplace("children", std::move(children)); - }, - - [&](ref node, const std::vector & systems) - { - obj.emplace("filtered", true); - }); - }; + try { + if ((attrPathS[0] == "apps" + || attrPathS[0] == "checks" + || attrPathS[0] == "devShells" + || attrPathS[0] == "legacyPackages" + || attrPathS[0] == "packages") + && (attrPathS.size() == 1 || attrPathS.size() == 2)) { + for (const auto &subAttr : visitor2->getAttrs()) { + if (hasContent(*visitor2, attrPath2, subAttr)) { + return true; + } + } + return false; + } - auto res = nlohmann::json::object(); + if ((attrPathS.size() == 1) + && (attrPathS[0] == "formatter" + || attrPathS[0] == "nixosConfigurations" + || attrPathS[0] == "nixosModules" + || attrPathS[0] == "overlays" + )) { + for (const auto &subAttr : visitor2->getAttrs()) { + if (hasContent(*visitor2, attrPath2, subAttr)) { + return true; + } + } + return false; + } - flake_schemas::forEachOutput(inventory, [&](Symbol outputName, std::shared_ptr output, const std::string & doc, bool isLast) - { - auto j = nlohmann::json::object(); - - if (!showLegacy && state->symbols[outputName] == "legacyPackages") { - j.emplace("skipped", true); - } else if (output) { - j.emplace("doc", doc); - auto j2 = nlohmann::json::object(); - visit(ref(output), j2); - j.emplace("output", std::move(j2)); - } else - j.emplace("unknown", true); + // If we don't recognize it, it's probably content + return true; + } catch (EvalError & e) { + // Some attrs may contain errors, e.g. legacyPackages of + // nixpkgs. We still want to recurse into it, instead of + // skipping it at all. 
+ return true; + } + }; - res.emplace(state->symbols[outputName], j); - }); + std::function & attrPath, + const std::string & headerPrefix, + const std::string & nextPrefix)> visit; + + visit = [&]( + eval_cache::AttrCursor & visitor, + const std::vector & attrPath, + const std::string & headerPrefix, + const std::string & nextPrefix) + -> nlohmann::json + { + auto j = nlohmann::json::object(); - logger->cout("%s", res.dump()); - } + auto attrPathS = state->symbols.resolve(attrPath); + + Activity act(*logger, lvlInfo, actUnknown, + fmt("evaluating '%s'", concatStringsSep(".", attrPathS))); - else { - logger->cout(ANSI_BOLD "%s" ANSI_NORMAL, flake->flake.lockedRef); + try { + auto recurse = [&]() + { + if (!json) + logger->cout("%s", headerPrefix); + std::vector attrs; + for (const auto &attr : visitor.getAttrs()) { + if (hasContent(visitor, attrPath, attr)) + attrs.push_back(attr); + } - std::function node, - const std::string & headerPrefix, - const std::string & prevPrefix)> visit; + for (const auto & [i, attr] : enumerate(attrs)) { + const auto & attrName = state->symbols[attr]; + bool last = i + 1 == attrs.size(); + auto visitor2 = visitor.getAttr(attrName); + auto attrPath2(attrPath); + attrPath2.push_back(attr); + auto j2 = visit(*visitor2, attrPath2, + fmt(ANSI_GREEN "%s%s" ANSI_NORMAL ANSI_BOLD "%s" ANSI_NORMAL, nextPrefix, last ? treeLast : treeConn, attrName), + nextPrefix + (last ? treeNull : treeLine)); + if (json) j.emplace(attrName, std::move(j2)); + } + }; - visit = [&]( - ref node, - const std::string & headerPrefix, - const std::string & prevPrefix) - { - flake_schemas::visit( - showAllSystems ? std::optional() : localSystem, - node, - - [&](ref leaf) - { - auto s = headerPrefix; - - if (auto what = flake_schemas::what(leaf)) - s += fmt(": %s", *what); - - if (auto drv = flake_schemas::derivation(leaf)) - s += fmt(ANSI_ITALIC " [%s]" ANSI_NORMAL, drv->getAttr(state->sName)->getString()); - - logger->cout(s); - }, - - [&](std::function forEachChild) - { - logger->cout(headerPrefix); - forEachChild([&](Symbol attrName, ref node, bool isLast) - { - visit(node, - fmt(ANSI_GREEN "%s%s" ANSI_NORMAL ANSI_BOLD "%s" ANSI_NORMAL, prevPrefix, - isLast ? treeLast : treeConn, state->symbols[attrName]), - prevPrefix + (isLast ? treeNull : treeLine)); - }); - }, - - [&](ref node, const std::vector & systems) - { - logger->cout(fmt("%s " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--all-systems' to show)", headerPrefix)); - }); - }; + auto showDerivation = [&]() + { + auto name = visitor.getAttr(state->sName)->getString(); + if (json) { + std::optional description; + if (auto aMeta = visitor.maybeGetAttr(state->sMeta)) { + if (auto aDescription = aMeta->maybeGetAttr(state->sDescription)) + description = aDescription->getString(); + } + j.emplace("type", "derivation"); + j.emplace("name", name); + if (description) + j.emplace("description", *description); + } else { + logger->cout("%s: %s '%s'", + headerPrefix, + attrPath.size() == 2 && attrPathS[0] == "devShell" ? "development environment" : + attrPath.size() >= 2 && attrPathS[0] == "devShells" ? "development environment" : + attrPath.size() == 3 && attrPathS[0] == "checks" ? "derivation" : + attrPath.size() >= 1 && attrPathS[0] == "hydraJobs" ? 
"derivation" : + "package", + name); + } + }; + + if (attrPath.size() == 0 + || (attrPath.size() == 1 && ( + attrPathS[0] == "defaultPackage" + || attrPathS[0] == "devShell" + || attrPathS[0] == "formatter" + || attrPathS[0] == "nixosConfigurations" + || attrPathS[0] == "nixosModules" + || attrPathS[0] == "defaultApp" + || attrPathS[0] == "templates" + || attrPathS[0] == "overlays")) + || ((attrPath.size() == 1 || attrPath.size() == 2) + && (attrPathS[0] == "checks" + || attrPathS[0] == "packages" + || attrPathS[0] == "devShells" + || attrPathS[0] == "apps")) + ) + { + recurse(); + } - flake_schemas::forEachOutput(inventory, [&](Symbol outputName, std::shared_ptr output, const std::string & doc, bool isLast) - { - auto headerPrefix = fmt( - ANSI_GREEN "%s" ANSI_NORMAL ANSI_BOLD "%s" ANSI_NORMAL, - isLast ? treeLast : treeConn, state->symbols[outputName]); - - if (!showLegacy && state->symbols[outputName] == "legacyPackages") { - logger->cout(headerPrefix); - logger->cout( - ANSI_GREEN "%s" "%s" ANSI_NORMAL ANSI_ITALIC "%s" ANSI_NORMAL, - isLast ? treeNull : treeLine, - treeLast, - "(skipped; use '--legacy' to show)"); - } else if (output) { - visit(ref(output), headerPrefix, isLast ? treeNull : treeLine); - } else { - logger->cout(headerPrefix); - logger->cout( - ANSI_GREEN "%s" "%s" ANSI_NORMAL ANSI_ITALIC "%s" ANSI_NORMAL, - isLast ? treeNull : treeLine, - treeLast, - "(unknown flake output)"); + else if ( + (attrPath.size() == 2 && (attrPathS[0] == "defaultPackage" || attrPathS[0] == "devShell" || attrPathS[0] == "formatter")) + || (attrPath.size() == 3 && (attrPathS[0] == "checks" || attrPathS[0] == "packages" || attrPathS[0] == "devShells")) + ) + { + if (!showAllSystems && std::string(attrPathS[1]) != localSystem) { + if (!json) + logger->cout(fmt("%s " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--all-systems' to show)", headerPrefix)); + else { + logger->warn(fmt("%s omitted (use '--all-systems' to show)", concatStringsSep(".", attrPathS))); + } + } else { + if (visitor.isDerivation()) + showDerivation(); + else + throw Error("expected a derivation"); + } } - }); - } + + else if (attrPath.size() > 0 && attrPathS[0] == "hydraJobs") { + if (visitor.isDerivation()) + showDerivation(); + else + recurse(); + } + + else if (attrPath.size() > 0 && attrPathS[0] == "legacyPackages") { + if (attrPath.size() == 1) + recurse(); + else if (!showLegacy){ + if (!json) + logger->cout(fmt("%s " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--legacy' to show)", headerPrefix)); + else { + logger->warn(fmt("%s omitted (use '--legacy' to show)", concatStringsSep(".", attrPathS))); + } + } else if (!showAllSystems && std::string(attrPathS[1]) != localSystem) { + if (!json) + logger->cout(fmt("%s " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--all-systems' to show)", headerPrefix)); + else { + logger->warn(fmt("%s omitted (use '--all-systems' to show)", concatStringsSep(".", attrPathS))); + } + } else { + if (visitor.isDerivation()) + showDerivation(); + else if (attrPath.size() <= 2) + // FIXME: handle recurseIntoAttrs + recurse(); + } + } + + else if ( + (attrPath.size() == 2 && attrPathS[0] == "defaultApp") || + (attrPath.size() == 3 && attrPathS[0] == "apps")) + { + auto aType = visitor.maybeGetAttr("type"); + if (!aType || aType->getString() != "app") + state->error("not an app definition").debugThrow(); + if (json) { + j.emplace("type", "app"); + } else { + logger->cout("%s: app", headerPrefix); + } + } + + else if ( + (attrPath.size() == 1 && attrPathS[0] == "defaultTemplate") || + (attrPath.size() == 
2 && attrPathS[0] == "templates")) + { + auto description = visitor.getAttr("description")->getString(); + if (json) { + j.emplace("type", "template"); + j.emplace("description", description); + } else { + logger->cout("%s: template: " ANSI_BOLD "%s" ANSI_NORMAL, headerPrefix, description); + } + } + + else { + auto [type, description] = + (attrPath.size() == 1 && attrPathS[0] == "overlay") + || (attrPath.size() == 2 && attrPathS[0] == "overlays") ? std::make_pair("nixpkgs-overlay", "Nixpkgs overlay") : + attrPath.size() == 2 && attrPathS[0] == "nixosConfigurations" ? std::make_pair("nixos-configuration", "NixOS configuration") : + (attrPath.size() == 1 && attrPathS[0] == "nixosModule") + || (attrPath.size() == 2 && attrPathS[0] == "nixosModules") ? std::make_pair("nixos-module", "NixOS module") : + std::make_pair("unknown", "unknown"); + if (json) { + j.emplace("type", type); + } else { + logger->cout("%s: " ANSI_WARNING "%s" ANSI_NORMAL, headerPrefix, description); + } + } + } catch (EvalError & e) { + if (!(attrPath.size() > 0 && attrPathS[0] == "legacyPackages")) + throw; + } + + return j; + }; + + auto cache = openEvalCache(*state, flake); + + auto j = visit(*cache->getRoot(), {}, fmt(ANSI_BOLD "%s" ANSI_NORMAL, flake->flake.lockedRef), ""); + if (json) + logger->cout("%s", j.dump()); } }; diff --git a/src/nix/local.mk b/src/nix/local.mk index 43a22a2afb3..28b30b58619 100644 --- a/src/nix/local.mk +++ b/src/nix/local.mk @@ -55,9 +55,3 @@ $(d)/main.cc: \ $(d)/profile.cc: $(d)/profile.md $(d)/profile.md: $(d)/profiles.md.gen.hh - -src/nix/flake.cc: src/nix/call-flake-schemas.nix.gen.hh src/nix/builtin-flake-schemas.nix.gen.hh - -src/nix/builtin-flake-schemas.nix: $(default_flake_schemas)/flake.nix - $(trace-gen) cp $^ $@ - @chmod +w $@ diff --git a/tests/functional/flakes/check.sh b/tests/functional/flakes/check.sh index 48a0d333aac..3b83dcafe4b 100755 --- a/tests/functional/flakes/check.sh +++ b/tests/functional/flakes/check.sh @@ -16,6 +16,17 @@ EOF nix flake check $flakeDir +cat > $flakeDir/flake.nix < $flakeDir/flake.nix < show-output.json nix eval --impure --expr ' let show_output = builtins.fromJSON (builtins.readFile ./show-output.json); in -assert show_output.packages.output.children.someOtherSystem.filtered; -assert show_output.packages.output.children.${builtins.currentSystem}.children.default.derivationName == "simple"; -assert show_output.legacyPackages.skipped; +assert show_output.packages.someOtherSystem.default == {}; +assert show_output.packages.${builtins.currentSystem}.default.name == "simple"; +assert show_output.legacyPackages.${builtins.currentSystem} == {}; true ' @@ -26,8 +26,8 @@ nix flake show --json --all-systems > show-output.json nix eval --impure --expr ' let show_output = builtins.fromJSON (builtins.readFile ./show-output.json); in -assert show_output.packages.output.children.someOtherSystem.children.default.derivationName == "simple"; -assert show_output.legacyPackages.skipped; +assert show_output.packages.someOtherSystem.default.name == "simple"; +assert show_output.legacyPackages.${builtins.currentSystem} == {}; true ' @@ -36,7 +36,34 @@ nix flake show --json --legacy > show-output.json nix eval --impure --expr ' let show_output = builtins.fromJSON (builtins.readFile ./show-output.json); in -assert show_output.legacyPackages.output.children.${builtins.currentSystem}.children.hello.derivationName == "simple"; +assert show_output.legacyPackages.${builtins.currentSystem}.hello.name == "simple"; +true +' + +# Test that attributes are only reported when they 
have actual content +cat >flake.nix < show-output.json +nix eval --impure --expr ' +let show_output = builtins.fromJSON (builtins.readFile ./show-output.json); +in +assert show_output == { }; true ' @@ -56,7 +83,7 @@ nix flake show --json --legacy --all-systems > show-output.json nix eval --impure --expr ' let show_output = builtins.fromJSON (builtins.readFile ./show-output.json); in -assert show_output.legacyPackages.output.children.${builtins.currentSystem}.children.AAAAAASomeThingsFailToEvaluate.failed; -assert show_output.legacyPackages.output.children.${builtins.currentSystem}.children.simple.derivationName == "simple"; +assert show_output.legacyPackages.${builtins.currentSystem}.AAAAAASomeThingsFailToEvaluate == { }; +assert show_output.legacyPackages.${builtins.currentSystem}.simple.name == "simple"; true ' diff --git a/tests/functional/fmt.sh b/tests/functional/fmt.sh index b0a0b2e5f7b..b29fe64d6bc 100755 --- a/tests/functional/fmt.sh +++ b/tests/functional/fmt.sh @@ -32,6 +32,4 @@ cat << EOF > flake.nix EOF nix fmt ./file ./folder | grep 'Formatting: ./file ./folder' nix flake check - -clearStore -nix flake show | grep -P "package.*\[formatter\]" +nix flake show | grep -P "package 'formatter'" From d9284d246ced1ccdcd358ecdc9c6c9153ecaa264 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 11 Jul 2024 16:21:27 +0200 Subject: [PATCH 106/361] Remove unused InstallableFlake::getFlakeOutputs() (cherry picked from commit 87323a5689f4789d9fc25271a16ba57c57f76392) --- src/libcmd/installable-flake.cc | 14 -------------- src/libcmd/installable-flake.hh | 2 -- 2 files changed, 16 deletions(-) diff --git a/src/libcmd/installable-flake.cc b/src/libcmd/installable-flake.cc index 852a5618efd..8796ad5ba79 100644 --- a/src/libcmd/installable-flake.cc +++ b/src/libcmd/installable-flake.cc @@ -43,20 +43,6 @@ std::vector InstallableFlake::getActualAttrPaths() return res; } -Value * InstallableFlake::getFlakeOutputs(EvalState & state, const flake::LockedFlake & lockedFlake) -{ - auto vFlake = state.allocValue(); - - callFlake(state, lockedFlake, *vFlake); - - auto aOutputs = vFlake->attrs()->get(state.symbols.create("outputs")); - assert(aOutputs); - - state.forceValue(*aOutputs->value, aOutputs->value->determinePos(noPos)); - - return aOutputs->value; -} - static std::string showAttrPaths(const std::vector & paths) { std::string s; diff --git a/src/libcmd/installable-flake.hh b/src/libcmd/installable-flake.hh index b0d6f5afca6..8e0a232ef8a 100644 --- a/src/libcmd/installable-flake.hh +++ b/src/libcmd/installable-flake.hh @@ -53,8 +53,6 @@ struct InstallableFlake : InstallableValue std::vector getActualAttrPaths(); - Value * getFlakeOutputs(EvalState & state, const flake::LockedFlake & lockedFlake); - DerivedPathsWithInfo toDerivedPaths() override; std::pair toValue(EvalState & state) override; From 21ddd181fe6d4b8d345b783e72aa54b2340fb541 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 30 Oct 2024 12:53:05 +0100 Subject: [PATCH 107/361] Disable mingw32 jobs This gets rid of an evaluation error in devShells.aarch64-linux.x86_64-w64-mingw32. 
https://github.com/DeterminateSystems/nix-priv/actions/runs/11576785241/job/32226923043?pr=39 --- flake.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/flake.nix b/flake.nix index 848d04a7ae8..97e4b50d5f7 100644 --- a/flake.nix +++ b/flake.nix @@ -44,7 +44,7 @@ "riscv64-unknown-linux-gnu" "x86_64-unknown-netbsd" "x86_64-unknown-freebsd" - "x86_64-w64-mingw32" + #"x86_64-w64-mingw32" ]; stdenvs = [ From f36f4a4f52bb32b91aa4709eb76f5e03c2d74fd9 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 30 Oct 2024 13:30:39 +0100 Subject: [PATCH 108/361] Disable some Hydra jobs that we don't care about at the moment --- flake.nix | 10 +++++----- packaging/hydra.nix | 6 ++++++ 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/flake.nix b/flake.nix index 97e4b50d5f7..ddda3191bfe 100644 --- a/flake.nix +++ b/flake.nix @@ -32,7 +32,7 @@ then "" else "pre${builtins.substring 0 8 (self.lastModifiedDate or self.lastModified or "19700101")}_${self.shortRev or "dirty"}"; - linux32BitSystems = [ "i686-linux" ]; + linux32BitSystems = [ /* "i686-linux" */ ]; linux64BitSystems = [ "x86_64-linux" "aarch64-linux" ]; linuxSystems = linux32BitSystems ++ linux64BitSystems; darwinSystems = [ "x86_64-darwin" "aarch64-darwin" ]; @@ -175,7 +175,7 @@ checks = forAllSystems (system: { binaryTarball = self.hydraJobs.binaryTarball.${system}; - installTests = self.hydraJobs.installTests.${system}; + #installTests = self.hydraJobs.installTests.${system}; nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system}; rl-next = let pkgs = nixpkgsFor.${system}.native; @@ -201,7 +201,7 @@ } // lib.optionalAttrs (! nixpkgsFor.${system}.native.stdenv.hostPlatform.isDarwin) { # TODO: enable static builds for darwin, blocked on: # https://github.com/NixOS/nixpkgs/issues/320448 - "static-" = nixpkgsFor.${system}.static; + #"static-" = nixpkgsFor.${system}.static; }) (nixpkgsPrefix: nixpkgs: flatMapAttrs nixpkgs.nixComponents @@ -238,7 +238,7 @@ (pkgName: {}: { # These attributes go right into `packages.`. "${pkgName}" = nixpkgsFor.${system}.native.nixComponents.${pkgName}; - "${pkgName}-static" = nixpkgsFor.${system}.static.nixComponents.${pkgName}; + #"${pkgName}-static" = nixpkgsFor.${system}.static.nixComponents.${pkgName}; } // flatMapAttrs (lib.genAttrs crossSystems (_: { })) (crossSystem: {}: { # These attributes go right into `packages.`. @@ -362,7 +362,7 @@ in (makeShells "native" nixpkgsFor.${system}.native) // (lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.isDarwin) - (makeShells "static" nixpkgsFor.${system}.static) // + #(makeShells "static" nixpkgsFor.${system}.static) // (forAllCrossSystems (crossSystem: let pkgs = nixpkgsFor.${system}.cross.${crossSystem}; in makeShell pkgs pkgs.stdenv))) // { default = self.devShells.${system}.native-stdenvPackages; diff --git a/packaging/hydra.nix b/packaging/hydra.nix index dbe99247675..999fa651594 100644 --- a/packaging/hydra.nix +++ b/packaging/hydra.nix @@ -64,6 +64,7 @@ in shellInputs = forAllSystems (system: self.devShells.${system}.default.inputDerivation); + /* buildStatic = forAllPackages (pkgName: lib.genAttrs linux64BitSystems (system: nixpkgsFor.${system}.static.nixComponents.${pkgName})); @@ -85,6 +86,7 @@ in readlineFlavor = "readline"; } ); + */ # Perl bindings for various platforms. perlBindings = forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.nix-perl-bindings); @@ -94,6 +96,7 @@ in # the installation script. 
binaryTarball = forAllSystems (system: binaryTarball nixpkgsFor.${system}.native.nix nixpkgsFor.${system}.native); + /* binaryTarballCross = lib.genAttrs [ "x86_64-linux" ] (system: forAllCrossSystems (crossSystem: binaryTarball @@ -125,6 +128,7 @@ in self.hydraJobs.binaryTarballCross."x86_64-linux"."armv7l-unknown-linux-gnueabihf" self.hydraJobs.binaryTarballCross."x86_64-linux"."riscv64-unknown-linux-gnu" ]; + */ # docker image with Nix inside dockerImage = lib.genAttrs linux64BitSystems (system: self.packages.${system}.dockerImage); @@ -178,6 +182,7 @@ in nixpkgs = nixpkgs-regression; }; + /* installTests = forAllSystems (system: let pkgs = nixpkgsFor.${system}.native; in pkgs.runCommand "install-tests" @@ -197,4 +202,5 @@ in binaryTarballs = self.hydraJobs.binaryTarball; inherit nixpkgsFor; }; + */ } From f9180f12c4ca28e224db7f7efbc9600b2e25da8a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 30 Oct 2024 15:30:29 +0100 Subject: [PATCH 109/361] release notes: 2.24.9 --- doc/manual/rl-next/filesystem-errors.md | 14 -------------- doc/manual/rl-next/verify-tls.md | 8 -------- doc/manual/src/release-notes/rl-2.24.md | 15 +++++++++++++++ 3 files changed, 15 insertions(+), 22 deletions(-) delete mode 100644 doc/manual/rl-next/filesystem-errors.md delete mode 100644 doc/manual/rl-next/verify-tls.md diff --git a/doc/manual/rl-next/filesystem-errors.md b/doc/manual/rl-next/filesystem-errors.md deleted file mode 100644 index faa9352b96a..00000000000 --- a/doc/manual/rl-next/filesystem-errors.md +++ /dev/null @@ -1,14 +0,0 @@ ---- -synopsis: wrap filesystem exceptions more correctly -issues: [] -prs: [11378] ---- - - -With the switch to `std::filesystem` in different places, Nix started to throw `std::filesystem::filesystem_error` in many places instead of its own exceptions. - -This lead to no longer generating error traces, for example when listing a non-existing directory. - -This version catches these types of exception correctly and wrap them into Nix's own exeception type. - -Author: [**@Mic92**](https://github.com/Mic92) diff --git a/doc/manual/rl-next/verify-tls.md b/doc/manual/rl-next/verify-tls.md deleted file mode 100644 index afc689f46a9..00000000000 --- a/doc/manual/rl-next/verify-tls.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -synopsis: "`` uses TLS verification" -prs: [11585] ---- - -Previously `` did not do TLS verification. This was because the Nix sandbox in the past did not have access to TLS certificates, and Nix checks the hash of the fetched file anyway. However, this can expose authentication data from `netrc` and URLs to man-in-the-middle attackers. In addition, Nix now in some cases (such as when using impure derivations) does *not* check the hash. Therefore we have now enabled TLS verification. This means that downloads by `` will now fail if you're fetching from a HTTPS server that does not have a valid certificate. - -`` is also known as the builtin derivation builder `builtin:fetchurl`. It's not to be confused with the evaluation-time function `builtins.fetchurl`, which was not affected by this issue. diff --git a/doc/manual/src/release-notes/rl-2.24.md b/doc/manual/src/release-notes/rl-2.24.md index 5bcc1d79ced..38358d72856 100644 --- a/doc/manual/src/release-notes/rl-2.24.md +++ b/doc/manual/src/release-notes/rl-2.24.md @@ -274,6 +274,21 @@ be configured using the `warn-large-path-threshold` setting, e.g. `--warn-large-path-threshold 100M`. 
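A minimal usage sketch for the `warn-large-path-threshold` setting mentioned in the note above; the flag syntax is taken from the note itself, while the installable used here is only an example:

```bash
# Warn whenever a path larger than 100 MiB is added to the store.
# The installable below is purely illustrative.
nix build nixpkgs#hello --warn-large-path-threshold 100M

# The equivalent persistent form in nix.conf would be:
#   warn-large-path-threshold = 100M
```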
+- Wrap filesystem exceptions more correctly [#11378](https://github.com/NixOS/nix/pull/11378) + + With the switch to `std::filesystem` in different places, Nix started to throw `std::filesystem::filesystem_error` in many places instead of its own exceptions. + + This led to no longer generating error traces, for example when listing a non-existing directory. + + This version catches these types of exception correctly and wraps them into Nix's own exeception type. + + Author: [**@Mic92**](https://github.com/Mic92) + +- `` uses TLS verification [#11585](https://github.com/NixOS/nix/pull/11585) + + Previously `` did not do TLS verification. This was because the Nix sandbox in the past did not have access to TLS certificates, and Nix checks the hash of the fetched file anyway. However, this can expose authentication data from `netrc` and URLs to man-in-the-middle attackers. In addition, Nix now in some cases (such as when using impure derivations) does *not* check the hash. Therefore we have now enabled TLS verification. This means that downloads by `` will now fail if you're fetching from a HTTPS server that does not have a valid certificate. + + `` is also known as the builtin derivation builder `builtin:fetchurl`. It's not to be confused with the evaluation-time function `builtins.fetchurl`, which was not affected by this issue. # Contributors From 2d61e3c7837e5b054a7ea9eafa862293883dcc91 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 30 Oct 2024 15:50:27 +0100 Subject: [PATCH 110/361] Add Determinate Nix release notes to the manual --- doc/manual/book.toml | 2 +- doc/manual/src/SUMMARY.md.in | 5 ++++- doc/manual/src/release-notes-determinate/changes.md | 7 +++++++ doc/manual/src/release-notes-determinate/index.md | 3 +++ 4 files changed, 15 insertions(+), 2 deletions(-) create mode 100644 doc/manual/src/release-notes-determinate/changes.md create mode 100644 doc/manual/src/release-notes-determinate/index.md diff --git a/doc/manual/book.toml b/doc/manual/book.toml index 73fb7e75e24..f1d7c2c060f 100644 --- a/doc/manual/book.toml +++ b/doc/manual/book.toml @@ -1,5 +1,5 @@ [book] -title = "Nix Reference Manual" +title = "Determinate Nix Reference Manual" [output.html] additional-css = ["custom.css"] diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index 8739599a03e..f4c5dcfbd30 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -125,7 +125,10 @@ - [C++ style guide](development/cxx.md) - [Experimental Features](development/experimental-features.md) - [Contributing](development/contributing.md) -- [Releases](release-notes/index.md) +- [Determinate Nix Releases Notes](release-notes-determinate/index.md) + - [Changes between Nix and Determinate Nix](release-notes-determinate/changes.md) + - [Release 1.0 (2024-11-??)](release-notes-determinate/rl-1.0.md) +- [Nix Releases Notes](release-notes/index.md) {{#include ./SUMMARY-rl-next.md}} - [Release 2.24 (2024-07-31)](release-notes/rl-2.24.md) - [Release 2.23 (2024-06-03)](release-notes/rl-2.23.md) diff --git a/doc/manual/src/release-notes-determinate/changes.md b/doc/manual/src/release-notes-determinate/changes.md new file mode 100644 index 00000000000..a71867ea2ec --- /dev/null +++ b/doc/manual/src/release-notes-determinate/changes.md @@ -0,0 +1,7 @@ +# Changes between Nix and Determinate Nix + +This section lists the differences between upstream Nix 2.24 and Determinate Nix 1.0. + +* In Determinate Nix, flakes are stable. You no longer need to enable the `flakes` experimental feature. 
+ +* In Determinate Nix, the new Nix CLI (i.e. the `nix` command) is stable. You no longer need to enable the `nix-command` experimental feature. diff --git a/doc/manual/src/release-notes-determinate/index.md b/doc/manual/src/release-notes-determinate/index.md new file mode 100644 index 00000000000..bba33084424 --- /dev/null +++ b/doc/manual/src/release-notes-determinate/index.md @@ -0,0 +1,3 @@ +# Determinate Nix Release Notes + +This chapter lists the differences between Nix and Determinate Nix, as well as the release history of Determinate Nix. From 707cbad5f8da4a89da1c07e5229e990f1b39c170 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 30 Oct 2024 18:46:59 +0100 Subject: [PATCH 111/361] Show Determinate Nix version number `nix --version` will now show `nix (Determinate Nix 1.0) 2.24.10`. --- .version-determinate | 1 + package.nix | 1 + src/libmain/shared.cc | 2 +- src/libstore/globals.cc | 2 ++ src/libstore/globals.hh | 2 ++ src/libstore/local.mk | 3 ++- src/libstore/meson.build | 6 ++++-- 7 files changed, 13 insertions(+), 4 deletions(-) create mode 100644 .version-determinate diff --git a/.version-determinate b/.version-determinate new file mode 100644 index 00000000000..d3827e75a5c --- /dev/null +++ b/.version-determinate @@ -0,0 +1 @@ +1.0 diff --git a/package.nix b/package.nix index e1b4aebb05d..ac196ef9090 100644 --- a/package.nix +++ b/package.nix @@ -158,6 +158,7 @@ in { fileset = fileset.intersection baseFiles (fileset.unions ([ # For configure ./.version + ./.version-determinate ./configure.ac ./m4 # TODO: do we really need README.md? It doesn't seem used in the build. diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 59e1e48b777..65d8b661b62 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -294,7 +294,7 @@ void parseCmdLine(const std::string & programName, const Strings & args, void printVersion(const std::string & programName) { - std::cout << fmt("%1% (Determinate Nix) %2%", programName, nixVersion) << std::endl; + std::cout << fmt("%s (Determinate Nix %s) %s", programName, determinateNixVersion, nixVersion) << std::endl; if (verbosity > lvlInfo) { Strings cfg; #if HAVE_BOEHMGC diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 439a6f97c08..89b79141cfb 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -273,6 +273,8 @@ Path Settings::getDefaultSSLCertFile() const std::string nixVersion = PACKAGE_VERSION; +const std::string determinateNixVersion = DETERMINATE_NIX_VERSION; + NLOHMANN_JSON_SERIALIZE_ENUM(SandboxMode, { {SandboxMode::smEnabled, true}, {SandboxMode::smRelaxed, "relaxed"}, diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 8760c9d145b..6c66b579575 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -1247,6 +1247,8 @@ std::vector getUserConfigFiles(); extern const std::string nixVersion; +extern const std::string determinateNixVersion; + /** * @param loadConfig Whether to load configuration from `nix.conf`, `NIX_CONFIG`, etc. May be disabled for unit tests. * @note When using libexpr, and/or libmain, This is not sufficient. See initNix(). 
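A quick sketch of the behaviour the version-number patch above is after, with the expected output taken from its commit message (the actual numbers depend on `.version` and `.version-determinate` at build time):

```bash
# Print the version; after this patch the Determinate Nix version is
# reported alongside the upstream Nix version.
nix --version
# -> nix (Determinate Nix 1.0) 2.24.10
```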
diff --git a/src/libstore/local.mk b/src/libstore/local.mk index 5dc8f3370bc..c138ed0a86e 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -73,7 +73,8 @@ libstore_CXXFLAGS += \ -DNIX_CONF_DIR=\"$(NIX_ROOT)$(sysconfdir)/nix\" \ -DNIX_BIN_DIR=\"$(NIX_ROOT)$(bindir)\" \ -DNIX_MAN_DIR=\"$(NIX_ROOT)$(mandir)\" \ - -DLSOF=\"$(NIX_ROOT)$(lsof)\" + -DLSOF=\"$(NIX_ROOT)$(lsof)\" \ + -DDETERMINATE_NIX_VERSION=\""$(shell cat ./.version-determinate)"\" ifeq ($(embedded_sandbox_shell),yes) libstore_CXXFLAGS += -DSANDBOX_SHELL=\"__embedded_sandbox_shell__\" diff --git a/src/libstore/meson.build b/src/libstore/meson.build index 50b15e15dc7..196400d098a 100644 --- a/src/libstore/meson.build +++ b/src/libstore/meson.build @@ -12,6 +12,8 @@ project('nix-store', 'cpp', license : 'LGPL-2.1-or-later', ) +fs = import('fs') + cxx = meson.get_compiler('cpp') subdir('build-utils-meson/deps-lists') @@ -21,6 +23,8 @@ configdata = configuration_data() # TODO rename, because it will conflict with downstream projects configdata.set_quoted('PACKAGE_VERSION', meson.project_version()) +configdata.set_quoted('DETERMINATE_NIX_VERSION', fs.read('./.version-determinate')) + configdata.set_quoted('SYSTEM', host_machine.system()) deps_private_maybe_subproject = [ @@ -320,8 +324,6 @@ else subdir('unix') endif -fs = import('fs') - prefix = get_option('prefix') # For each of these paths, assume that it is relative to the prefix unless # it is already an absolute path (which is the default for store-dir, state-dir, and log-dir). From e84063c54073b3aa8c5898732af50a3f8fc45b1e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 31 Oct 2024 12:08:17 +0100 Subject: [PATCH 112/361] .version-determinate: Use semver Co-authored-by: Cole Helbling --- .version-determinate | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version-determinate b/.version-determinate index d3827e75a5c..3eefcb9dd5b 100644 --- a/.version-determinate +++ b/.version-determinate @@ -1 +1 @@ -1.0 +1.0.0 From 3a4a97aa55466093cde1f488bdd8f16d5b51bf7a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 1 Nov 2024 16:03:58 +0100 Subject: [PATCH 113/361] Set the Determinate version number to 0.0.1 --- .version-determinate | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version-determinate b/.version-determinate index 3eefcb9dd5b..8acdd82b765 100644 --- a/.version-determinate +++ b/.version-determinate @@ -1 +1 @@ -1.0.0 +0.0.1 From 3fdd7d035d5c8e86198fdb08193c5e6833660976 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 4 Nov 2024 14:31:52 +0100 Subject: [PATCH 114/361] Fix Meson build --- src/libstore/.version-determinate | 1 + src/libstore/meson.build | 2 +- src/libstore/package.nix | 3 +++ 3 files changed, 5 insertions(+), 1 deletion(-) create mode 120000 src/libstore/.version-determinate diff --git a/src/libstore/.version-determinate b/src/libstore/.version-determinate new file mode 120000 index 00000000000..c4121e0c32d --- /dev/null +++ b/src/libstore/.version-determinate @@ -0,0 +1 @@ +../../.version-determinate \ No newline at end of file diff --git a/src/libstore/meson.build b/src/libstore/meson.build index 2670dfcde4b..41bbb438d04 100644 --- a/src/libstore/meson.build +++ b/src/libstore/meson.build @@ -23,7 +23,7 @@ configdata = configuration_data() # TODO rename, because it will conflict with downstream projects configdata.set_quoted('PACKAGE_VERSION', meson.project_version()) -configdata.set_quoted('DETERMINATE_NIX_VERSION', fs.read('./.version-determinate')) 
+configdata.set_quoted('DETERMINATE_NIX_VERSION', fs.read('.version-determinate').strip()) configdata.set_quoted('SYSTEM', host_machine.system()) diff --git a/src/libstore/package.nix b/src/libstore/package.nix index d98bac16d33..f1c880e24bc 100644 --- a/src/libstore/package.nix +++ b/src/libstore/package.nix @@ -38,8 +38,11 @@ mkMesonDerivation (finalAttrs: { fileset = fileset.unions [ ../../build-utils-meson ./build-utils-meson + # FIXME: get rid of these symlinks. ../../.version ./.version + ../../.version-determinate + ./.version-determinate ./meson.build ./meson.options ./linux/meson.build From 7c9f19ceeeb4f9d1527a75f724031f96c94e98fc Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 6 Nov 2024 15:19:03 +0100 Subject: [PATCH 115/361] Move scripts/flake-regressions.sh into the flake-regressions repo It already contained a script "eval-all.sh" that did almost the same thing. --- .github/workflows/ci.yml | 2 +- scripts/flake-regressions.sh | 27 --------------------------- 2 files changed, 1 insertion(+), 28 deletions(-) delete mode 100755 scripts/flake-regressions.sh diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fd89614f703..693cd6ea679 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -78,4 +78,4 @@ jobs: with: flakehub: true - uses: DeterminateSystems/magic-nix-cache-action@main - - run: nix build --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH scripts/flake-regressions.sh + - run: nix build --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH MAX_FLAKES=25 flake-regressions/eval-all.sh diff --git a/scripts/flake-regressions.sh b/scripts/flake-regressions.sh deleted file mode 100755 index d765311345c..00000000000 --- a/scripts/flake-regressions.sh +++ /dev/null @@ -1,27 +0,0 @@ -#! /usr/bin/env bash - -set -e - -echo "Nix version:" -nix --version - -cd flake-regressions - -status=0 - -flakes=$(find tests -mindepth 3 -maxdepth 3 -type d -not -path '*/.*' | sort | head -n25) - -echo "Running flake tests..." - -for flake in $flakes; do - - if ! 
REGENERATE=0 ./eval-flake.sh "$flake"; then - status=1 - echo "❌ $flake" - else - echo "✅ $flake" - fi - -done - -exit "$status" From a7c484ecc5987dde6c9bf52f025c94c2d086b615 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 6 Nov 2024 18:48:05 +0100 Subject: [PATCH 116/361] flake_regressions: Use a bigger runner --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 693cd6ea679..9f1c1e8771b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -60,7 +60,7 @@ jobs: flake_regressions: needs: build_x86_64-linux - runs-on: ubuntu-22.04 + runs-on: UbuntuLatest32Cores128G steps: - name: Checkout nix uses: actions/checkout@v4 From 28752fe28868f2c1a4d3c8a86a1ada94b99cce35 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 22 Jan 2025 12:16:44 +0100 Subject: [PATCH 117/361] Mark official release --- flake.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/flake.nix b/flake.nix index 8edc2266f08..d8a458c1f4d 100644 --- a/flake.nix +++ b/flake.nix @@ -24,7 +24,7 @@ let inherit (nixpkgs) lib; - officialRelease = false; + officialRelease = true; linux32BitSystems = [ "i686-linux" ]; linux64BitSystems = [ "x86_64-linux" "aarch64-linux" ]; From 91e60321f617990c06216abcc6a836e12e04aa0f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 22 Jan 2025 13:36:30 +0100 Subject: [PATCH 118/361] Bump version --- .version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version b/.version index 7a25c70f90c..f34083e034a 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.26.0 +2.26.1 From 2301d86f32625ea9dfe87061203da45f2476afdc Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 22 Jan 2025 17:42:52 +0100 Subject: [PATCH 119/361] GitRepo::fetch(): Cleanup (cherry picked from commit bd10b859f71751e349af59349385af27aea40a13) --- src/libfetchers/git-utils.cc | 14 ++++++-------- src/libutil/util.hh | 11 +++++++++++ 2 files changed, 17 insertions(+), 8 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index b54416b1062..3b15a85ceaf 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -206,7 +206,8 @@ static git_packbuilder_progress PACKBUILDER_PROGRESS_CHECK_INTERRUPT = &packBuil } // extern "C" -static void initRepoAtomically(std::filesystem::path &path, bool bare) { +static void initRepoAtomically(std::filesystem::path &path, bool bare) +{ if (pathExists(path.string())) return; Path tmpDir = createTempDir(os_string_to_string(PathViewNG { std::filesystem::path(path).parent_path() })); @@ -544,13 +545,10 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this // then use code that was removed in this commit (see blame) auto dir = this->path; - Strings gitArgs; - if (shallow) { - gitArgs = { "-C", dir.string(), "fetch", "--quiet", "--force", "--depth", "1", "--", url, refspec }; - } - else { - gitArgs = { "-C", dir.string(), "fetch", "--quiet", "--force", "--", url, refspec }; - } + Strings gitArgs{"-C", dir.string(), "fetch", "--quiet", "--force"}; + if (shallow) + append(gitArgs, {"--depth", "1"}); + append(gitArgs, {std::string("--"), url, refspec}); runProgram(RunOptions { .program = "git", diff --git a/src/libutil/util.hh b/src/libutil/util.hh index 4d5683e2bda..0d55cf93bed 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -274,6 +274,17 @@ std::optional pop(T & c) } +/** + * Append items to a container. 
TODO: remove this once we can use + * C++23's `append_range()`. + */ +template +void append(C & c, std::initializer_list l) +{ + c.insert(c.end(), l.begin(), l.end()); +} + + template class Callback; From 832221650b4cf74d7f05f92e7e8cfcd1cbf5d6ad Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 22 Jan 2025 17:54:19 +0100 Subject: [PATCH 120/361] GitRepo::fetch(): Ignore $GIT_DIR Fixes #12325. (cherry picked from commit 41983dba8febc89a506d407ee9c597347bdd91b5) --- src/libfetchers/git-utils.cc | 2 +- tests/functional/common/vars.sh | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 3b15a85ceaf..6a75daf6124 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -545,7 +545,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this // then use code that was removed in this commit (see blame) auto dir = this->path; - Strings gitArgs{"-C", dir.string(), "fetch", "--quiet", "--force"}; + Strings gitArgs{"-C", dir.string(), "--git-dir", ".", "fetch", "--quiet", "--force"}; if (shallow) append(gitArgs, {"--depth", "1"}); append(gitArgs, {std::string("--"), url, refspec}); diff --git a/tests/functional/common/vars.sh b/tests/functional/common/vars.sh index 4b88e852618..ed4b477278f 100644 --- a/tests/functional/common/vars.sh +++ b/tests/functional/common/vars.sh @@ -60,6 +60,7 @@ unset XDG_DATA_HOME unset XDG_CONFIG_HOME unset XDG_CONFIG_DIRS unset XDG_CACHE_HOME +unset GIT_DIR export IMPURE_VAR1=foo export IMPURE_VAR2=bar From 9cf3d3368e8de1083f11d9521e8331f80bcf2e98 Mon Sep 17 00:00:00 2001 From: Philipp Otterbein Date: Thu, 23 Jan 2025 02:18:27 +0100 Subject: [PATCH 121/361] libstore: fix progress bars (cherry picked from commit be97dc1efc4276e41ced2014c0a909a27f1fb848) --- src/libstore/remote-store.cc | 12 +++++++++++- src/libstore/store-api.cc | 14 ++++++-------- 2 files changed, 17 insertions(+), 9 deletions(-) diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 6781e4743f6..b230079eb27 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -539,11 +539,21 @@ void RemoteStore::addMultipleToStore( RepairFlag repair, CheckSigsFlag checkSigs) { + // `addMultipleToStore` is single threaded + size_t bytesExpected = 0; + for (auto & [pathInfo, _] : pathsToCopy) { + bytesExpected += pathInfo.narSize; + } + act.setExpected(actCopyPath, bytesExpected); + auto source = sinkToSource([&](Sink & sink) { - sink << pathsToCopy.size(); + size_t nrTotal = pathsToCopy.size(); + sink << nrTotal; // Reverse, so we can release memory at the original start std::reverse(pathsToCopy.begin(), pathsToCopy.end()); while (!pathsToCopy.empty()) { + act.progress(nrTotal - pathsToCopy.size(), nrTotal, size_t(1), size_t(0)); + auto & [pathInfo, pathSource] = pathsToCopy.back(); WorkerProto::Serialise::write(*this, WorkerProto::WriteConn { diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 6cd8e47f0ab..236622eae37 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -242,8 +242,8 @@ void Store::addMultipleToStore( storePathsToAdd.insert(thingToAdd.first.path); } - auto showProgress = [&]() { - act.progress(nrDone, pathsToCopy.size(), nrRunning, nrFailed); + auto showProgress = [&, nrTotal = pathsToCopy.size()]() { + act.progress(nrDone, nrTotal, nrRunning, nrFailed); }; processGraph( @@ -1104,9 +1104,6 @@ std::map copyPaths( return storePathForDst; }; - // total is accessed by each copy, which are each handled in 
separate threads - std::atomic total = 0; - for (auto & missingPath : sortedMissing) { auto info = srcStore.queryPathInfo(missingPath); @@ -1116,9 +1113,10 @@ std::map copyPaths( ValidPathInfo infoForDst = *info; infoForDst.path = storePathForDst; - auto source = sinkToSource([&](Sink & sink) { + auto source = sinkToSource([&, narSize = info->narSize](Sink & sink) { // We can reasonably assume that the copy will happen whenever we // read the path, so log something about that at that point + uint64_t total = 0; auto srcUri = srcStore.getUri(); auto dstUri = dstStore.getUri(); auto storePathS = srcStore.printStorePath(missingPath); @@ -1129,13 +1127,13 @@ std::map copyPaths( LambdaSink progressSink([&](std::string_view data) { total += data.size(); - act.progress(total, info->narSize); + act.progress(total, narSize); }); TeeSink tee { sink, progressSink }; srcStore.narFromPath(missingPath, tee); }); - pathsToCopy.push_back(std::pair{infoForDst, std::move(source)}); + pathsToCopy.emplace_back(std::move(infoForDst), std::move(source)); } dstStore.addMultipleToStore(std::move(pathsToCopy), act, repair, checkSigs); From 90159cb197de1e2f816be716f5af09d25e316c1d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 24 Jan 2025 12:15:07 +0100 Subject: [PATCH 122/361] EvalState::resolveLookupPathPath(): Call resolveSymlinks() before pathExists() Fixes #12339. (cherry picked from commit 00d9e7e1f43e3051b793ce1c21f6e902386b93fe) --- src/libexpr/eval.cc | 2 +- tests/functional/restricted.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 345c09e7e9c..19ca1a3591e 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -3114,7 +3114,7 @@ std::optional EvalState::resolveLookupPathPath(const LookupPath::Pat } } - if (path.pathExists()) + if (path.resolveSymlinks().pathExists()) return finish(std::move(path)); else { logWarning({ diff --git a/tests/functional/restricted.sh b/tests/functional/restricted.sh index a92a9b8a3a2..00ee4ddc8c2 100755 --- a/tests/functional/restricted.sh +++ b/tests/functional/restricted.sh @@ -23,7 +23,7 @@ nix-instantiate --restrict-eval ./simple.nix -I src1=./simple.nix -I src2=./conf (! nix-instantiate --restrict-eval --eval -E 'builtins.readFile ./simple.nix') nix-instantiate --restrict-eval --eval -E 'builtins.readFile ./simple.nix' -I src=../.. -expectStderr 1 nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in builtins.readFile ' | grepQuiet "was not found in the Nix search path" +expectStderr 1 nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in builtins.readFile ' | grepQuiet "forbidden in restricted mode" nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in builtins.readFile ' -I src=. 
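A rough sketch of the lookup-path situation the `resolveSymlinks()` fix above is aimed at, assuming a search-path entry that is reached through a symlink (the names and files below are made up for illustration; the exact reproduction in #12339 may differ):

```bash
# Before the fix, the existence check could miss a lookup-path entry that
# goes through a symlink; resolving symlinks first lets it be found.
mkdir -p real-dir
echo '"hello"' > real-dir/default.nix
ln -sfn real-dir linked-dir
nix-instantiate --eval -E 'builtins.readFile <example/default.nix>' -I example=linked-dir
```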
p=$(nix eval --raw --expr "builtins.fetchurl file://${_NIX_TEST_SOURCE_DIR}/restricted.sh" --impure --restrict-eval --allowed-uris "file://${_NIX_TEST_SOURCE_DIR}") From 6cb17fd8360535413ec40bac2909885baf2c2754 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 24 Jan 2025 16:20:43 +0100 Subject: [PATCH 123/361] Bump version --- .version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version b/.version index f34083e034a..ed1d6005085 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.26.1 +2.26.2 From 32aed360b8cb5ea4d28d7bf1ecb8300b9ceb5c2b Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 24 Jan 2025 20:40:21 +0100 Subject: [PATCH 124/361] Format .nix files This does not include any automation for the release branch, but is based on the configuration of https://github.com/NixOS/nix/pull/12349 pre-commit run -a nixfmt-rfc-style --- default.nix | 19 +- doc/manual/generate-builtins.nix | 10 +- doc/manual/generate-manpage.nix | 152 +- doc/manual/generate-settings.nix | 116 +- doc/manual/generate-store-info.nix | 55 +- doc/manual/generate-store-types.nix | 28 +- doc/manual/generate-xp-features-shortlist.nix | 10 +- doc/manual/generate-xp-features.nix | 3 +- doc/manual/package.nix | 60 +- doc/manual/utils.nix | 44 +- docker.nix | 438 +++--- flake.nix | 420 ++++-- maintainers/flake-module.nix | 1311 +++++++++-------- packaging/components.nix | 12 +- packaging/dependencies.nix | 239 +-- packaging/dev-shell.nix | 254 ++-- packaging/everything.nix | 201 +-- packaging/hydra.nix | 211 +-- scripts/binary-tarball.nix | 16 +- scripts/installer.nix | 68 +- src/external-api-docs/package.nix | 20 +- src/internal-api-docs/package.nix | 44 +- src/libcmd/package.nix | 49 +- src/libexpr-c/package.nix | 13 +- src/libexpr-test-support/package.nix | 17 +- src/libexpr-tests/package.nix | 53 +- src/libexpr/call-flake.nix | 130 +- src/libexpr/fetchurl.nix | 96 +- src/libexpr/imported-drv-to-derivation.nix | 36 +- src/libexpr/package.nix | 66 +- src/libexpr/primops/derivation.nix | 31 +- src/libfetchers-tests/package.nix | 51 +- src/libfetchers/package.nix | 17 +- src/libflake-c/package.nix | 15 +- src/libflake-tests/package.nix | 55 +- src/libflake/package.nix | 19 +- src/libmain-c/package.nix | 17 +- src/libmain/package.nix | 15 +- src/libstore-c/package.nix | 13 +- src/libstore-test-support/package.nix | 17 +- src/libstore-tests/package.nix | 76 +- src/libstore/package.nix | 68 +- src/libutil-c/package.nix | 11 +- src/libutil-test-support/package.nix | 15 +- src/libutil-tests/package.nix | 51 +- src/libutil/package.nix | 28 +- src/nix-channel/unpack-channel.nix | 6 +- src/nix-env/buildenv.nix | 16 +- src/nix/package.nix | 126 +- src/perl/package.nix | 126 +- tests/functional/big-derivation-attr.nix | 21 +- tests/functional/build-hook-ca-fixed.nix | 68 +- tests/functional/build-hook.nix | 85 +- tests/functional/ca-shell.nix | 6 +- tests/functional/ca/content-addressed.nix | 26 +- tests/functional/ca/flake.nix | 2 +- tests/functional/ca/nondeterministic.nix | 33 +- tests/functional/ca/racy.nix | 1 - tests/functional/check-refs.nix | 29 +- tests/functional/check-reqs.nix | 42 +- tests/functional/check.nix | 51 +- tests/functional/dependencies.nix | 4 +- .../advanced-attributes-defaults.nix | 5 +- ...d-attributes-structured-attrs-defaults.nix | 10 +- .../advanced-attributes-structured-attrs.nix | 38 +- .../derivation/advanced-attributes.nix | 32 +- .../functional/dyn-drv/recursive-mod-json.nix | 4 +- tests/functional/export-graph.nix | 31 +- tests/functional/failing.nix | 33 +- 
tests/functional/filter-source.nix | 13 +- tests/functional/fixed.nix | 26 +- tests/functional/fod-failing.nix | 30 +- tests/functional/gc-concurrent.nix | 4 +- tests/functional/hash-check.nix | 20 +- tests/functional/hermetic.nix | 85 +- tests/functional/ifd.nix | 12 +- tests/functional/import-from-derivation.nix | 21 +- tests/functional/impure-derivations.nix | 64 +- .../lang-gc/issue-11141-gc-coroutine-test.nix | 94 +- ...vOutputDependencies-multi-elem-context.nix | 13 +- ...vOutputDependencies-wrong-element-kind.nix | 8 +- .../eval-fail-addErrorContext-example.nix | 14 +- .../eval-fail-assert-equal-attrs-names-2.nix | 8 +- .../eval-fail-assert-equal-attrs-names.nix | 8 +- ...al-fail-assert-equal-derivations-extra.nix | 17 +- .../eval-fail-assert-equal-derivations.nix | 18 +- ...eval-fail-assert-equal-function-direct.nix | 7 +- .../eval-fail-assert-equal-list-length.nix | 8 +- .../lang/eval-fail-assert-equal-paths.nix | 2 +- .../lang/eval-fail-assert-nested-bool.nix | 7 +- tests/functional/lang/eval-fail-assert.nix | 7 +- .../lang/eval-fail-attr-name-type.nix | 4 +- ...val-fail-attrset-merge-drops-later-rec.nix | 9 +- .../eval-fail-bad-string-interpolation-4.nix | 12 +- .../lang/eval-fail-dup-dynamic-attrs.nix | 8 +- .../lang/eval-fail-duplicate-traces.nix | 7 +- ...eval-fail-fetchurl-baseName-attrs-name.nix | 5 +- ...l-flake-ref-to-string-negative-integer.nix | 19 +- ...fail-foldlStrict-strict-op-application.nix | 8 +- .../lang/eval-fail-hashfile-missing.nix | 17 +- tests/functional/lang/eval-fail-list.nix | 2 +- .../functional/lang/eval-fail-missing-arg.nix | 13 +- .../lang/eval-fail-mutual-recursion.nix | 30 +- .../lang/eval-fail-nested-list-items.nix | 25 +- .../functional/lang/eval-fail-not-throws.nix | 2 +- .../lang/eval-fail-overflowing-add.nix | 3 +- .../lang/eval-fail-overflowing-div.nix | 3 +- .../lang/eval-fail-overflowing-mul.nix | 3 +- .../lang/eval-fail-overflowing-sub.nix | 3 +- tests/functional/lang/eval-fail-recursion.nix | 5 +- tests/functional/lang/eval-fail-remove.nix | 9 +- tests/functional/lang/eval-fail-scope-5.nix | 11 +- .../lang/eval-fail-undeclared-arg.nix | 6 +- .../lang/eval-fail-using-set-as-attr-name.nix | 8 +- tests/functional/lang/eval-okay-any-all.nix | 39 +- .../functional/lang/eval-okay-arithmetic.nix | 91 +- tests/functional/lang/eval-okay-attrnames.nix | 15 +- tests/functional/lang/eval-okay-attrs.nix | 19 +- tests/functional/lang/eval-okay-attrs2.nix | 21 +- tests/functional/lang/eval-okay-attrs3.nix | 34 +- tests/functional/lang/eval-okay-attrs4.nix | 17 +- tests/functional/lang/eval-okay-attrs6.nix | 4 +- tests/functional/lang/eval-okay-autoargs.nix | 15 +- .../lang/eval-okay-builtins-add.nix | 12 +- tests/functional/lang/eval-okay-builtins.nix | 2 +- .../lang/eval-okay-callable-attrs.nix | 11 +- tests/functional/lang/eval-okay-catattrs.nix | 6 +- tests/functional/lang/eval-okay-closure.nix | 24 +- tests/functional/lang/eval-okay-concat.nix | 16 +- tests/functional/lang/eval-okay-concatmap.nix | 8 +- .../lang/eval-okay-concatstringssep.nix | 19 +- .../lang/eval-okay-context-introspection.nix | 39 +- tests/functional/lang/eval-okay-context.nix | 11 +- .../functional/lang/eval-okay-convertHash.nix | 130 +- tests/functional/lang/eval-okay-deepseq.nix | 10 +- .../lang/eval-okay-delayed-with-inherit.nix | 13 +- .../lang/eval-okay-delayed-with.nix | 20 +- .../lang/eval-okay-dynamic-attrs-2.nix | 6 +- .../lang/eval-okay-dynamic-attrs-bare.nix | 17 +- .../lang/eval-okay-dynamic-attrs.nix | 17 +- tests/functional/lang/eval-okay-elem.nix | 11 +- 
.../functional/lang/eval-okay-empty-args.nix | 5 +- .../lang/eval-okay-eq-derivations.nix | 44 +- tests/functional/lang/eval-okay-eq.nix | 16 +- tests/functional/lang/eval-okay-filter.nix | 9 +- .../lang/eval-okay-flake-ref-to-string.nix | 8 +- tests/functional/lang/eval-okay-flatten.nix | 14 +- .../functional/lang/eval-okay-floor-ceil.nix | 9 +- .../eval-okay-foldlStrict-lazy-elements.nix | 5 +- ...y-foldlStrict-lazy-initial-accumulator.nix | 8 +- .../lang/eval-okay-fromjson-escapes.nix | 3 +- tests/functional/lang/eval-okay-fromjson.nix | 94 +- .../lang/eval-okay-functionargs.nix | 140 +- .../eval-okay-getattrpos-functionargs.nix | 8 +- .../functional/lang/eval-okay-getattrpos.nix | 6 +- tests/functional/lang/eval-okay-groupBy.nix | 6 +- tests/functional/lang/eval-okay-hashfile.nix | 14 +- .../functional/lang/eval-okay-hashstring.nix | 15 +- tests/functional/lang/eval-okay-if.nix | 7 +- tests/functional/lang/eval-okay-import.nix | 3 +- .../lang/eval-okay-inherit-attr-pos.nix | 12 +- .../lang/eval-okay-inherit-from.nix | 21 +- .../lang/eval-okay-intersectAttrs.nix | 45 +- tests/functional/lang/eval-okay-list.nix | 11 +- .../functional/lang/eval-okay-listtoattrs.nix | 27 +- tests/functional/lang/eval-okay-logic.nix | 3 +- tests/functional/lang/eval-okay-map.nix | 8 +- tests/functional/lang/eval-okay-mapattrs.nix | 5 +- .../lang/eval-okay-merge-dynamic-attrs.nix | 16 +- .../functional/lang/eval-okay-nested-with.nix | 4 +- tests/functional/lang/eval-okay-new-let.nix | 8 +- .../lang/eval-okay-null-dynamic-attrs.nix | 2 +- tests/functional/lang/eval-okay-overrides.nix | 8 +- .../lang/eval-okay-parse-flake-ref.nix | 2 +- tests/functional/lang/eval-okay-partition.nix | 9 +- tests/functional/lang/eval-okay-path.nix | 26 +- tests/functional/lang/eval-okay-patterns.nix | 61 +- tests/functional/lang/eval-okay-print.nix | 16 +- .../lang/eval-okay-readFileType.nix | 6 +- .../lang/eval-okay-redefine-builtin.nix | 3 +- .../functional/lang/eval-okay-regex-match.nix | 30 +- .../functional/lang/eval-okay-regex-split.nix | 213 ++- .../lang/eval-okay-regression-20220125.nix | 1 - ...val-okay-regrettable-rec-attrset-merge.nix | 9 +- tests/functional/lang/eval-okay-remove.nix | 9 +- .../lang/eval-okay-repeated-empty-attrs.nix | 5 +- .../lang/eval-okay-repeated-empty-list.nix | 5 +- .../lang/eval-okay-replacestrings.nix | 19 +- tests/functional/lang/eval-okay-scope-1.nix | 17 +- tests/functional/lang/eval-okay-scope-2.nix | 18 +- tests/functional/lang/eval-okay-scope-3.nix | 19 +- tests/functional/lang/eval-okay-scope-4.nix | 11 +- tests/functional/lang/eval-okay-scope-6.nix | 9 +- tests/functional/lang/eval-okay-scope-7.nix | 3 +- .../functional/lang/eval-okay-search-path.nix | 15 +- tests/functional/lang/eval-okay-sort.nix | 62 +- tests/functional/lang/eval-okay-string.nix | 21 +- .../lang/eval-okay-strings-as-attrs-names.nix | 6 +- .../lang/eval-okay-substring-context.nix | 13 +- .../functional/lang/eval-okay-tail-call-1.nix | 3 +- tests/functional/lang/eval-okay-tojson.nix | 39 +- tests/functional/lang/eval-okay-toxml2.nix | 9 +- tests/functional/lang/eval-okay-tryeval.nix | 5 +- tests/functional/lang/eval-okay-types.nix | 9 +- tests/functional/lang/eval-okay-versions.nix | 14 +- tests/functional/lang/eval-okay-xml.nix | 27 +- .../lang/eval-okay-zipAttrsWith.nix | 7 +- tests/functional/lang/lib.nix | 84 +- tests/functional/linux-sandbox-cert-test.nix | 11 +- tests/functional/multiple-outputs.nix | 153 +- tests/functional/nar-access.nix | 35 +- tests/functional/nested-sandboxing/runner.nix | 41 +- 
tests/functional/package.nix | 181 +-- tests/functional/parallel.nix | 38 +- tests/functional/path.nix | 16 +- tests/functional/readfile-context.nix | 3 +- tests/functional/recursive.nix | 8 +- .../functional/repl/doc-comment-function.nix | 7 +- tests/functional/repl/doc-comments.nix | 97 +- tests/functional/repl/doc-functor.nix | 45 +- tests/functional/secure-drv-outputs.nix | 18 +- tests/functional/shell-hello.nix | 90 +- tests/functional/shell.nix | 192 +-- tests/functional/simple-failing.nix | 9 +- tests/functional/structured-attrs-shell.nix | 11 +- tests/functional/structured-attrs.nix | 21 +- tests/functional/undefined-variable.nix | 5 +- tests/functional/user-envs.nix | 57 +- tests/installer/default.nix | 63 +- tests/nixos/authorization.nix | 145 +- tests/nixos/ca-fd-leak/default.nix | 90 +- tests/nixos/cgroups/default.nix | 67 +- tests/nixos/cgroups/hang.nix | 5 +- tests/nixos/chroot-store.nix | 52 +- tests/nixos/containers/containers.nix | 117 +- tests/nixos/containers/id-test.nix | 14 +- tests/nixos/containers/systemd-nspawn.nix | 16 +- tests/nixos/default.nix | 137 +- tests/nixos/fetch-git/default.nix | 31 +- .../test-cases/http-auth/default.nix | 3 +- .../test-cases/http-simple/default.nix | 3 +- .../test-cases/ssh-simple/default.nix | 3 +- .../fetch-git/testsupport/gitea-repo.nix | 34 +- tests/nixos/fetch-git/testsupport/gitea.nix | 72 +- tests/nixos/fetch-git/testsupport/setup.nix | 77 +- tests/nixos/fetchurl.nix | 85 +- tests/nixos/fsync.nix | 60 +- tests/nixos/functional/as-trusted-user.nix | 6 +- tests/nixos/functional/as-user.nix | 4 +- tests/nixos/functional/common.nix | 98 +- tests/nixos/functional/symlinked-home.nix | 4 +- tests/nixos/git-submodules.nix | 114 +- tests/nixos/github-flakes.nix | 350 +++-- tests/nixos/gzip-content-encoding.nix | 59 +- tests/nixos/nix-copy-closure.nix | 159 +- tests/nixos/nix-copy.nix | 210 +-- tests/nixos/nix-docker.nix | 86 +- tests/nixos/nss-preload.nix | 181 ++- tests/nixos/remote-builds-ssh-ng.nix | 183 +-- tests/nixos/remote-builds.nix | 221 +-- tests/nixos/s3-binary-cache-store.nix | 119 +- tests/nixos/setuid.nix | 234 +-- tests/nixos/sourcehut-flakes.nix | 167 ++- tests/nixos/tarball-flakes.nix | 156 +- tests/nixos/user-sandboxing/default.nix | 152 +- tests/repl-completion.nix | 73 +- 266 files changed, 7606 insertions(+), 5281 deletions(-) diff --git a/default.nix b/default.nix index 2cccff28d51..6466507b714 100644 --- a/default.nix +++ b/default.nix @@ -1,10 +1,9 @@ -(import - ( - let lock = builtins.fromJSON (builtins.readFile ./flake.lock); in - fetchTarball { - url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz"; - sha256 = lock.nodes.flake-compat.locked.narHash; - } - ) - { src = ./.; } -).defaultNix +(import ( + let + lock = builtins.fromJSON (builtins.readFile ./flake.lock); + in + fetchTarball { + url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz"; + sha256 = lock.nodes.flake-compat.locked.narHash; + } +) { src = ./.; }).defaultNix diff --git a/doc/manual/generate-builtins.nix b/doc/manual/generate-builtins.nix index 37ed12a4330..3649560f7c6 100644 --- a/doc/manual/generate-builtins.nix +++ b/doc/manual/generate-builtins.nix @@ -5,7 +5,15 @@ in builtinsInfo: let - showBuiltin = name: { doc, type ? null, args ? [ ], experimental-feature ? null, impure-only ? false }: + showBuiltin = + name: + { + doc, + type ? null, + args ? [ ], + experimental-feature ? null, + impure-only ? 
false, + }: let type' = optionalString (type != null) " (${type})"; diff --git a/doc/manual/generate-manpage.nix b/doc/manual/generate-manpage.nix index 791bfd2c756..31e74e17d26 100644 --- a/doc/manual/generate-manpage.nix +++ b/doc/manual/generate-manpage.nix @@ -32,7 +32,13 @@ let commandInfo = fromJSON commandDump; - showCommand = { command, details, filename, toplevel }: + showCommand = + { + command, + details, + filename, + toplevel, + }: let result = '' @@ -56,26 +62,27 @@ let ${maybeOptions} ''; - showSynopsis = command: args: + showSynopsis = + command: args: let - showArgument = arg: "*${arg.label}*" + optionalString (! arg ? arity) "..."; + showArgument = arg: "*${arg.label}*" + optionalString (!arg ? arity) "..."; arguments = concatStringsSep " " (map showArgument args); - in '' + in + '' `${command}` [*option*...] ${arguments} ''; - maybeSubcommands = optionalString (details ? commands && details.commands != {}) - '' - where *subcommand* is one of the following: + maybeSubcommands = optionalString (details ? commands && details.commands != { }) '' + where *subcommand* is one of the following: - ${subcommands} - ''; + ${subcommands} + ''; - subcommands = if length categories > 1 - then listCategories - else listSubcommands details.commands; + subcommands = if length categories > 1 then listCategories else listSubcommands details.commands; - categories = sort (x: y: x.id < y.id) (unique (map (cmd: cmd.category) (attrValues details.commands))); + categories = sort (x: y: x.id < y.id) ( + unique (map (cmd: cmd.category) (attrValues details.commands)) + ); listCategories = concatStrings (map showCategory categories); @@ -99,38 +106,39 @@ let ${allStores} ''; - index = replaceStrings - [ "@store-types@" "./local-store.md" "./local-daemon-store.md" ] - [ storesOverview "#local-store" "#local-daemon-store" ] - details.doc; + index = + replaceStrings + [ "@store-types@" "./local-store.md" "./local-daemon-store.md" ] + [ storesOverview "#local-store" "#local-daemon-store" ] + details.doc; storesOverview = let - showEntry = store: - "- [${store.name}](#${store.slug})"; + showEntry = store: "- [${store.name}](#${store.slug})"; in concatStringsSep "\n" (map showEntry storesList) + "\n"; allStores = concatStringsSep "\n" (attrValues storePages); - storePages = listToAttrs - (map (s: { name = s.filename; value = s.page; }) storesList); + storePages = listToAttrs ( + map (s: { + name = s.filename; + value = s.page; + }) storesList + ); storesList = showStoreDocs { storeInfo = commandInfo.stores; inherit inlineHTML; }; - hasInfix = infix: content: + hasInfix = + infix: content: builtins.stringLength content != builtins.stringLength (replaceStrings [ infix ] [ "" ] content); in optionalString (details ? doc) ( # An alternate implementation with builtins.match stack overflowed on some systems. - if hasInfix "@store-types@" details.doc - then help-stores - else details.doc + if hasInfix "@store-types@" details.doc then help-stores else details.doc ); maybeOptions = let - allVisibleOptions = filterAttrs - (_: o: ! o.hiddenCategory) - (details.flags // toplevel.flags); + allVisibleOptions = filterAttrs (_: o: !o.hiddenCategory) (details.flags // toplevel.flags); in optionalString (allVisibleOptions != { }) '' # Options @@ -142,55 +150,73 @@ let > See [`man nix.conf`](@docroot@/command-ref/conf-file.md#command-line-flags) for overriding configuration settings with command line flags. 
''; - showOptions = inlineHTML: allOptions: + showOptions = + inlineHTML: allOptions: let showCategory = cat: opts: '' ${optionalString (cat != "") "## ${cat}"} ${concatStringsSep "\n" (attrValues (mapAttrs showOption opts))} ''; - showOption = name: option: + showOption = + name: option: let result = trim '' - ${item} ${option.description} ''; - item = if inlineHTML - then ''[`--${name}`](#opt-${name}) ${shortName} ${labels}'' - else "`--${name}` ${shortName} ${labels}"; - shortName = optionalString - (option ? shortName) - ("/ `-${option.shortName}`"); - labels = optionalString - (option ? labels) - (concatStringsSep " " (map (s: "*${s}*") option.labels)); - in result; - categories = mapAttrs - # Convert each group from a list of key-value pairs back to an attrset - (_: listToAttrs) - (groupBy - (cmd: cmd.value.category) - (attrsToList allOptions)); - in concatStrings (attrValues (mapAttrs showCategory categories)); - in squash result; + item = + if inlineHTML then + ''[`--${name}`](#opt-${name}) ${shortName} ${labels}'' + else + "`--${name}` ${shortName} ${labels}"; + shortName = optionalString (option ? shortName) ("/ `-${option.shortName}`"); + labels = optionalString (option ? labels) (concatStringsSep " " (map (s: "*${s}*") option.labels)); + in + result; + categories = + mapAttrs + # Convert each group from a list of key-value pairs back to an attrset + (_: listToAttrs) + (groupBy (cmd: cmd.value.category) (attrsToList allOptions)); + in + concatStrings (attrValues (mapAttrs showCategory categories)); + in + squash result; appendName = filename: name: (if filename == "nix" then "nix3" else filename) + "-" + name; - processCommand = { command, details, filename, toplevel }: + processCommand = + { + command, + details, + filename, + toplevel, + }: let cmd = { inherit command; name = filename + ".md"; - value = showCommand { inherit command details filename toplevel; }; - }; - subcommand = subCmd: processCommand { - command = command + " " + subCmd; - details = details.commands.${subCmd}; - filename = appendName filename subCmd; - inherit toplevel; + value = showCommand { + inherit + command + details + filename + toplevel + ; + }; }; - in [ cmd ] ++ concatMap subcommand (attrNames details.commands or {}); + subcommand = + subCmd: + processCommand { + command = command + " " + subCmd; + details = details.commands.${subCmd}; + filename = appendName filename subCmd; + inherit toplevel; + }; + in + [ cmd ] ++ concatMap subcommand (attrNames details.commands or { }); manpages = processCommand { command = "nix"; @@ -199,9 +225,11 @@ let toplevel = commandInfo.args; }; - tableOfContents = let - showEntry = page: - " - [${page.command}](command-ref/new-cli/${page.name})"; - in concatStringsSep "\n" (map showEntry manpages) + "\n"; + tableOfContents = + let + showEntry = page: " - [${page.command}](command-ref/new-cli/${page.name})"; + in + concatStringsSep "\n" (map showEntry manpages) + "\n"; -in (listToAttrs manpages) // { "SUMMARY.md" = tableOfContents; } +in +(listToAttrs manpages) // { "SUMMARY.md" = tableOfContents; } diff --git a/doc/manual/generate-settings.nix b/doc/manual/generate-settings.nix index 93a8e093e48..35ae73e5d1f 100644 --- a/doc/manual/generate-settings.nix +++ b/doc/manual/generate-settings.nix @@ -1,67 +1,99 @@ let - inherit (builtins) attrValues concatStringsSep isAttrs isBool mapAttrs; - inherit (import ) concatStrings indent optionalString squash; + inherit (builtins) + attrValues + concatStringsSep + isAttrs + isBool + mapAttrs + ; + inherit (import ) + 
concatStrings + indent + optionalString + squash + ; in # `inlineHTML` is a hack to accommodate inconsistent output from `lowdown` -{ prefix, inlineHTML ? true }: settingsInfo: +{ + prefix, + inlineHTML ? true, +}: +settingsInfo: let - showSetting = prefix: setting: { description, documentDefault, defaultValue, aliases, value, experimentalFeature }: + showSetting = + prefix: setting: + { + description, + documentDefault, + defaultValue, + aliases, + value, + experimentalFeature, + }: let result = squash '' - - ${item} + - ${item} - ${indent " " body} - ''; - item = if inlineHTML - then ''[`${setting}`](#${prefix}-${setting})'' - else "`${setting}`"; + ${indent " " body} + ''; + item = + if inlineHTML then + ''[`${setting}`](#${prefix}-${setting})'' + else + "`${setting}`"; # separate body to cleanly handle indentation body = '' - ${experimentalFeatureNote} + ${experimentalFeatureNote} - ${description} + ${description} - **Default:** ${showDefault documentDefault defaultValue} + **Default:** ${showDefault documentDefault defaultValue} - ${showAliases aliases} - ''; + ${showAliases aliases} + ''; experimentalFeatureNote = optionalString (experimentalFeature != null) '' - > **Warning** - > - > This setting is part of an - > [experimental feature](@docroot@/development/experimental-features.md). - > - > To change this setting, make sure the - > [`${experimentalFeature}` experimental feature](@docroot@/development/experimental-features.md#xp-feature-${experimentalFeature}) - > is enabled. - > For example, include the following in [`nix.conf`](@docroot@/command-ref/conf-file.md): - > - > ``` - > extra-experimental-features = ${experimentalFeature} - > ${setting} = ... - > ``` - ''; + > **Warning** + > + > This setting is part of an + > [experimental feature](@docroot@/development/experimental-features.md). + > + > To change this setting, make sure the + > [`${experimentalFeature}` experimental feature](@docroot@/development/experimental-features.md#xp-feature-${experimentalFeature}) + > is enabled. + > For example, include the following in [`nix.conf`](@docroot@/command-ref/conf-file.md): + > + > ``` + > extra-experimental-features = ${experimentalFeature} + > ${setting} = ... + > ``` + ''; - showDefault = documentDefault: defaultValue: + showDefault = + documentDefault: defaultValue: if documentDefault then # a StringMap value type is specified as a string, but # this shows the value type. The empty stringmap is `null` in # JSON, but that converts to `{ }` here. 
- if defaultValue == "" || defaultValue == [] || isAttrs defaultValue - then "*empty*" - else if isBool defaultValue then - if defaultValue then "`true`" else "`false`" - else "`${toString defaultValue}`" - else "*machine-specific*"; + if defaultValue == "" || defaultValue == [ ] || isAttrs defaultValue then + "*empty*" + else if isBool defaultValue then + if defaultValue then "`true`" else "`false`" + else + "`${toString defaultValue}`" + else + "*machine-specific*"; - showAliases = aliases: - optionalString (aliases != []) - "**Deprecated alias:** ${(concatStringsSep ", " (map (s: "`${s}`") aliases))}"; + showAliases = + aliases: + optionalString (aliases != [ ]) + "**Deprecated alias:** ${(concatStringsSep ", " (map (s: "`${s}`") aliases))}"; - in result; + in + result; -in concatStrings (attrValues (mapAttrs (showSetting prefix) settingsInfo)) +in +concatStrings (attrValues (mapAttrs (showSetting prefix) settingsInfo)) diff --git a/doc/manual/generate-store-info.nix b/doc/manual/generate-store-info.nix index cc370412414..e8b7377dafd 100644 --- a/doc/manual/generate-store-info.nix +++ b/doc/manual/generate-store-info.nix @@ -1,6 +1,20 @@ let - inherit (builtins) attrNames listToAttrs concatStringsSep readFile replaceStrings; - inherit (import ) optionalString filterAttrs trim squash toLower unique indent; + inherit (builtins) + attrNames + listToAttrs + concatStringsSep + readFile + replaceStrings + ; + inherit (import ) + optionalString + filterAttrs + trim + squash + toLower + unique + indent + ; showSettings = import ; in @@ -14,7 +28,13 @@ in let - showStore = { name, slug }: { settings, doc, experimentalFeature }: + showStore = + { name, slug }: + { + settings, + doc, + experimentalFeature, + }: let result = squash '' # ${name} @@ -25,7 +45,10 @@ let ## Settings - ${showSettings { prefix = "store-${slug}"; inherit inlineHTML; } settings} + ${showSettings { + prefix = "store-${slug}"; + inherit inlineHTML; + } settings} ''; experimentalFeatureNote = optionalString (experimentalFeature != null) '' @@ -43,15 +66,15 @@ let > extra-experimental-features = ${experimentalFeature} > ``` ''; - in result; - - storesList = map - (name: rec { - inherit name; - slug = replaceStrings [ " " ] [ "-" ] (toLower name); - filename = "${slug}.md"; - page = showStore { inherit name slug; } storeInfo.${name}; - }) - (attrNames storeInfo); - -in storesList + in + result; + + storesList = map (name: rec { + inherit name; + slug = replaceStrings [ " " ] [ "-" ] (toLower name); + filename = "${slug}.md"; + page = showStore { inherit name slug; } storeInfo.${name}; + }) (attrNames storeInfo); + +in +storesList diff --git a/doc/manual/generate-store-types.nix b/doc/manual/generate-store-types.nix index 46179abc5bf..a03d3d6216e 100644 --- a/doc/manual/generate-store-types.nix +++ b/doc/manual/generate-store-types.nix @@ -1,5 +1,11 @@ let - inherit (builtins) attrNames listToAttrs concatStringsSep readFile replaceStrings; + inherit (builtins) + attrNames + listToAttrs + concatStringsSep + readFile + replaceStrings + ; showSettings = import ; showStoreDocs = import ; in @@ -14,26 +20,28 @@ let index = let - showEntry = store: - "- [${store.name}](./${store.filename})"; + showEntry = store: "- [${store.name}](./${store.filename})"; in concatStringsSep "\n" (map showEntry storesList); - "index.md" = replaceStrings - [ "@store-types@" ] [ index ] - (readFile ./source/store/types/index.md.in); + "index.md" = + replaceStrings [ "@store-types@" ] [ index ] + (readFile ./source/store/types/index.md.in); tableOfContents 
= let - showEntry = store: - " - [${store.name}](store/types/${store.filename})"; + showEntry = store: " - [${store.name}](store/types/${store.filename})"; in concatStringsSep "\n" (map showEntry storesList) + "\n"; "SUMMARY.md" = tableOfContents; - storePages = listToAttrs - (map (s: { name = s.filename; value = s.page; }) storesList); + storePages = listToAttrs ( + map (s: { + name = s.filename; + value = s.page; + }) storesList + ); in storePages // { inherit "index.md" "SUMMARY.md"; } diff --git a/doc/manual/generate-xp-features-shortlist.nix b/doc/manual/generate-xp-features-shortlist.nix index eb735ba5f7a..1520fc2f815 100644 --- a/doc/manual/generate-xp-features-shortlist.nix +++ b/doc/manual/generate-xp-features-shortlist.nix @@ -2,8 +2,8 @@ with builtins; with import ; let - showExperimentalFeature = name: doc: - '' - - [`${name}`](@docroot@/development/experimental-features.md#xp-feature-${name}) - ''; -in xps: indent " " (concatStrings (attrValues (mapAttrs showExperimentalFeature xps))) + showExperimentalFeature = name: doc: '' + - [`${name}`](@docroot@/development/experimental-features.md#xp-feature-${name}) + ''; +in +xps: indent " " (concatStrings (attrValues (mapAttrs showExperimentalFeature xps))) diff --git a/doc/manual/generate-xp-features.nix b/doc/manual/generate-xp-features.nix index 0eec0e1da23..468d253bafd 100644 --- a/doc/manual/generate-xp-features.nix +++ b/doc/manual/generate-xp-features.nix @@ -2,7 +2,8 @@ with builtins; with import ; let - showExperimentalFeature = name: doc: + showExperimentalFeature = + name: doc: squash '' ## [`${name}`]{#xp-feature-${name}} diff --git a/doc/manual/package.nix b/doc/manual/package.nix index f8133f2e1dd..8f5d0dfe137 100644 --- a/doc/manual/package.nix +++ b/doc/manual/package.nix @@ -1,19 +1,20 @@ -{ lib -, mkMesonDerivation +{ + lib, + mkMesonDerivation, -, meson -, ninja -, lowdown-unsandboxed -, mdbook -, mdbook-linkcheck -, jq -, python3 -, rsync -, nix-cli + meson, + ninja, + lowdown-unsandboxed, + mdbook, + mdbook-linkcheck, + jq, + python3, + rsync, + nix-cli, -# Configuration Options + # Configuration Options -, version + version, }: let @@ -25,18 +26,22 @@ mkMesonDerivation (finalAttrs: { inherit version; workDir = ./.; - fileset = fileset.difference - (fileset.unions [ - ../../.version - # Too many different types of files to filter for now - ../../doc/manual - ./. - ]) - # Do a blacklist instead - ../../doc/manual/package.nix; + fileset = + fileset.difference + (fileset.unions [ + ../../.version + # Too many different types of files to filter for now + ../../doc/manual + ./. 
+ ]) + # Do a blacklist instead + ../../doc/manual/package.nix; # TODO the man pages should probably be separate - outputs = [ "out" "man" ]; + outputs = [ + "out" + "man" + ]; # Hack for sake of the dev shell passthru.externalNativeBuildInputs = [ @@ -54,11 +59,10 @@ mkMesonDerivation (finalAttrs: { nix-cli ]; - preConfigure = - '' - chmod u+w ./.version - echo ${finalAttrs.version} > ./.version - ''; + preConfigure = '' + chmod u+w ./.version + echo ${finalAttrs.version} > ./.version + ''; postInstall = '' mkdir -p ''$out/nix-support diff --git a/doc/manual/utils.nix b/doc/manual/utils.nix index 19ff49b64d9..db3a0e67a83 100644 --- a/doc/manual/utils.nix +++ b/doc/manual/utils.nix @@ -11,10 +11,15 @@ rec { concatStrings = concatStringsSep ""; - attrsToList = a: - map (name: { inherit name; value = a.${name}; }) (builtins.attrNames a); + attrsToList = + a: + map (name: { + inherit name; + value = a.${name}; + }) (builtins.attrNames a); - replaceStringsRec = from: to: string: + replaceStringsRec = + from: to: string: # recursively replace occurrences of `from` with `to` within `string` # example: # replaceStringRec "--" "-" "hello-----world" @@ -22,16 +27,18 @@ rec { let replaced = replaceStrings [ from ] [ to ] string; in - if replaced == string then string else replaceStringsRec from to replaced; + if replaced == string then string else replaceStringsRec from to replaced; toLower = replaceStrings upperChars lowerChars; squash = replaceStringsRec "\n\n\n" "\n\n"; - trim = string: + trim = + string: # trim trailing spaces and squash non-leading spaces let - trimLine = line: + trimLine = + line: let # separate leading spaces from the rest parts = split "(^ *)" line; @@ -39,19 +46,30 @@ rec { rest = elemAt parts 2; # drop trailing spaces body = head (split " *$" rest); - in spaces + replaceStringsRec " " " " body; - in concatStringsSep "\n" (map trimLine (splitLines string)); + in + spaces + replaceStringsRec " " " " body; + in + concatStringsSep "\n" (map trimLine (splitLines string)); # FIXME: O(n^2) - unique = foldl' (acc: e: if elem e acc then acc else acc ++ [ e ]) []; + unique = foldl' (acc: e: if elem e acc then acc else acc ++ [ e ]) [ ]; nameValuePair = name: value: { inherit name value; }; - filterAttrs = pred: set: - listToAttrs (concatMap (name: let v = set.${name}; in if pred name v then [(nameValuePair name v)] else []) (attrNames set)); + filterAttrs = + pred: set: + listToAttrs ( + concatMap ( + name: + let + v = set.${name}; + in + if pred name v then [ (nameValuePair name v) ] else [ ] + ) (attrNames set) + ); optionalString = cond: string: if cond then string else ""; - indent = prefix: s: - concatStringsSep "\n" (map (x: if x == "" then x else "${prefix}${x}") (splitLines s)); + indent = + prefix: s: concatStringsSep "\n" (map (x: if x == "" then x else "${prefix}${x}") (splitLines s)); } diff --git a/docker.nix b/docker.nix index e2e9da72831..d52c317d6b1 100644 --- a/docker.nix +++ b/docker.nix @@ -1,112 +1,113 @@ -{ pkgs ? import { } -, lib ? pkgs.lib -, name ? "nix" -, tag ? "latest" -, bundleNixpkgs ? true -, channelName ? "nixpkgs" -, channelURL ? "https://nixos.org/channels/nixpkgs-unstable" -, extraPkgs ? [] -, maxLayers ? 100 -, nixConf ? {} -, flake-registry ? null -, uid ? 0 -, gid ? 0 -, uname ? "root" -, gname ? "root" +{ + pkgs ? import { }, + lib ? pkgs.lib, + name ? "nix", + tag ? "latest", + bundleNixpkgs ? true, + channelName ? "nixpkgs", + channelURL ? "https://nixos.org/channels/nixpkgs-unstable", + extraPkgs ? [ ], + maxLayers ? 100, + nixConf ? 
{ }, + flake-registry ? null, + uid ? 0, + gid ? 0, + uname ? "root", + gname ? "root", }: let - defaultPkgs = with pkgs; [ - nix - bashInteractive - coreutils-full - gnutar - gzip - gnugrep - which - curl - less - wget - man - cacert.out - findutils - iana-etc - git - openssh - ] ++ extraPkgs; - - users = { - - root = { - uid = 0; - shell = "${pkgs.bashInteractive}/bin/bash"; - home = "/root"; - gid = 0; - groups = [ "root" ]; - description = "System administrator"; - }; + defaultPkgs = + with pkgs; + [ + nix + bashInteractive + coreutils-full + gnutar + gzip + gnugrep + which + curl + less + wget + man + cacert.out + findutils + iana-etc + git + openssh + ] + ++ extraPkgs; + + users = + { + + root = { + uid = 0; + shell = "${pkgs.bashInteractive}/bin/bash"; + home = "/root"; + gid = 0; + groups = [ "root" ]; + description = "System administrator"; + }; - nobody = { - uid = 65534; - shell = "${pkgs.shadow}/bin/nologin"; - home = "/var/empty"; - gid = 65534; - groups = [ "nobody" ]; - description = "Unprivileged account (don't use!)"; - }; + nobody = { + uid = 65534; + shell = "${pkgs.shadow}/bin/nologin"; + home = "/var/empty"; + gid = 65534; + groups = [ "nobody" ]; + description = "Unprivileged account (don't use!)"; + }; - } // lib.optionalAttrs (uid != 0) { - "${uname}" = { - uid = uid; - shell = "${pkgs.bashInteractive}/bin/bash"; - home = "/home/${uname}"; - gid = gid; - groups = [ "${gname}" ]; - description = "Nix user"; + } + // lib.optionalAttrs (uid != 0) { + "${uname}" = { + uid = uid; + shell = "${pkgs.bashInteractive}/bin/bash"; + home = "/home/${uname}"; + gid = gid; + groups = [ "${gname}" ]; + description = "Nix user"; + }; + } + // lib.listToAttrs ( + map (n: { + name = "nixbld${toString n}"; + value = { + uid = 30000 + n; + gid = 30000; + groups = [ "nixbld" ]; + description = "Nix build user ${toString n}"; + }; + }) (lib.lists.range 1 32) + ); + + groups = + { + root.gid = 0; + nixbld.gid = 30000; + nobody.gid = 65534; + } + // lib.optionalAttrs (gid != 0) { + "${gname}".gid = gid; }; - } // lib.listToAttrs ( - map - ( - n: { - name = "nixbld${toString n}"; - value = { - uid = 30000 + n; - gid = 30000; - groups = [ "nixbld" ]; - description = "Nix build user ${toString n}"; - }; - } - ) - (lib.lists.range 1 32) - ); - - groups = { - root.gid = 0; - nixbld.gid = 30000; - nobody.gid = 65534; - } // lib.optionalAttrs (gid != 0) { - "${gname}".gid = gid; - }; userToPasswd = ( k: - { uid - , gid ? 65534 - , home ? "/var/empty" - , description ? "" - , shell ? "/bin/false" - , groups ? [ ] - }: "${k}:x:${toString uid}:${toString gid}:${description}:${home}:${shell}" - ); - passwdContents = ( - lib.concatStringsSep "\n" - (lib.attrValues (lib.mapAttrs userToPasswd users)) + { + uid, + gid ? 65534, + home ? "/var/empty", + description ? "", + shell ? "/bin/false", + groups ? [ ], + }: + "${k}:x:${toString uid}:${toString gid}:${description}:${home}:${shell}" ); + passwdContents = (lib.concatStringsSep "\n" (lib.attrValues (lib.mapAttrs userToPasswd users))); userToShadow = k: { ... 
}: "${k}:!:1::::::"; - shadowContents = ( - lib.concatStringsSep "\n" - (lib.attrValues (lib.mapAttrs userToShadow users)) - ); + shadowContents = (lib.concatStringsSep "\n" (lib.attrValues (lib.mapAttrs userToShadow users))); # Map groups to members # { @@ -116,42 +117,35 @@ let let # Create a flat list of user/group mappings mappings = ( - builtins.foldl' - ( - acc: user: - let - groups = users.${user}.groups or [ ]; - in - acc ++ map - (group: { - inherit user group; - }) - groups - ) - [ ] - (lib.attrNames users) + builtins.foldl' ( + acc: user: + let + groups = users.${user}.groups or [ ]; + in + acc + ++ map (group: { + inherit user group; + }) groups + ) [ ] (lib.attrNames users) ); in - ( - builtins.foldl' - ( - acc: v: acc // { - ${v.group} = acc.${v.group} or [ ] ++ [ v.user ]; - } - ) - { } - mappings) + (builtins.foldl' ( + acc: v: + acc + // { + ${v.group} = acc.${v.group} or [ ] ++ [ v.user ]; + } + ) { } mappings) ); - groupToGroup = k: { gid }: + groupToGroup = + k: + { gid }: let members = groupMemberMap.${k} or [ ]; in "${k}:x:${toString gid}:${lib.concatStringsSep "," members}"; - groupContents = ( - lib.concatStringsSep "\n" - (lib.attrValues (lib.mapAttrs groupToGroup groups)) - ); + groupContents = (lib.concatStringsSep "\n" (lib.attrValues (lib.mapAttrs groupToGroup groups))); defaultNixConf = { sandbox = "false"; @@ -159,11 +153,17 @@ let trusted-public-keys = [ "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=" ]; }; - nixConfContents = (lib.concatStringsSep "\n" (lib.mapAttrsFlatten (n: v: - let - vStr = if builtins.isList v then lib.concatStringsSep " " v else v; - in - "${n} = ${vStr}") (defaultNixConf // nixConf))) + "\n"; + nixConfContents = + (lib.concatStringsSep "\n" ( + lib.mapAttrsFlatten ( + n: v: + let + vStr = if builtins.isList v then lib.concatStringsSep " " v else v; + in + "${n} = ${vStr}" + ) (defaultNixConf // nixConf) + )) + + "\n"; userHome = if uid == 0 then "/root" else "/home/${uname}"; @@ -184,21 +184,29 @@ let manifest = pkgs.buildPackages.runCommand "manifest.nix" { } '' cat > $out < $out/etc/passwd - echo "" >> $out/etc/passwd + cat $passwdContentsPath > $out/etc/passwd + echo "" >> $out/etc/passwd - cat $groupContentsPath > $out/etc/group - echo "" >> $out/etc/group + cat $groupContentsPath > $out/etc/group + echo "" >> $out/etc/group - cat $shadowContentsPath > $out/etc/shadow - echo "" >> $out/etc/shadow + cat $shadowContentsPath > $out/etc/shadow + echo "" >> $out/etc/shadow - mkdir -p $out/usr - ln -s /nix/var/nix/profiles/share $out/usr/ + mkdir -p $out/usr + ln -s /nix/var/nix/profiles/share $out/usr/ - mkdir -p $out/nix/var/nix/gcroots + mkdir -p $out/nix/var/nix/gcroots - mkdir $out/tmp + mkdir $out/tmp - mkdir -p $out/var/tmp + mkdir -p $out/var/tmp - mkdir -p $out/etc/nix - cat $nixConfContentsPath > $out/etc/nix/nix.conf + mkdir -p $out/etc/nix + cat $nixConfContentsPath > $out/etc/nix/nix.conf - mkdir -p $out${userHome} - mkdir -p $out/nix/var/nix/profiles/per-user/${uname} + mkdir -p $out${userHome} + mkdir -p $out/nix/var/nix/profiles/per-user/${uname} - ln -s ${profile} $out/nix/var/nix/profiles/default-1-link - ln -s /nix/var/nix/profiles/default-1-link $out/nix/var/nix/profiles/default - ln -s /nix/var/nix/profiles/default $out${userHome}/.nix-profile + ln -s ${profile} $out/nix/var/nix/profiles/default-1-link + ln -s /nix/var/nix/profiles/default-1-link $out/nix/var/nix/profiles/default + ln -s /nix/var/nix/profiles/default $out${userHome}/.nix-profile - ln -s ${channel} 
$out/nix/var/nix/profiles/per-user/${uname}/channels-1-link - ln -s /nix/var/nix/profiles/per-user/${uname}/channels-1-link $out/nix/var/nix/profiles/per-user/${uname}/channels + ln -s ${channel} $out/nix/var/nix/profiles/per-user/${uname}/channels-1-link + ln -s /nix/var/nix/profiles/per-user/${uname}/channels-1-link $out/nix/var/nix/profiles/per-user/${uname}/channels - mkdir -p $out${userHome}/.nix-defexpr - ln -s /nix/var/nix/profiles/per-user/${uname}/channels $out${userHome}/.nix-defexpr/channels - echo "${channelURL} ${channelName}" > $out${userHome}/.nix-channels + mkdir -p $out${userHome}/.nix-defexpr + ln -s /nix/var/nix/profiles/per-user/${uname}/channels $out${userHome}/.nix-defexpr/channels + echo "${channelURL} ${channelName}" > $out${userHome}/.nix-channels - mkdir -p $out/bin $out/usr/bin - ln -s ${pkgs.coreutils}/bin/env $out/usr/bin/env - ln -s ${pkgs.bashInteractive}/bin/bash $out/bin/sh + mkdir -p $out/bin $out/usr/bin + ln -s ${pkgs.coreutils}/bin/env $out/usr/bin/env + ln -s ${pkgs.bashInteractive}/bin/bash $out/bin/sh - '' + (lib.optionalString (flake-registry-path != null) '' - nixCacheDir="${userHome}/.cache/nix" - mkdir -p $out$nixCacheDir - globalFlakeRegistryPath="$nixCacheDir/flake-registry.json" - ln -s ${flake-registry-path} $out$globalFlakeRegistryPath - mkdir -p $out/nix/var/nix/gcroots/auto - rootName=$(${pkgs.nix}/bin/nix --extra-experimental-features nix-command hash file --type sha1 --base32 <(echo -n $globalFlakeRegistryPath)) - ln -s $globalFlakeRegistryPath $out/nix/var/nix/gcroots/auto/$rootName - '')); + '' + + (lib.optionalString (flake-registry-path != null) '' + nixCacheDir="${userHome}/.cache/nix" + mkdir -p $out$nixCacheDir + globalFlakeRegistryPath="$nixCacheDir/flake-registry.json" + ln -s ${flake-registry-path} $out$globalFlakeRegistryPath + mkdir -p $out/nix/var/nix/gcroots/auto + rootName=$(${pkgs.nix}/bin/nix --extra-experimental-features nix-command hash file --type sha1 --base32 <(echo -n $globalFlakeRegistryPath)) + ln -s $globalFlakeRegistryPath $out/nix/var/nix/gcroots/auto/$rootName + '') + ); in pkgs.dockerTools.buildLayeredImageWithNixDb { - inherit name tag maxLayers uid gid uname gname; + inherit + name + tag + maxLayers + uid + gid + uname + gname + ; contents = [ baseSystem ]; @@ -305,15 +331,19 @@ pkgs.dockerTools.buildLayeredImageWithNixDb { User = "${toString uid}:${toString gid}"; Env = [ "USER=${uname}" - "PATH=${lib.concatStringsSep ":" [ - "${userHome}/.nix-profile/bin" - "/nix/var/nix/profiles/default/bin" - "/nix/var/nix/profiles/default/sbin" - ]}" - "MANPATH=${lib.concatStringsSep ":" [ - "${userHome}/.nix-profile/share/man" - "/nix/var/nix/profiles/default/share/man" - ]}" + "PATH=${ + lib.concatStringsSep ":" [ + "${userHome}/.nix-profile/bin" + "/nix/var/nix/profiles/default/bin" + "/nix/var/nix/profiles/default/sbin" + ] + }" + "MANPATH=${ + lib.concatStringsSep ":" [ + "${userHome}/.nix-profile/share/man" + "/nix/var/nix/profiles/default/share/man" + ] + }" "SSL_CERT_FILE=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt" "GIT_SSL_CAINFO=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt" "NIX_SSL_CERT_FILE=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt" diff --git a/flake.nix b/flake.nix index d8a458c1f4d..eafb6535302 100644 --- a/flake.nix +++ b/flake.nix @@ -5,7 +5,10 @@ inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2"; inputs.nixpkgs-23-11.url = "github:NixOS/nixpkgs/a62e6edd6d5e1fa0329b8653c801147986f8d446"; - inputs.flake-compat = { 
url = "github:edolstra/flake-compat"; flake = false; }; + inputs.flake-compat = { + url = "github:edolstra/flake-compat"; + flake = false; + }; # dev tooling inputs.flake-parts.url = "github:hercules-ci/flake-parts"; @@ -18,8 +21,13 @@ inputs.git-hooks-nix.inputs.flake-compat.follows = ""; inputs.git-hooks-nix.inputs.gitignore.follows = ""; - outputs = inputs@{ self, nixpkgs, nixpkgs-regression, ... }: - + outputs = + inputs@{ + self, + nixpkgs, + nixpkgs-regression, + ... + }: let inherit (nixpkgs) lib; @@ -27,9 +35,15 @@ officialRelease = true; linux32BitSystems = [ "i686-linux" ]; - linux64BitSystems = [ "x86_64-linux" "aarch64-linux" ]; + linux64BitSystems = [ + "x86_64-linux" + "aarch64-linux" + ]; linuxSystems = linux32BitSystems ++ linux64BitSystems; - darwinSystems = [ "x86_64-darwin" "aarch64-darwin" ]; + darwinSystems = [ + "x86_64-darwin" + "aarch64-darwin" + ]; systems = linuxSystems ++ darwinSystems; crossSystems = [ @@ -59,63 +73,77 @@ (Provided that the names are unique.) See https://nixos.org/manual/nixpkgs/stable/index.html#function-library-lib.attrsets.concatMapAttrs - */ + */ flatMapAttrs = attrs: f: lib.concatMapAttrs f attrs; forAllSystems = lib.genAttrs systems; forAllCrossSystems = lib.genAttrs crossSystems; - forAllStdenvs = f: - lib.listToAttrs - (map - (stdenvName: { - name = "${stdenvName}Packages"; - value = f stdenvName; - }) - stdenvs); - + forAllStdenvs = + f: + lib.listToAttrs ( + map (stdenvName: { + name = "${stdenvName}Packages"; + value = f stdenvName; + }) stdenvs + ); # We don't apply flake-parts to the whole flake so that non-development attributes # load without fetching any development inputs. devFlake = inputs.flake-parts.lib.mkFlake { inherit inputs; } { imports = [ ./maintainers/flake-module.nix ]; systems = lib.subtractLists crossSystems systems; - perSystem = { system, ... }: { - _module.args.pkgs = nixpkgsFor.${system}.native; - }; + perSystem = + { system, ... }: + { + _module.args.pkgs = nixpkgsFor.${system}.native; + }; }; # Memoize nixpkgs for different platforms for efficiency. 
- nixpkgsFor = forAllSystems - (system: let - make-pkgs = crossSystem: stdenv: import nixpkgs { - localSystem = { - inherit system; - }; - crossSystem = if crossSystem == null then null else { - config = crossSystem; - } // lib.optionalAttrs (crossSystem == "x86_64-unknown-freebsd13") { - useLLVM = true; + nixpkgsFor = forAllSystems ( + system: + let + make-pkgs = + crossSystem: stdenv: + import nixpkgs { + localSystem = { + inherit system; + }; + crossSystem = + if crossSystem == null then + null + else + { + config = crossSystem; + } + // lib.optionalAttrs (crossSystem == "x86_64-unknown-freebsd13") { + useLLVM = true; + }; + overlays = [ + (overlayFor (p: p.${stdenv})) + ]; }; - overlays = [ - (overlayFor (p: p.${stdenv})) - ]; - }; stdenvs = forAllStdenvs (make-pkgs null); native = stdenvs.stdenvPackages; - in { + in + { inherit stdenvs native; static = native.pkgsStatic; llvm = native.pkgsLLVM; cross = forAllCrossSystems (crossSystem: make-pkgs crossSystem "stdenv"); - }); + } + ); - binaryTarball = nix: pkgs: pkgs.callPackage ./scripts/binary-tarball.nix { - inherit nix; - }; + binaryTarball = + nix: pkgs: + pkgs.callPackage ./scripts/binary-tarball.nix { + inherit nix; + }; - overlayFor = getStdenv: final: prev: + overlayFor = + getStdenv: final: prev: let stdenv = getStdenv final; in @@ -162,12 +190,19 @@ # See https://github.com/NixOS/nixpkgs/pull/214409 # Remove when fixed in this flake's nixpkgs pre-commit = - if prev.stdenv.hostPlatform.system == "i686-linux" - then (prev.pre-commit.override (o: { dotnet-sdk = ""; })).overridePythonAttrs (o: { doCheck = false; }) - else prev.pre-commit; + if prev.stdenv.hostPlatform.system == "i686-linux" then + (prev.pre-commit.override (o: { + dotnet-sdk = ""; + })).overridePythonAttrs + (o: { + doCheck = false; + }) + else + prev.pre-commit; }; - in { + in + { # A Nixpkgs overlay that overrides the 'nix' and # 'nix-perl-bindings' packages. overlays.default = overlayFor (p: p.stdenv); @@ -186,53 +221,69 @@ ; }; - checks = forAllSystems (system: { - installerScriptForGHA = self.hydraJobs.installerScriptForGHA.${system}; - installTests = self.hydraJobs.installTests.${system}; - nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system}; - rl-next = - let pkgs = nixpkgsFor.${system}.native; - in pkgs.buildPackages.runCommand "test-rl-next-release-notes" { } '' - LANG=C.UTF-8 ${pkgs.changelog-d}/bin/changelog-d ${./doc/manual/rl-next} >$out - ''; - repl-completion = nixpkgsFor.${system}.native.callPackage ./tests/repl-completion.nix { }; - } // (lib.optionalAttrs (builtins.elem system linux64BitSystems)) { - dockerImage = self.hydraJobs.dockerImage.${system}; - } // (lib.optionalAttrs (!(builtins.elem system linux32BitSystems))) { - # Some perl dependencies are broken on i686-linux. - # Since the support is only best-effort there, disable the perl - # bindings - perlBindings = self.hydraJobs.perlBindings.${system}; - } - # Add "passthru" tests - // flatMapAttrs ({ - "" = nixpkgsFor.${system}.native; - } // lib.optionalAttrs (! nixpkgsFor.${system}.native.stdenv.hostPlatform.isDarwin) { - # TODO: enable static builds for darwin, blocked on: - # https://github.com/NixOS/nixpkgs/issues/320448 - # TODO: disabled to speed up GHA CI. 
- #"static-" = nixpkgsFor.${system}.static; - }) - (nixpkgsPrefix: nixpkgs: - flatMapAttrs nixpkgs.nixComponents - (pkgName: pkg: - flatMapAttrs pkg.tests or {} - (testName: test: { - "${nixpkgsPrefix}${pkgName}-${testName}" = test; - }) + checks = forAllSystems ( + system: + { + installerScriptForGHA = self.hydraJobs.installerScriptForGHA.${system}; + installTests = self.hydraJobs.installTests.${system}; + nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system}; + rl-next = + let + pkgs = nixpkgsFor.${system}.native; + in + pkgs.buildPackages.runCommand "test-rl-next-release-notes" { } '' + LANG=C.UTF-8 ${pkgs.changelog-d}/bin/changelog-d ${./doc/manual/rl-next} >$out + ''; + repl-completion = nixpkgsFor.${system}.native.callPackage ./tests/repl-completion.nix { }; + } + // (lib.optionalAttrs (builtins.elem system linux64BitSystems)) { + dockerImage = self.hydraJobs.dockerImage.${system}; + } + // (lib.optionalAttrs (!(builtins.elem system linux32BitSystems))) { + # Some perl dependencies are broken on i686-linux. + # Since the support is only best-effort there, disable the perl + # bindings + perlBindings = self.hydraJobs.perlBindings.${system}; + } + # Add "passthru" tests + // + flatMapAttrs + ( + { + "" = nixpkgsFor.${system}.native; + } + // lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.hostPlatform.isDarwin) { + # TODO: enable static builds for darwin, blocked on: + # https://github.com/NixOS/nixpkgs/issues/320448 + # TODO: disabled to speed up GHA CI. + #"static-" = nixpkgsFor.${system}.static; + } ) - // lib.optionalAttrs (nixpkgs.stdenv.hostPlatform == nixpkgs.stdenv.buildPlatform) { - "${nixpkgsPrefix}nix-functional-tests" = nixpkgs.nixComponents.nix-functional-tests; - } - ) - // devFlake.checks.${system} or {} + ( + nixpkgsPrefix: nixpkgs: + flatMapAttrs nixpkgs.nixComponents ( + pkgName: pkg: + flatMapAttrs pkg.tests or { } ( + testName: test: { + "${nixpkgsPrefix}${pkgName}-${testName}" = test; + } + ) + ) + // lib.optionalAttrs (nixpkgs.stdenv.hostPlatform == nixpkgs.stdenv.buildPlatform) { + "${nixpkgsPrefix}nix-functional-tests" = nixpkgs.nixComponents.nix-functional-tests; + } + ) + // devFlake.checks.${system} or { } ); - packages = forAllSystems (system: - { # Here we put attributes that map 1:1 into packages., ie + packages = forAllSystems ( + system: + { + # Here we put attributes that map 1:1 into packages., ie # for which we don't apply the full build matrix such as cross or static. inherit (nixpkgsFor.${system}.native) - changelog-d; + changelog-d + ; default = self.packages.${system}.nix; installerScriptForGHA = self.hydraJobs.installerScriptForGHA.${system}; binaryTarball = self.hydraJobs.binaryTarball.${system}; @@ -243,96 +294,143 @@ nix-external-api-docs = nixpkgsFor.${system}.native.nixComponents.nix-external-api-docs; } # We need to flatten recursive attribute sets of derivations to pass `flake check`. 
- // flatMapAttrs - { # Components we'll iterate over in the upcoming lambda - "nix-util" = { }; - "nix-util-c" = { }; - "nix-util-test-support" = { }; - "nix-util-tests" = { }; + // + flatMapAttrs + { + # Components we'll iterate over in the upcoming lambda + "nix-util" = { }; + "nix-util-c" = { }; + "nix-util-test-support" = { }; + "nix-util-tests" = { }; - "nix-store" = { }; - "nix-store-c" = { }; - "nix-store-test-support" = { }; - "nix-store-tests" = { }; + "nix-store" = { }; + "nix-store-c" = { }; + "nix-store-test-support" = { }; + "nix-store-tests" = { }; - "nix-fetchers" = { }; - "nix-fetchers-tests" = { }; + "nix-fetchers" = { }; + "nix-fetchers-tests" = { }; - "nix-expr" = { }; - "nix-expr-c" = { }; - "nix-expr-test-support" = { }; - "nix-expr-tests" = { }; + "nix-expr" = { }; + "nix-expr-c" = { }; + "nix-expr-test-support" = { }; + "nix-expr-tests" = { }; - "nix-flake" = { }; - "nix-flake-tests" = { }; + "nix-flake" = { }; + "nix-flake-tests" = { }; - "nix-main" = { }; - "nix-main-c" = { }; + "nix-main" = { }; + "nix-main-c" = { }; - "nix-cmd" = { }; + "nix-cmd" = { }; - "nix-cli" = { }; + "nix-cli" = { }; - "nix-everything" = { }; + "nix-everything" = { }; - "nix-functional-tests" = { supportsCross = false; }; + "nix-functional-tests" = { + supportsCross = false; + }; - "nix-perl-bindings" = { supportsCross = false; }; - } - (pkgName: { supportsCross ? true }: { - # These attributes go right into `packages.`. - "${pkgName}" = nixpkgsFor.${system}.native.nixComponents.${pkgName}; - "${pkgName}-static" = nixpkgsFor.${system}.static.nixComponents.${pkgName}; - "${pkgName}-llvm" = nixpkgsFor.${system}.llvm.nixComponents.${pkgName}; + "nix-perl-bindings" = { + supportsCross = false; + }; } - // lib.optionalAttrs supportsCross (flatMapAttrs (lib.genAttrs crossSystems (_: { })) (crossSystem: {}: { - # These attributes go right into `packages.`. - "${pkgName}-${crossSystem}" = nixpkgsFor.${system}.cross.${crossSystem}.nixComponents.${pkgName}; - })) - // flatMapAttrs (lib.genAttrs stdenvs (_: { })) (stdenvName: {}: { - # These attributes go right into `packages.`. - "${pkgName}-${stdenvName}" = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".nixComponents.${pkgName}; - }) - ) + ( + pkgName: + { + supportsCross ? true, + }: + { + # These attributes go right into `packages.`. + "${pkgName}" = nixpkgsFor.${system}.native.nixComponents.${pkgName}; + "${pkgName}-static" = nixpkgsFor.${system}.static.nixComponents.${pkgName}; + "${pkgName}-llvm" = nixpkgsFor.${system}.llvm.nixComponents.${pkgName}; + } + // lib.optionalAttrs supportsCross ( + flatMapAttrs (lib.genAttrs crossSystems (_: { })) ( + crossSystem: + { }: + { + # These attributes go right into `packages.`. + "${pkgName}-${crossSystem}" = nixpkgsFor.${system}.cross.${crossSystem}.nixComponents.${pkgName}; + } + ) + ) + // flatMapAttrs (lib.genAttrs stdenvs (_: { })) ( + stdenvName: + { }: + { + # These attributes go right into `packages.`. 
+ "${pkgName}-${stdenvName}" = + nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".nixComponents.${pkgName}; + } + ) + ) // lib.optionalAttrs (builtins.elem system linux64BitSystems) { - dockerImage = - let - pkgs = nixpkgsFor.${system}.native; - image = import ./docker.nix { inherit pkgs; tag = pkgs.nix.version; }; - in - pkgs.runCommand - "docker-image-tarball-${pkgs.nix.version}" - { meta.description = "Docker image with Nix for ${system}"; } - '' - mkdir -p $out/nix-support - image=$out/image.tar.gz - ln -s ${image} $image - echo "file binary-dist $image" >> $out/nix-support/hydra-build-products - ''; - }); - - devShells = let - makeShell = import ./packaging/dev-shell.nix { inherit lib devFlake; }; - prefixAttrs = prefix: lib.concatMapAttrs (k: v: { "${prefix}-${k}" = v; }); - in - forAllSystems (system: - prefixAttrs "native" (forAllStdenvs (stdenvName: makeShell { - pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages"; - })) // - lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.isDarwin) ( - prefixAttrs "static" (forAllStdenvs (stdenvName: makeShell { - pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".pkgsStatic; - })) // - prefixAttrs "llvm" (forAllStdenvs (stdenvName: makeShell { - pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".pkgsLLVM; - })) // - prefixAttrs "cross" (forAllCrossSystems (crossSystem: makeShell { - pkgs = nixpkgsFor.${system}.cross.${crossSystem}; - })) - ) // - { + dockerImage = + let + pkgs = nixpkgsFor.${system}.native; + image = import ./docker.nix { + inherit pkgs; + tag = pkgs.nix.version; + }; + in + pkgs.runCommand "docker-image-tarball-${pkgs.nix.version}" + { meta.description = "Docker image with Nix for ${system}"; } + '' + mkdir -p $out/nix-support + image=$out/image.tar.gz + ln -s ${image} $image + echo "file binary-dist $image" >> $out/nix-support/hydra-build-products + ''; + } + ); + + devShells = + let + makeShell = import ./packaging/dev-shell.nix { inherit lib devFlake; }; + prefixAttrs = prefix: lib.concatMapAttrs (k: v: { "${prefix}-${k}" = v; }); + in + forAllSystems ( + system: + prefixAttrs "native" ( + forAllStdenvs ( + stdenvName: + makeShell { + pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages"; + } + ) + ) + // lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.isDarwin) ( + prefixAttrs "static" ( + forAllStdenvs ( + stdenvName: + makeShell { + pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".pkgsStatic; + } + ) + ) + // prefixAttrs "llvm" ( + forAllStdenvs ( + stdenvName: + makeShell { + pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".pkgsLLVM; + } + ) + ) + // prefixAttrs "cross" ( + forAllCrossSystems ( + crossSystem: + makeShell { + pkgs = nixpkgsFor.${system}.cross.${crossSystem}; + } + ) + ) + ) + // { default = self.devShells.${system}.native-stdenvPackages; } ); - }; + }; } diff --git a/maintainers/flake-module.nix b/maintainers/flake-module.nix index fcf370b7145..9b2c6dcbf80 100644 --- a/maintainers/flake-module.nix +++ b/maintainers/flake-module.nix @@ -1,669 +1,676 @@ -{ lib, getSystem, inputs, ... }: +{ + lib, + getSystem, + inputs, + ... +}: { imports = [ inputs.git-hooks-nix.flakeModule ]; - perSystem = { config, pkgs, ... }: { + perSystem = + { config, pkgs, ... }: + { - # https://flake.parts/options/git-hooks-nix#options - pre-commit.settings = { - hooks = { - # Conflicts are usually found by other checks, but not those in docs, - # and potentially other places. 
- check-merge-conflicts.enable = true; - # built-in check-merge-conflicts seems ineffective against those produced by mergify backports - check-merge-conflicts-2 = { - enable = true; - entry = "${pkgs.writeScript "check-merge-conflicts" '' - #!${pkgs.runtimeShell} - conflicts=false - for file in "$@"; do - if grep --with-filename --line-number -E '^>>>>>>> ' -- "$file"; then - conflicts=true + # https://flake.parts/options/git-hooks-nix#options + pre-commit.settings = { + hooks = { + # Conflicts are usually found by other checks, but not those in docs, + # and potentially other places. + check-merge-conflicts.enable = true; + # built-in check-merge-conflicts seems ineffective against those produced by mergify backports + check-merge-conflicts-2 = { + enable = true; + entry = "${pkgs.writeScript "check-merge-conflicts" '' + #!${pkgs.runtimeShell} + conflicts=false + for file in "$@"; do + if grep --with-filename --line-number -E '^>>>>>>> ' -- "$file"; then + conflicts=true + fi + done + if $conflicts; then + echo "ERROR: found merge/patch conflicts in files" + exit 1 fi - done - if $conflicts; then - echo "ERROR: found merge/patch conflicts in files" - exit 1 - fi - touch $out - ''}"; - }; - clang-format = { - enable = true; - # https://github.com/cachix/git-hooks.nix/pull/532 - package = pkgs.llvmPackages_latest.clang-tools; - excludes = [ - # We don't want to format test data - # ''tests/(?!nixos/).*\.nix'' - ''^src/[^/]*-tests/data/.*$'' + touch $out + ''}"; + }; + clang-format = { + enable = true; + # https://github.com/cachix/git-hooks.nix/pull/532 + package = pkgs.llvmPackages_latest.clang-tools; + excludes = [ + # We don't want to format test data + # ''tests/(?!nixos/).*\.nix'' + ''^src/[^/]*-tests/data/.*$'' - # Don't format vendored code - ''^doc/manual/redirects\.js$'' - ''^doc/manual/theme/highlight\.js$'' + # Don't format vendored code + ''^doc/manual/redirects\.js$'' + ''^doc/manual/theme/highlight\.js$'' - # We haven't applied formatting to these files yet - ''^doc/manual/redirects\.js$'' - ''^doc/manual/theme/highlight\.js$'' - ''^precompiled-headers\.h$'' - ''^src/build-remote/build-remote\.cc$'' - ''^src/libcmd/built-path\.cc$'' - ''^src/libcmd/built-path\.hh$'' - ''^src/libcmd/common-eval-args\.cc$'' - ''^src/libcmd/common-eval-args\.hh$'' - ''^src/libcmd/editor-for\.cc$'' - ''^src/libcmd/installable-attr-path\.cc$'' - ''^src/libcmd/installable-attr-path\.hh$'' - ''^src/libcmd/installable-derived-path\.cc$'' - ''^src/libcmd/installable-derived-path\.hh$'' - ''^src/libcmd/installable-flake\.cc$'' - ''^src/libcmd/installable-flake\.hh$'' - ''^src/libcmd/installable-value\.cc$'' - ''^src/libcmd/installable-value\.hh$'' - ''^src/libcmd/installables\.cc$'' - ''^src/libcmd/installables\.hh$'' - ''^src/libcmd/legacy\.hh$'' - ''^src/libcmd/markdown\.cc$'' - ''^src/libcmd/misc-store-flags\.cc$'' - ''^src/libcmd/repl-interacter\.cc$'' - ''^src/libcmd/repl-interacter\.hh$'' - ''^src/libcmd/repl\.cc$'' - ''^src/libcmd/repl\.hh$'' - ''^src/libexpr-c/nix_api_expr\.cc$'' - ''^src/libexpr-c/nix_api_external\.cc$'' - ''^src/libexpr/attr-path\.cc$'' - ''^src/libexpr/attr-path\.hh$'' - ''^src/libexpr/attr-set\.cc$'' - ''^src/libexpr/attr-set\.hh$'' - ''^src/libexpr/eval-cache\.cc$'' - ''^src/libexpr/eval-cache\.hh$'' - ''^src/libexpr/eval-error\.cc$'' - ''^src/libexpr/eval-inline\.hh$'' - ''^src/libexpr/eval-settings\.cc$'' - ''^src/libexpr/eval-settings\.hh$'' - ''^src/libexpr/eval\.cc$'' - ''^src/libexpr/eval\.hh$'' - ''^src/libexpr/function-trace\.cc$'' - 
''^src/libexpr/gc-small-vector\.hh$'' - ''^src/libexpr/get-drvs\.cc$'' - ''^src/libexpr/get-drvs\.hh$'' - ''^src/libexpr/json-to-value\.cc$'' - ''^src/libexpr/nixexpr\.cc$'' - ''^src/libexpr/nixexpr\.hh$'' - ''^src/libexpr/parser-state\.hh$'' - ''^src/libexpr/pos-table\.hh$'' - ''^src/libexpr/primops\.cc$'' - ''^src/libexpr/primops\.hh$'' - ''^src/libexpr/primops/context\.cc$'' - ''^src/libexpr/primops/fetchClosure\.cc$'' - ''^src/libexpr/primops/fetchMercurial\.cc$'' - ''^src/libexpr/primops/fetchTree\.cc$'' - ''^src/libexpr/primops/fromTOML\.cc$'' - ''^src/libexpr/print-ambiguous\.cc$'' - ''^src/libexpr/print-ambiguous\.hh$'' - ''^src/libexpr/print-options\.hh$'' - ''^src/libexpr/print\.cc$'' - ''^src/libexpr/print\.hh$'' - ''^src/libexpr/search-path\.cc$'' - ''^src/libexpr/symbol-table\.hh$'' - ''^src/libexpr/value-to-json\.cc$'' - ''^src/libexpr/value-to-json\.hh$'' - ''^src/libexpr/value-to-xml\.cc$'' - ''^src/libexpr/value-to-xml\.hh$'' - ''^src/libexpr/value\.hh$'' - ''^src/libexpr/value/context\.cc$'' - ''^src/libexpr/value/context\.hh$'' - ''^src/libfetchers/attrs\.cc$'' - ''^src/libfetchers/cache\.cc$'' - ''^src/libfetchers/cache\.hh$'' - ''^src/libfetchers/fetch-settings\.cc$'' - ''^src/libfetchers/fetch-settings\.hh$'' - ''^src/libfetchers/fetch-to-store\.cc$'' - ''^src/libfetchers/fetchers\.cc$'' - ''^src/libfetchers/fetchers\.hh$'' - ''^src/libfetchers/filtering-source-accessor\.cc$'' - ''^src/libfetchers/filtering-source-accessor\.hh$'' - ''^src/libfetchers/fs-source-accessor\.cc$'' - ''^src/libfetchers/fs-source-accessor\.hh$'' - ''^src/libfetchers/git-utils\.cc$'' - ''^src/libfetchers/git-utils\.hh$'' - ''^src/libfetchers/github\.cc$'' - ''^src/libfetchers/indirect\.cc$'' - ''^src/libfetchers/memory-source-accessor\.cc$'' - ''^src/libfetchers/path\.cc$'' - ''^src/libfetchers/registry\.cc$'' - ''^src/libfetchers/registry\.hh$'' - ''^src/libfetchers/tarball\.cc$'' - ''^src/libfetchers/tarball\.hh$'' - ''^src/libfetchers/git\.cc$'' - ''^src/libfetchers/mercurial\.cc$'' - ''^src/libflake/flake/config\.cc$'' - ''^src/libflake/flake/flake\.cc$'' - ''^src/libflake/flake/flake\.hh$'' - ''^src/libflake/flake/flakeref\.cc$'' - ''^src/libflake/flake/flakeref\.hh$'' - ''^src/libflake/flake/lockfile\.cc$'' - ''^src/libflake/flake/lockfile\.hh$'' - ''^src/libflake/flake/url-name\.cc$'' - ''^src/libmain/common-args\.cc$'' - ''^src/libmain/common-args\.hh$'' - ''^src/libmain/loggers\.cc$'' - ''^src/libmain/loggers\.hh$'' - ''^src/libmain/progress-bar\.cc$'' - ''^src/libmain/shared\.cc$'' - ''^src/libmain/shared\.hh$'' - ''^src/libmain/unix/stack\.cc$'' - ''^src/libstore/binary-cache-store\.cc$'' - ''^src/libstore/binary-cache-store\.hh$'' - ''^src/libstore/build-result\.hh$'' - ''^src/libstore/builtins\.hh$'' - ''^src/libstore/builtins/buildenv\.cc$'' - ''^src/libstore/builtins/buildenv\.hh$'' - ''^src/libstore/common-protocol-impl\.hh$'' - ''^src/libstore/common-protocol\.cc$'' - ''^src/libstore/common-protocol\.hh$'' - ''^src/libstore/common-ssh-store-config\.hh$'' - ''^src/libstore/content-address\.cc$'' - ''^src/libstore/content-address\.hh$'' - ''^src/libstore/daemon\.cc$'' - ''^src/libstore/daemon\.hh$'' - ''^src/libstore/derivations\.cc$'' - ''^src/libstore/derivations\.hh$'' - ''^src/libstore/derived-path-map\.cc$'' - ''^src/libstore/derived-path-map\.hh$'' - ''^src/libstore/derived-path\.cc$'' - ''^src/libstore/derived-path\.hh$'' - ''^src/libstore/downstream-placeholder\.cc$'' - ''^src/libstore/downstream-placeholder\.hh$'' - ''^src/libstore/dummy-store\.cc$'' - 
''^src/libstore/export-import\.cc$'' - ''^src/libstore/filetransfer\.cc$'' - ''^src/libstore/filetransfer\.hh$'' - ''^src/libstore/gc-store\.hh$'' - ''^src/libstore/globals\.cc$'' - ''^src/libstore/globals\.hh$'' - ''^src/libstore/http-binary-cache-store\.cc$'' - ''^src/libstore/legacy-ssh-store\.cc$'' - ''^src/libstore/legacy-ssh-store\.hh$'' - ''^src/libstore/length-prefixed-protocol-helper\.hh$'' - ''^src/libstore/linux/personality\.cc$'' - ''^src/libstore/linux/personality\.hh$'' - ''^src/libstore/local-binary-cache-store\.cc$'' - ''^src/libstore/local-fs-store\.cc$'' - ''^src/libstore/local-fs-store\.hh$'' - ''^src/libstore/log-store\.cc$'' - ''^src/libstore/log-store\.hh$'' - ''^src/libstore/machines\.cc$'' - ''^src/libstore/machines\.hh$'' - ''^src/libstore/make-content-addressed\.cc$'' - ''^src/libstore/make-content-addressed\.hh$'' - ''^src/libstore/misc\.cc$'' - ''^src/libstore/names\.cc$'' - ''^src/libstore/names\.hh$'' - ''^src/libstore/nar-accessor\.cc$'' - ''^src/libstore/nar-accessor\.hh$'' - ''^src/libstore/nar-info-disk-cache\.cc$'' - ''^src/libstore/nar-info-disk-cache\.hh$'' - ''^src/libstore/nar-info\.cc$'' - ''^src/libstore/nar-info\.hh$'' - ''^src/libstore/outputs-spec\.cc$'' - ''^src/libstore/outputs-spec\.hh$'' - ''^src/libstore/parsed-derivations\.cc$'' - ''^src/libstore/path-info\.cc$'' - ''^src/libstore/path-info\.hh$'' - ''^src/libstore/path-references\.cc$'' - ''^src/libstore/path-regex\.hh$'' - ''^src/libstore/path-with-outputs\.cc$'' - ''^src/libstore/path\.cc$'' - ''^src/libstore/path\.hh$'' - ''^src/libstore/pathlocks\.cc$'' - ''^src/libstore/pathlocks\.hh$'' - ''^src/libstore/profiles\.cc$'' - ''^src/libstore/profiles\.hh$'' - ''^src/libstore/realisation\.cc$'' - ''^src/libstore/realisation\.hh$'' - ''^src/libstore/remote-fs-accessor\.cc$'' - ''^src/libstore/remote-fs-accessor\.hh$'' - ''^src/libstore/remote-store-connection\.hh$'' - ''^src/libstore/remote-store\.cc$'' - ''^src/libstore/remote-store\.hh$'' - ''^src/libstore/s3-binary-cache-store\.cc$'' - ''^src/libstore/s3\.hh$'' - ''^src/libstore/serve-protocol-impl\.cc$'' - ''^src/libstore/serve-protocol-impl\.hh$'' - ''^src/libstore/serve-protocol\.cc$'' - ''^src/libstore/serve-protocol\.hh$'' - ''^src/libstore/sqlite\.cc$'' - ''^src/libstore/sqlite\.hh$'' - ''^src/libstore/ssh-store\.cc$'' - ''^src/libstore/ssh\.cc$'' - ''^src/libstore/ssh\.hh$'' - ''^src/libstore/store-api\.cc$'' - ''^src/libstore/store-api\.hh$'' - ''^src/libstore/store-dir-config\.hh$'' - ''^src/libstore/build/derivation-goal\.cc$'' - ''^src/libstore/build/derivation-goal\.hh$'' - ''^src/libstore/build/drv-output-substitution-goal\.cc$'' - ''^src/libstore/build/drv-output-substitution-goal\.hh$'' - ''^src/libstore/build/entry-points\.cc$'' - ''^src/libstore/build/goal\.cc$'' - ''^src/libstore/build/goal\.hh$'' - ''^src/libstore/unix/build/hook-instance\.cc$'' - ''^src/libstore/unix/build/local-derivation-goal\.cc$'' - ''^src/libstore/unix/build/local-derivation-goal\.hh$'' - ''^src/libstore/build/substitution-goal\.cc$'' - ''^src/libstore/build/substitution-goal\.hh$'' - ''^src/libstore/build/worker\.cc$'' - ''^src/libstore/build/worker\.hh$'' - ''^src/libstore/builtins/fetchurl\.cc$'' - ''^src/libstore/builtins/unpack-channel\.cc$'' - ''^src/libstore/gc\.cc$'' - ''^src/libstore/local-overlay-store\.cc$'' - ''^src/libstore/local-overlay-store\.hh$'' - ''^src/libstore/local-store\.cc$'' - ''^src/libstore/local-store\.hh$'' - ''^src/libstore/unix/user-lock\.cc$'' - ''^src/libstore/unix/user-lock\.hh$'' - 
''^src/libstore/optimise-store\.cc$'' - ''^src/libstore/unix/pathlocks\.cc$'' - ''^src/libstore/posix-fs-canonicalise\.cc$'' - ''^src/libstore/posix-fs-canonicalise\.hh$'' - ''^src/libstore/uds-remote-store\.cc$'' - ''^src/libstore/uds-remote-store\.hh$'' - ''^src/libstore/windows/build\.cc$'' - ''^src/libstore/worker-protocol-impl\.hh$'' - ''^src/libstore/worker-protocol\.cc$'' - ''^src/libstore/worker-protocol\.hh$'' - ''^src/libutil-c/nix_api_util_internal\.h$'' - ''^src/libutil/archive\.cc$'' - ''^src/libutil/archive\.hh$'' - ''^src/libutil/args\.cc$'' - ''^src/libutil/args\.hh$'' - ''^src/libutil/args/root\.hh$'' - ''^src/libutil/callback\.hh$'' - ''^src/libutil/canon-path\.cc$'' - ''^src/libutil/canon-path\.hh$'' - ''^src/libutil/chunked-vector\.hh$'' - ''^src/libutil/closure\.hh$'' - ''^src/libutil/comparator\.hh$'' - ''^src/libutil/compute-levels\.cc$'' - ''^src/libutil/config-impl\.hh$'' - ''^src/libutil/config\.cc$'' - ''^src/libutil/config\.hh$'' - ''^src/libutil/current-process\.cc$'' - ''^src/libutil/current-process\.hh$'' - ''^src/libutil/english\.cc$'' - ''^src/libutil/english\.hh$'' - ''^src/libutil/error\.cc$'' - ''^src/libutil/error\.hh$'' - ''^src/libutil/exit\.hh$'' - ''^src/libutil/experimental-features\.cc$'' - ''^src/libutil/experimental-features\.hh$'' - ''^src/libutil/file-content-address\.cc$'' - ''^src/libutil/file-content-address\.hh$'' - ''^src/libutil/file-descriptor\.cc$'' - ''^src/libutil/file-descriptor\.hh$'' - ''^src/libutil/file-path-impl\.hh$'' - ''^src/libutil/file-path\.hh$'' - ''^src/libutil/file-system\.cc$'' - ''^src/libutil/file-system\.hh$'' - ''^src/libutil/finally\.hh$'' - ''^src/libutil/fmt\.hh$'' - ''^src/libutil/fs-sink\.cc$'' - ''^src/libutil/fs-sink\.hh$'' - ''^src/libutil/git\.cc$'' - ''^src/libutil/git\.hh$'' - ''^src/libutil/hash\.cc$'' - ''^src/libutil/hash\.hh$'' - ''^src/libutil/hilite\.cc$'' - ''^src/libutil/hilite\.hh$'' - ''^src/libutil/source-accessor\.hh$'' - ''^src/libutil/json-impls\.hh$'' - ''^src/libutil/json-utils\.cc$'' - ''^src/libutil/json-utils\.hh$'' - ''^src/libutil/linux/cgroup\.cc$'' - ''^src/libutil/linux/namespaces\.cc$'' - ''^src/libutil/logging\.cc$'' - ''^src/libutil/logging\.hh$'' - ''^src/libutil/lru-cache\.hh$'' - ''^src/libutil/memory-source-accessor\.cc$'' - ''^src/libutil/memory-source-accessor\.hh$'' - ''^src/libutil/pool\.hh$'' - ''^src/libutil/position\.cc$'' - ''^src/libutil/position\.hh$'' - ''^src/libutil/posix-source-accessor\.cc$'' - ''^src/libutil/posix-source-accessor\.hh$'' - ''^src/libutil/processes\.hh$'' - ''^src/libutil/ref\.hh$'' - ''^src/libutil/references\.cc$'' - ''^src/libutil/references\.hh$'' - ''^src/libutil/regex-combinators\.hh$'' - ''^src/libutil/serialise\.cc$'' - ''^src/libutil/serialise\.hh$'' - ''^src/libutil/signals\.hh$'' - ''^src/libutil/signature/local-keys\.cc$'' - ''^src/libutil/signature/local-keys\.hh$'' - ''^src/libutil/signature/signer\.cc$'' - ''^src/libutil/signature/signer\.hh$'' - ''^src/libutil/source-accessor\.cc$'' - ''^src/libutil/source-accessor\.hh$'' - ''^src/libutil/source-path\.cc$'' - ''^src/libutil/source-path\.hh$'' - ''^src/libutil/split\.hh$'' - ''^src/libutil/suggestions\.cc$'' - ''^src/libutil/suggestions\.hh$'' - ''^src/libutil/sync\.hh$'' - ''^src/libutil/terminal\.cc$'' - ''^src/libutil/terminal\.hh$'' - ''^src/libutil/thread-pool\.cc$'' - ''^src/libutil/thread-pool\.hh$'' - ''^src/libutil/topo-sort\.hh$'' - ''^src/libutil/types\.hh$'' - ''^src/libutil/unix/file-descriptor\.cc$'' - ''^src/libutil/unix/file-path\.cc$'' - 
''^src/libutil/unix/monitor-fd\.hh$'' - ''^src/libutil/unix/processes\.cc$'' - ''^src/libutil/unix/signals-impl\.hh$'' - ''^src/libutil/unix/signals\.cc$'' - ''^src/libutil/unix-domain-socket\.cc$'' - ''^src/libutil/unix/users\.cc$'' - ''^src/libutil/url-parts\.hh$'' - ''^src/libutil/url\.cc$'' - ''^src/libutil/url\.hh$'' - ''^src/libutil/users\.cc$'' - ''^src/libutil/users\.hh$'' - ''^src/libutil/util\.cc$'' - ''^src/libutil/util\.hh$'' - ''^src/libutil/variant-wrapper\.hh$'' - ''^src/libutil/widecharwidth/widechar_width\.h$'' # vendored source - ''^src/libutil/windows/file-descriptor\.cc$'' - ''^src/libutil/windows/file-path\.cc$'' - ''^src/libutil/windows/processes\.cc$'' - ''^src/libutil/windows/users\.cc$'' - ''^src/libutil/windows/windows-error\.cc$'' - ''^src/libutil/windows/windows-error\.hh$'' - ''^src/libutil/xml-writer\.cc$'' - ''^src/libutil/xml-writer\.hh$'' - ''^src/nix-build/nix-build\.cc$'' - ''^src/nix-channel/nix-channel\.cc$'' - ''^src/nix-collect-garbage/nix-collect-garbage\.cc$'' - ''^src/nix-env/buildenv.nix$'' - ''^src/nix-env/nix-env\.cc$'' - ''^src/nix-env/user-env\.cc$'' - ''^src/nix-env/user-env\.hh$'' - ''^src/nix-instantiate/nix-instantiate\.cc$'' - ''^src/nix-store/dotgraph\.cc$'' - ''^src/nix-store/graphml\.cc$'' - ''^src/nix-store/nix-store\.cc$'' - ''^src/nix/add-to-store\.cc$'' - ''^src/nix/app\.cc$'' - ''^src/nix/build\.cc$'' - ''^src/nix/bundle\.cc$'' - ''^src/nix/cat\.cc$'' - ''^src/nix/config-check\.cc$'' - ''^src/nix/config\.cc$'' - ''^src/nix/copy\.cc$'' - ''^src/nix/derivation-add\.cc$'' - ''^src/nix/derivation-show\.cc$'' - ''^src/nix/derivation\.cc$'' - ''^src/nix/develop\.cc$'' - ''^src/nix/diff-closures\.cc$'' - ''^src/nix/dump-path\.cc$'' - ''^src/nix/edit\.cc$'' - ''^src/nix/eval\.cc$'' - ''^src/nix/flake\.cc$'' - ''^src/nix/fmt\.cc$'' - ''^src/nix/hash\.cc$'' - ''^src/nix/log\.cc$'' - ''^src/nix/ls\.cc$'' - ''^src/nix/main\.cc$'' - ''^src/nix/make-content-addressed\.cc$'' - ''^src/nix/nar\.cc$'' - ''^src/nix/optimise-store\.cc$'' - ''^src/nix/path-from-hash-part\.cc$'' - ''^src/nix/path-info\.cc$'' - ''^src/nix/prefetch\.cc$'' - ''^src/nix/profile\.cc$'' - ''^src/nix/realisation\.cc$'' - ''^src/nix/registry\.cc$'' - ''^src/nix/repl\.cc$'' - ''^src/nix/run\.cc$'' - ''^src/nix/run\.hh$'' - ''^src/nix/search\.cc$'' - ''^src/nix/sigs\.cc$'' - ''^src/nix/store-copy-log\.cc$'' - ''^src/nix/store-delete\.cc$'' - ''^src/nix/store-gc\.cc$'' - ''^src/nix/store-info\.cc$'' - ''^src/nix/store-repair\.cc$'' - ''^src/nix/store\.cc$'' - ''^src/nix/unix/daemon\.cc$'' - ''^src/nix/upgrade-nix\.cc$'' - ''^src/nix/verify\.cc$'' - ''^src/nix/why-depends\.cc$'' + # We haven't applied formatting to these files yet + ''^doc/manual/redirects\.js$'' + ''^doc/manual/theme/highlight\.js$'' + ''^precompiled-headers\.h$'' + ''^src/build-remote/build-remote\.cc$'' + ''^src/libcmd/built-path\.cc$'' + ''^src/libcmd/built-path\.hh$'' + ''^src/libcmd/common-eval-args\.cc$'' + ''^src/libcmd/common-eval-args\.hh$'' + ''^src/libcmd/editor-for\.cc$'' + ''^src/libcmd/installable-attr-path\.cc$'' + ''^src/libcmd/installable-attr-path\.hh$'' + ''^src/libcmd/installable-derived-path\.cc$'' + ''^src/libcmd/installable-derived-path\.hh$'' + ''^src/libcmd/installable-flake\.cc$'' + ''^src/libcmd/installable-flake\.hh$'' + ''^src/libcmd/installable-value\.cc$'' + ''^src/libcmd/installable-value\.hh$'' + ''^src/libcmd/installables\.cc$'' + ''^src/libcmd/installables\.hh$'' + ''^src/libcmd/legacy\.hh$'' + ''^src/libcmd/markdown\.cc$'' + ''^src/libcmd/misc-store-flags\.cc$'' + 
''^src/libcmd/repl-interacter\.cc$'' + ''^src/libcmd/repl-interacter\.hh$'' + ''^src/libcmd/repl\.cc$'' + ''^src/libcmd/repl\.hh$'' + ''^src/libexpr-c/nix_api_expr\.cc$'' + ''^src/libexpr-c/nix_api_external\.cc$'' + ''^src/libexpr/attr-path\.cc$'' + ''^src/libexpr/attr-path\.hh$'' + ''^src/libexpr/attr-set\.cc$'' + ''^src/libexpr/attr-set\.hh$'' + ''^src/libexpr/eval-cache\.cc$'' + ''^src/libexpr/eval-cache\.hh$'' + ''^src/libexpr/eval-error\.cc$'' + ''^src/libexpr/eval-inline\.hh$'' + ''^src/libexpr/eval-settings\.cc$'' + ''^src/libexpr/eval-settings\.hh$'' + ''^src/libexpr/eval\.cc$'' + ''^src/libexpr/eval\.hh$'' + ''^src/libexpr/function-trace\.cc$'' + ''^src/libexpr/gc-small-vector\.hh$'' + ''^src/libexpr/get-drvs\.cc$'' + ''^src/libexpr/get-drvs\.hh$'' + ''^src/libexpr/json-to-value\.cc$'' + ''^src/libexpr/nixexpr\.cc$'' + ''^src/libexpr/nixexpr\.hh$'' + ''^src/libexpr/parser-state\.hh$'' + ''^src/libexpr/pos-table\.hh$'' + ''^src/libexpr/primops\.cc$'' + ''^src/libexpr/primops\.hh$'' + ''^src/libexpr/primops/context\.cc$'' + ''^src/libexpr/primops/fetchClosure\.cc$'' + ''^src/libexpr/primops/fetchMercurial\.cc$'' + ''^src/libexpr/primops/fetchTree\.cc$'' + ''^src/libexpr/primops/fromTOML\.cc$'' + ''^src/libexpr/print-ambiguous\.cc$'' + ''^src/libexpr/print-ambiguous\.hh$'' + ''^src/libexpr/print-options\.hh$'' + ''^src/libexpr/print\.cc$'' + ''^src/libexpr/print\.hh$'' + ''^src/libexpr/search-path\.cc$'' + ''^src/libexpr/symbol-table\.hh$'' + ''^src/libexpr/value-to-json\.cc$'' + ''^src/libexpr/value-to-json\.hh$'' + ''^src/libexpr/value-to-xml\.cc$'' + ''^src/libexpr/value-to-xml\.hh$'' + ''^src/libexpr/value\.hh$'' + ''^src/libexpr/value/context\.cc$'' + ''^src/libexpr/value/context\.hh$'' + ''^src/libfetchers/attrs\.cc$'' + ''^src/libfetchers/cache\.cc$'' + ''^src/libfetchers/cache\.hh$'' + ''^src/libfetchers/fetch-settings\.cc$'' + ''^src/libfetchers/fetch-settings\.hh$'' + ''^src/libfetchers/fetch-to-store\.cc$'' + ''^src/libfetchers/fetchers\.cc$'' + ''^src/libfetchers/fetchers\.hh$'' + ''^src/libfetchers/filtering-source-accessor\.cc$'' + ''^src/libfetchers/filtering-source-accessor\.hh$'' + ''^src/libfetchers/fs-source-accessor\.cc$'' + ''^src/libfetchers/fs-source-accessor\.hh$'' + ''^src/libfetchers/git-utils\.cc$'' + ''^src/libfetchers/git-utils\.hh$'' + ''^src/libfetchers/github\.cc$'' + ''^src/libfetchers/indirect\.cc$'' + ''^src/libfetchers/memory-source-accessor\.cc$'' + ''^src/libfetchers/path\.cc$'' + ''^src/libfetchers/registry\.cc$'' + ''^src/libfetchers/registry\.hh$'' + ''^src/libfetchers/tarball\.cc$'' + ''^src/libfetchers/tarball\.hh$'' + ''^src/libfetchers/git\.cc$'' + ''^src/libfetchers/mercurial\.cc$'' + ''^src/libflake/flake/config\.cc$'' + ''^src/libflake/flake/flake\.cc$'' + ''^src/libflake/flake/flake\.hh$'' + ''^src/libflake/flake/flakeref\.cc$'' + ''^src/libflake/flake/flakeref\.hh$'' + ''^src/libflake/flake/lockfile\.cc$'' + ''^src/libflake/flake/lockfile\.hh$'' + ''^src/libflake/flake/url-name\.cc$'' + ''^src/libmain/common-args\.cc$'' + ''^src/libmain/common-args\.hh$'' + ''^src/libmain/loggers\.cc$'' + ''^src/libmain/loggers\.hh$'' + ''^src/libmain/progress-bar\.cc$'' + ''^src/libmain/shared\.cc$'' + ''^src/libmain/shared\.hh$'' + ''^src/libmain/unix/stack\.cc$'' + ''^src/libstore/binary-cache-store\.cc$'' + ''^src/libstore/binary-cache-store\.hh$'' + ''^src/libstore/build-result\.hh$'' + ''^src/libstore/builtins\.hh$'' + ''^src/libstore/builtins/buildenv\.cc$'' + ''^src/libstore/builtins/buildenv\.hh$'' + 
''^src/libstore/common-protocol-impl\.hh$'' + ''^src/libstore/common-protocol\.cc$'' + ''^src/libstore/common-protocol\.hh$'' + ''^src/libstore/common-ssh-store-config\.hh$'' + ''^src/libstore/content-address\.cc$'' + ''^src/libstore/content-address\.hh$'' + ''^src/libstore/daemon\.cc$'' + ''^src/libstore/daemon\.hh$'' + ''^src/libstore/derivations\.cc$'' + ''^src/libstore/derivations\.hh$'' + ''^src/libstore/derived-path-map\.cc$'' + ''^src/libstore/derived-path-map\.hh$'' + ''^src/libstore/derived-path\.cc$'' + ''^src/libstore/derived-path\.hh$'' + ''^src/libstore/downstream-placeholder\.cc$'' + ''^src/libstore/downstream-placeholder\.hh$'' + ''^src/libstore/dummy-store\.cc$'' + ''^src/libstore/export-import\.cc$'' + ''^src/libstore/filetransfer\.cc$'' + ''^src/libstore/filetransfer\.hh$'' + ''^src/libstore/gc-store\.hh$'' + ''^src/libstore/globals\.cc$'' + ''^src/libstore/globals\.hh$'' + ''^src/libstore/http-binary-cache-store\.cc$'' + ''^src/libstore/legacy-ssh-store\.cc$'' + ''^src/libstore/legacy-ssh-store\.hh$'' + ''^src/libstore/length-prefixed-protocol-helper\.hh$'' + ''^src/libstore/linux/personality\.cc$'' + ''^src/libstore/linux/personality\.hh$'' + ''^src/libstore/local-binary-cache-store\.cc$'' + ''^src/libstore/local-fs-store\.cc$'' + ''^src/libstore/local-fs-store\.hh$'' + ''^src/libstore/log-store\.cc$'' + ''^src/libstore/log-store\.hh$'' + ''^src/libstore/machines\.cc$'' + ''^src/libstore/machines\.hh$'' + ''^src/libstore/make-content-addressed\.cc$'' + ''^src/libstore/make-content-addressed\.hh$'' + ''^src/libstore/misc\.cc$'' + ''^src/libstore/names\.cc$'' + ''^src/libstore/names\.hh$'' + ''^src/libstore/nar-accessor\.cc$'' + ''^src/libstore/nar-accessor\.hh$'' + ''^src/libstore/nar-info-disk-cache\.cc$'' + ''^src/libstore/nar-info-disk-cache\.hh$'' + ''^src/libstore/nar-info\.cc$'' + ''^src/libstore/nar-info\.hh$'' + ''^src/libstore/outputs-spec\.cc$'' + ''^src/libstore/outputs-spec\.hh$'' + ''^src/libstore/parsed-derivations\.cc$'' + ''^src/libstore/path-info\.cc$'' + ''^src/libstore/path-info\.hh$'' + ''^src/libstore/path-references\.cc$'' + ''^src/libstore/path-regex\.hh$'' + ''^src/libstore/path-with-outputs\.cc$'' + ''^src/libstore/path\.cc$'' + ''^src/libstore/path\.hh$'' + ''^src/libstore/pathlocks\.cc$'' + ''^src/libstore/pathlocks\.hh$'' + ''^src/libstore/profiles\.cc$'' + ''^src/libstore/profiles\.hh$'' + ''^src/libstore/realisation\.cc$'' + ''^src/libstore/realisation\.hh$'' + ''^src/libstore/remote-fs-accessor\.cc$'' + ''^src/libstore/remote-fs-accessor\.hh$'' + ''^src/libstore/remote-store-connection\.hh$'' + ''^src/libstore/remote-store\.cc$'' + ''^src/libstore/remote-store\.hh$'' + ''^src/libstore/s3-binary-cache-store\.cc$'' + ''^src/libstore/s3\.hh$'' + ''^src/libstore/serve-protocol-impl\.cc$'' + ''^src/libstore/serve-protocol-impl\.hh$'' + ''^src/libstore/serve-protocol\.cc$'' + ''^src/libstore/serve-protocol\.hh$'' + ''^src/libstore/sqlite\.cc$'' + ''^src/libstore/sqlite\.hh$'' + ''^src/libstore/ssh-store\.cc$'' + ''^src/libstore/ssh\.cc$'' + ''^src/libstore/ssh\.hh$'' + ''^src/libstore/store-api\.cc$'' + ''^src/libstore/store-api\.hh$'' + ''^src/libstore/store-dir-config\.hh$'' + ''^src/libstore/build/derivation-goal\.cc$'' + ''^src/libstore/build/derivation-goal\.hh$'' + ''^src/libstore/build/drv-output-substitution-goal\.cc$'' + ''^src/libstore/build/drv-output-substitution-goal\.hh$'' + ''^src/libstore/build/entry-points\.cc$'' + ''^src/libstore/build/goal\.cc$'' + ''^src/libstore/build/goal\.hh$'' + 
''^src/libstore/unix/build/hook-instance\.cc$'' + ''^src/libstore/unix/build/local-derivation-goal\.cc$'' + ''^src/libstore/unix/build/local-derivation-goal\.hh$'' + ''^src/libstore/build/substitution-goal\.cc$'' + ''^src/libstore/build/substitution-goal\.hh$'' + ''^src/libstore/build/worker\.cc$'' + ''^src/libstore/build/worker\.hh$'' + ''^src/libstore/builtins/fetchurl\.cc$'' + ''^src/libstore/builtins/unpack-channel\.cc$'' + ''^src/libstore/gc\.cc$'' + ''^src/libstore/local-overlay-store\.cc$'' + ''^src/libstore/local-overlay-store\.hh$'' + ''^src/libstore/local-store\.cc$'' + ''^src/libstore/local-store\.hh$'' + ''^src/libstore/unix/user-lock\.cc$'' + ''^src/libstore/unix/user-lock\.hh$'' + ''^src/libstore/optimise-store\.cc$'' + ''^src/libstore/unix/pathlocks\.cc$'' + ''^src/libstore/posix-fs-canonicalise\.cc$'' + ''^src/libstore/posix-fs-canonicalise\.hh$'' + ''^src/libstore/uds-remote-store\.cc$'' + ''^src/libstore/uds-remote-store\.hh$'' + ''^src/libstore/windows/build\.cc$'' + ''^src/libstore/worker-protocol-impl\.hh$'' + ''^src/libstore/worker-protocol\.cc$'' + ''^src/libstore/worker-protocol\.hh$'' + ''^src/libutil-c/nix_api_util_internal\.h$'' + ''^src/libutil/archive\.cc$'' + ''^src/libutil/archive\.hh$'' + ''^src/libutil/args\.cc$'' + ''^src/libutil/args\.hh$'' + ''^src/libutil/args/root\.hh$'' + ''^src/libutil/callback\.hh$'' + ''^src/libutil/canon-path\.cc$'' + ''^src/libutil/canon-path\.hh$'' + ''^src/libutil/chunked-vector\.hh$'' + ''^src/libutil/closure\.hh$'' + ''^src/libutil/comparator\.hh$'' + ''^src/libutil/compute-levels\.cc$'' + ''^src/libutil/config-impl\.hh$'' + ''^src/libutil/config\.cc$'' + ''^src/libutil/config\.hh$'' + ''^src/libutil/current-process\.cc$'' + ''^src/libutil/current-process\.hh$'' + ''^src/libutil/english\.cc$'' + ''^src/libutil/english\.hh$'' + ''^src/libutil/error\.cc$'' + ''^src/libutil/error\.hh$'' + ''^src/libutil/exit\.hh$'' + ''^src/libutil/experimental-features\.cc$'' + ''^src/libutil/experimental-features\.hh$'' + ''^src/libutil/file-content-address\.cc$'' + ''^src/libutil/file-content-address\.hh$'' + ''^src/libutil/file-descriptor\.cc$'' + ''^src/libutil/file-descriptor\.hh$'' + ''^src/libutil/file-path-impl\.hh$'' + ''^src/libutil/file-path\.hh$'' + ''^src/libutil/file-system\.cc$'' + ''^src/libutil/file-system\.hh$'' + ''^src/libutil/finally\.hh$'' + ''^src/libutil/fmt\.hh$'' + ''^src/libutil/fs-sink\.cc$'' + ''^src/libutil/fs-sink\.hh$'' + ''^src/libutil/git\.cc$'' + ''^src/libutil/git\.hh$'' + ''^src/libutil/hash\.cc$'' + ''^src/libutil/hash\.hh$'' + ''^src/libutil/hilite\.cc$'' + ''^src/libutil/hilite\.hh$'' + ''^src/libutil/source-accessor\.hh$'' + ''^src/libutil/json-impls\.hh$'' + ''^src/libutil/json-utils\.cc$'' + ''^src/libutil/json-utils\.hh$'' + ''^src/libutil/linux/cgroup\.cc$'' + ''^src/libutil/linux/namespaces\.cc$'' + ''^src/libutil/logging\.cc$'' + ''^src/libutil/logging\.hh$'' + ''^src/libutil/lru-cache\.hh$'' + ''^src/libutil/memory-source-accessor\.cc$'' + ''^src/libutil/memory-source-accessor\.hh$'' + ''^src/libutil/pool\.hh$'' + ''^src/libutil/position\.cc$'' + ''^src/libutil/position\.hh$'' + ''^src/libutil/posix-source-accessor\.cc$'' + ''^src/libutil/posix-source-accessor\.hh$'' + ''^src/libutil/processes\.hh$'' + ''^src/libutil/ref\.hh$'' + ''^src/libutil/references\.cc$'' + ''^src/libutil/references\.hh$'' + ''^src/libutil/regex-combinators\.hh$'' + ''^src/libutil/serialise\.cc$'' + ''^src/libutil/serialise\.hh$'' + ''^src/libutil/signals\.hh$'' + ''^src/libutil/signature/local-keys\.cc$'' + 
''^src/libutil/signature/local-keys\.hh$'' + ''^src/libutil/signature/signer\.cc$'' + ''^src/libutil/signature/signer\.hh$'' + ''^src/libutil/source-accessor\.cc$'' + ''^src/libutil/source-accessor\.hh$'' + ''^src/libutil/source-path\.cc$'' + ''^src/libutil/source-path\.hh$'' + ''^src/libutil/split\.hh$'' + ''^src/libutil/suggestions\.cc$'' + ''^src/libutil/suggestions\.hh$'' + ''^src/libutil/sync\.hh$'' + ''^src/libutil/terminal\.cc$'' + ''^src/libutil/terminal\.hh$'' + ''^src/libutil/thread-pool\.cc$'' + ''^src/libutil/thread-pool\.hh$'' + ''^src/libutil/topo-sort\.hh$'' + ''^src/libutil/types\.hh$'' + ''^src/libutil/unix/file-descriptor\.cc$'' + ''^src/libutil/unix/file-path\.cc$'' + ''^src/libutil/unix/monitor-fd\.hh$'' + ''^src/libutil/unix/processes\.cc$'' + ''^src/libutil/unix/signals-impl\.hh$'' + ''^src/libutil/unix/signals\.cc$'' + ''^src/libutil/unix-domain-socket\.cc$'' + ''^src/libutil/unix/users\.cc$'' + ''^src/libutil/url-parts\.hh$'' + ''^src/libutil/url\.cc$'' + ''^src/libutil/url\.hh$'' + ''^src/libutil/users\.cc$'' + ''^src/libutil/users\.hh$'' + ''^src/libutil/util\.cc$'' + ''^src/libutil/util\.hh$'' + ''^src/libutil/variant-wrapper\.hh$'' + ''^src/libutil/widecharwidth/widechar_width\.h$'' # vendored source + ''^src/libutil/windows/file-descriptor\.cc$'' + ''^src/libutil/windows/file-path\.cc$'' + ''^src/libutil/windows/processes\.cc$'' + ''^src/libutil/windows/users\.cc$'' + ''^src/libutil/windows/windows-error\.cc$'' + ''^src/libutil/windows/windows-error\.hh$'' + ''^src/libutil/xml-writer\.cc$'' + ''^src/libutil/xml-writer\.hh$'' + ''^src/nix-build/nix-build\.cc$'' + ''^src/nix-channel/nix-channel\.cc$'' + ''^src/nix-collect-garbage/nix-collect-garbage\.cc$'' + ''^src/nix-env/buildenv.nix$'' + ''^src/nix-env/nix-env\.cc$'' + ''^src/nix-env/user-env\.cc$'' + ''^src/nix-env/user-env\.hh$'' + ''^src/nix-instantiate/nix-instantiate\.cc$'' + ''^src/nix-store/dotgraph\.cc$'' + ''^src/nix-store/graphml\.cc$'' + ''^src/nix-store/nix-store\.cc$'' + ''^src/nix/add-to-store\.cc$'' + ''^src/nix/app\.cc$'' + ''^src/nix/build\.cc$'' + ''^src/nix/bundle\.cc$'' + ''^src/nix/cat\.cc$'' + ''^src/nix/config-check\.cc$'' + ''^src/nix/config\.cc$'' + ''^src/nix/copy\.cc$'' + ''^src/nix/derivation-add\.cc$'' + ''^src/nix/derivation-show\.cc$'' + ''^src/nix/derivation\.cc$'' + ''^src/nix/develop\.cc$'' + ''^src/nix/diff-closures\.cc$'' + ''^src/nix/dump-path\.cc$'' + ''^src/nix/edit\.cc$'' + ''^src/nix/eval\.cc$'' + ''^src/nix/flake\.cc$'' + ''^src/nix/fmt\.cc$'' + ''^src/nix/hash\.cc$'' + ''^src/nix/log\.cc$'' + ''^src/nix/ls\.cc$'' + ''^src/nix/main\.cc$'' + ''^src/nix/make-content-addressed\.cc$'' + ''^src/nix/nar\.cc$'' + ''^src/nix/optimise-store\.cc$'' + ''^src/nix/path-from-hash-part\.cc$'' + ''^src/nix/path-info\.cc$'' + ''^src/nix/prefetch\.cc$'' + ''^src/nix/profile\.cc$'' + ''^src/nix/realisation\.cc$'' + ''^src/nix/registry\.cc$'' + ''^src/nix/repl\.cc$'' + ''^src/nix/run\.cc$'' + ''^src/nix/run\.hh$'' + ''^src/nix/search\.cc$'' + ''^src/nix/sigs\.cc$'' + ''^src/nix/store-copy-log\.cc$'' + ''^src/nix/store-delete\.cc$'' + ''^src/nix/store-gc\.cc$'' + ''^src/nix/store-info\.cc$'' + ''^src/nix/store-repair\.cc$'' + ''^src/nix/store\.cc$'' + ''^src/nix/unix/daemon\.cc$'' + ''^src/nix/upgrade-nix\.cc$'' + ''^src/nix/verify\.cc$'' + ''^src/nix/why-depends\.cc$'' - ''^tests/functional/plugins/plugintest\.cc'' - ''^tests/functional/test-libstoreconsumer/main\.cc'' - ''^tests/nixos/ca-fd-leak/sender\.c'' - ''^tests/nixos/ca-fd-leak/smuggler\.c'' - 
''^tests/nixos/user-sandboxing/attacker\.c'' - ''^src/libexpr-test-support/tests/libexpr\.hh'' - ''^src/libexpr-test-support/tests/value/context\.cc'' - ''^src/libexpr-test-support/tests/value/context\.hh'' - ''^src/libexpr-tests/derived-path\.cc'' - ''^src/libexpr-tests/error_traces\.cc'' - ''^src/libexpr-tests/eval\.cc'' - ''^src/libexpr-tests/json\.cc'' - ''^src/libexpr-tests/main\.cc'' - ''^src/libexpr-tests/primops\.cc'' - ''^src/libexpr-tests/search-path\.cc'' - ''^src/libexpr-tests/trivial\.cc'' - ''^src/libexpr-tests/value/context\.cc'' - ''^src/libexpr-tests/value/print\.cc'' - ''^src/libfetchers-tests/public-key\.cc'' - ''^src/libflake-tests/flakeref\.cc'' - ''^src/libflake-tests/url-name\.cc'' - ''^src/libstore-test-support/tests/derived-path\.cc'' - ''^src/libstore-test-support/tests/derived-path\.hh'' - ''^src/libstore-test-support/tests/nix_api_store\.hh'' - ''^src/libstore-test-support/tests/outputs-spec\.cc'' - ''^src/libstore-test-support/tests/outputs-spec\.hh'' - ''^src/libstore-test-support/tests/path\.cc'' - ''^src/libstore-test-support/tests/path\.hh'' - ''^src/libstore-test-support/tests/protocol\.hh'' - ''^src/libstore-tests/common-protocol\.cc'' - ''^src/libstore-tests/content-address\.cc'' - ''^src/libstore-tests/derivation\.cc'' - ''^src/libstore-tests/derived-path\.cc'' - ''^src/libstore-tests/downstream-placeholder\.cc'' - ''^src/libstore-tests/machines\.cc'' - ''^src/libstore-tests/nar-info-disk-cache\.cc'' - ''^src/libstore-tests/nar-info\.cc'' - ''^src/libstore-tests/outputs-spec\.cc'' - ''^src/libstore-tests/path-info\.cc'' - ''^src/libstore-tests/path\.cc'' - ''^src/libstore-tests/serve-protocol\.cc'' - ''^src/libstore-tests/worker-protocol\.cc'' - ''^src/libutil-test-support/tests/characterization\.hh'' - ''^src/libutil-test-support/tests/hash\.cc'' - ''^src/libutil-test-support/tests/hash\.hh'' - ''^src/libutil-tests/args\.cc'' - ''^src/libutil-tests/canon-path\.cc'' - ''^src/libutil-tests/chunked-vector\.cc'' - ''^src/libutil-tests/closure\.cc'' - ''^src/libutil-tests/compression\.cc'' - ''^src/libutil-tests/config\.cc'' - ''^src/libutil-tests/file-content-address\.cc'' - ''^src/libutil-tests/git\.cc'' - ''^src/libutil-tests/hash\.cc'' - ''^src/libutil-tests/hilite\.cc'' - ''^src/libutil-tests/json-utils\.cc'' - ''^src/libutil-tests/logging\.cc'' - ''^src/libutil-tests/lru-cache\.cc'' - ''^src/libutil-tests/pool\.cc'' - ''^src/libutil-tests/references\.cc'' - ''^src/libutil-tests/suggestions\.cc'' - ''^src/libutil-tests/url\.cc'' - ''^src/libutil-tests/xml-writer\.cc'' - ]; - }; - shellcheck = { - enable = true; - excludes = [ - # We haven't linted these files yet - ''^config/install-sh$'' - ''^misc/bash/completion\.sh$'' - ''^misc/fish/completion\.fish$'' - ''^misc/zsh/completion\.zsh$'' - ''^scripts/create-darwin-volume\.sh$'' - ''^scripts/install-darwin-multi-user\.sh$'' - ''^scripts/install-multi-user\.sh$'' - ''^scripts/install-systemd-multi-user\.sh$'' - ''^src/nix/get-env\.sh$'' - ''^tests/functional/ca/build-dry\.sh$'' - ''^tests/functional/ca/build-with-garbage-path\.sh$'' - ''^tests/functional/ca/common\.sh$'' - ''^tests/functional/ca/concurrent-builds\.sh$'' - ''^tests/functional/ca/eval-store\.sh$'' - ''^tests/functional/ca/gc\.sh$'' - ''^tests/functional/ca/import-from-derivation\.sh$'' - ''^tests/functional/ca/new-build-cmd\.sh$'' - ''^tests/functional/ca/nix-shell\.sh$'' - ''^tests/functional/ca/post-hook\.sh$'' - ''^tests/functional/ca/recursive\.sh$'' - ''^tests/functional/ca/repl\.sh$'' - ''^tests/functional/ca/selfref-gc\.sh$'' - 
''^tests/functional/ca/why-depends\.sh$'' - ''^tests/functional/characterisation-test-infra\.sh$'' - ''^tests/functional/common/vars-and-functions\.sh$'' - ''^tests/functional/completions\.sh$'' - ''^tests/functional/compute-levels\.sh$'' - ''^tests/functional/config\.sh$'' - ''^tests/functional/db-migration\.sh$'' - ''^tests/functional/debugger\.sh$'' - ''^tests/functional/dependencies\.builder0\.sh$'' - ''^tests/functional/dependencies\.sh$'' - ''^tests/functional/dump-db\.sh$'' - ''^tests/functional/dyn-drv/build-built-drv\.sh$'' - ''^tests/functional/dyn-drv/common\.sh$'' - ''^tests/functional/dyn-drv/dep-built-drv\.sh$'' - ''^tests/functional/dyn-drv/eval-outputOf\.sh$'' - ''^tests/functional/dyn-drv/old-daemon-error-hack\.sh$'' - ''^tests/functional/dyn-drv/recursive-mod-json\.sh$'' - ''^tests/functional/eval-store\.sh$'' - ''^tests/functional/export-graph\.sh$'' - ''^tests/functional/export\.sh$'' - ''^tests/functional/extra-sandbox-profile\.sh$'' - ''^tests/functional/fetchClosure\.sh$'' - ''^tests/functional/fetchGit\.sh$'' - ''^tests/functional/fetchGitRefs\.sh$'' - ''^tests/functional/fetchGitSubmodules\.sh$'' - ''^tests/functional/fetchGitVerification\.sh$'' - ''^tests/functional/fetchMercurial\.sh$'' - ''^tests/functional/fixed\.builder1\.sh$'' - ''^tests/functional/fixed\.builder2\.sh$'' - ''^tests/functional/fixed\.sh$'' - ''^tests/functional/flakes/absolute-paths\.sh$'' - ''^tests/functional/flakes/check\.sh$'' - ''^tests/functional/flakes/config\.sh$'' - ''^tests/functional/flakes/flakes\.sh$'' - ''^tests/functional/flakes/follow-paths\.sh$'' - ''^tests/functional/flakes/prefetch\.sh$'' - ''^tests/functional/flakes/run\.sh$'' - ''^tests/functional/flakes/show\.sh$'' - ''^tests/functional/fmt\.sh$'' - ''^tests/functional/fmt\.simple\.sh$'' - ''^tests/functional/gc-auto\.sh$'' - ''^tests/functional/gc-concurrent\.builder\.sh$'' - ''^tests/functional/gc-concurrent\.sh$'' - ''^tests/functional/gc-concurrent2\.builder\.sh$'' - ''^tests/functional/gc-non-blocking\.sh$'' - ''^tests/functional/git-hashing/common\.sh$'' - ''^tests/functional/git-hashing/simple\.sh$'' - ''^tests/functional/hash-convert\.sh$'' - ''^tests/functional/impure-derivations\.sh$'' - ''^tests/functional/impure-eval\.sh$'' - ''^tests/functional/install-darwin\.sh$'' - ''^tests/functional/legacy-ssh-store\.sh$'' - ''^tests/functional/linux-sandbox\.sh$'' - ''^tests/functional/local-overlay-store/add-lower-inner\.sh$'' - ''^tests/functional/local-overlay-store/add-lower\.sh$'' - ''^tests/functional/local-overlay-store/bad-uris\.sh$'' - ''^tests/functional/local-overlay-store/build-inner\.sh$'' - ''^tests/functional/local-overlay-store/build\.sh$'' - ''^tests/functional/local-overlay-store/check-post-init-inner\.sh$'' - ''^tests/functional/local-overlay-store/check-post-init\.sh$'' - ''^tests/functional/local-overlay-store/common\.sh$'' - ''^tests/functional/local-overlay-store/delete-duplicate-inner\.sh$'' - ''^tests/functional/local-overlay-store/delete-duplicate\.sh$'' - ''^tests/functional/local-overlay-store/delete-refs-inner\.sh$'' - ''^tests/functional/local-overlay-store/delete-refs\.sh$'' - ''^tests/functional/local-overlay-store/gc-inner\.sh$'' - ''^tests/functional/local-overlay-store/gc\.sh$'' - ''^tests/functional/local-overlay-store/optimise-inner\.sh$'' - ''^tests/functional/local-overlay-store/optimise\.sh$'' - ''^tests/functional/local-overlay-store/redundant-add-inner\.sh$'' - ''^tests/functional/local-overlay-store/redundant-add\.sh$'' - ''^tests/functional/local-overlay-store/remount\.sh$'' - 
''^tests/functional/local-overlay-store/stale-file-handle-inner\.sh$'' - ''^tests/functional/local-overlay-store/stale-file-handle\.sh$'' - ''^tests/functional/local-overlay-store/verify-inner\.sh$'' - ''^tests/functional/local-overlay-store/verify\.sh$'' - ''^tests/functional/logging\.sh$'' - ''^tests/functional/misc\.sh$'' - ''^tests/functional/multiple-outputs\.sh$'' - ''^tests/functional/nested-sandboxing\.sh$'' - ''^tests/functional/nested-sandboxing/command\.sh$'' - ''^tests/functional/nix-build\.sh$'' - ''^tests/functional/nix-channel\.sh$'' - ''^tests/functional/nix-collect-garbage-d\.sh$'' - ''^tests/functional/nix-copy-ssh-common\.sh$'' - ''^tests/functional/nix-copy-ssh-ng\.sh$'' - ''^tests/functional/nix-copy-ssh\.sh$'' - ''^tests/functional/nix-daemon-untrusting\.sh$'' - ''^tests/functional/nix-profile\.sh$'' - ''^tests/functional/nix-shell\.sh$'' - ''^tests/functional/nix_path\.sh$'' - ''^tests/functional/optimise-store\.sh$'' - ''^tests/functional/output-normalization\.sh$'' - ''^tests/functional/parallel\.builder\.sh$'' - ''^tests/functional/parallel\.sh$'' - ''^tests/functional/pass-as-file\.sh$'' - ''^tests/functional/path-from-hash-part\.sh$'' - ''^tests/functional/path-info\.sh$'' - ''^tests/functional/placeholders\.sh$'' - ''^tests/functional/post-hook\.sh$'' - ''^tests/functional/pure-eval\.sh$'' - ''^tests/functional/push-to-store-old\.sh$'' - ''^tests/functional/push-to-store\.sh$'' - ''^tests/functional/read-only-store\.sh$'' - ''^tests/functional/readfile-context\.sh$'' - ''^tests/functional/recursive\.sh$'' - ''^tests/functional/referrers\.sh$'' - ''^tests/functional/remote-store\.sh$'' - ''^tests/functional/repair\.sh$'' - ''^tests/functional/restricted\.sh$'' - ''^tests/functional/search\.sh$'' - ''^tests/functional/secure-drv-outputs\.sh$'' - ''^tests/functional/selfref-gc\.sh$'' - ''^tests/functional/shell\.shebang\.sh$'' - ''^tests/functional/simple\.builder\.sh$'' - ''^tests/functional/supplementary-groups\.sh$'' - ''^tests/functional/toString-path\.sh$'' - ''^tests/functional/user-envs-migration\.sh$'' - ''^tests/functional/user-envs-test-case\.sh$'' - ''^tests/functional/user-envs\.builder\.sh$'' - ''^tests/functional/user-envs\.sh$'' - ''^tests/functional/why-depends\.sh$'' - ''^src/libutil-tests/data/git/check-data\.sh$'' - ]; + ''^tests/functional/plugins/plugintest\.cc'' + ''^tests/functional/test-libstoreconsumer/main\.cc'' + ''^tests/nixos/ca-fd-leak/sender\.c'' + ''^tests/nixos/ca-fd-leak/smuggler\.c'' + ''^tests/nixos/user-sandboxing/attacker\.c'' + ''^src/libexpr-test-support/tests/libexpr\.hh'' + ''^src/libexpr-test-support/tests/value/context\.cc'' + ''^src/libexpr-test-support/tests/value/context\.hh'' + ''^src/libexpr-tests/derived-path\.cc'' + ''^src/libexpr-tests/error_traces\.cc'' + ''^src/libexpr-tests/eval\.cc'' + ''^src/libexpr-tests/json\.cc'' + ''^src/libexpr-tests/main\.cc'' + ''^src/libexpr-tests/primops\.cc'' + ''^src/libexpr-tests/search-path\.cc'' + ''^src/libexpr-tests/trivial\.cc'' + ''^src/libexpr-tests/value/context\.cc'' + ''^src/libexpr-tests/value/print\.cc'' + ''^src/libfetchers-tests/public-key\.cc'' + ''^src/libflake-tests/flakeref\.cc'' + ''^src/libflake-tests/url-name\.cc'' + ''^src/libstore-test-support/tests/derived-path\.cc'' + ''^src/libstore-test-support/tests/derived-path\.hh'' + ''^src/libstore-test-support/tests/nix_api_store\.hh'' + ''^src/libstore-test-support/tests/outputs-spec\.cc'' + ''^src/libstore-test-support/tests/outputs-spec\.hh'' + ''^src/libstore-test-support/tests/path\.cc'' + 
''^src/libstore-test-support/tests/path\.hh'' + ''^src/libstore-test-support/tests/protocol\.hh'' + ''^src/libstore-tests/common-protocol\.cc'' + ''^src/libstore-tests/content-address\.cc'' + ''^src/libstore-tests/derivation\.cc'' + ''^src/libstore-tests/derived-path\.cc'' + ''^src/libstore-tests/downstream-placeholder\.cc'' + ''^src/libstore-tests/machines\.cc'' + ''^src/libstore-tests/nar-info-disk-cache\.cc'' + ''^src/libstore-tests/nar-info\.cc'' + ''^src/libstore-tests/outputs-spec\.cc'' + ''^src/libstore-tests/path-info\.cc'' + ''^src/libstore-tests/path\.cc'' + ''^src/libstore-tests/serve-protocol\.cc'' + ''^src/libstore-tests/worker-protocol\.cc'' + ''^src/libutil-test-support/tests/characterization\.hh'' + ''^src/libutil-test-support/tests/hash\.cc'' + ''^src/libutil-test-support/tests/hash\.hh'' + ''^src/libutil-tests/args\.cc'' + ''^src/libutil-tests/canon-path\.cc'' + ''^src/libutil-tests/chunked-vector\.cc'' + ''^src/libutil-tests/closure\.cc'' + ''^src/libutil-tests/compression\.cc'' + ''^src/libutil-tests/config\.cc'' + ''^src/libutil-tests/file-content-address\.cc'' + ''^src/libutil-tests/git\.cc'' + ''^src/libutil-tests/hash\.cc'' + ''^src/libutil-tests/hilite\.cc'' + ''^src/libutil-tests/json-utils\.cc'' + ''^src/libutil-tests/logging\.cc'' + ''^src/libutil-tests/lru-cache\.cc'' + ''^src/libutil-tests/pool\.cc'' + ''^src/libutil-tests/references\.cc'' + ''^src/libutil-tests/suggestions\.cc'' + ''^src/libutil-tests/url\.cc'' + ''^src/libutil-tests/xml-writer\.cc'' + ]; + }; + shellcheck = { + enable = true; + excludes = [ + # We haven't linted these files yet + ''^config/install-sh$'' + ''^misc/bash/completion\.sh$'' + ''^misc/fish/completion\.fish$'' + ''^misc/zsh/completion\.zsh$'' + ''^scripts/create-darwin-volume\.sh$'' + ''^scripts/install-darwin-multi-user\.sh$'' + ''^scripts/install-multi-user\.sh$'' + ''^scripts/install-systemd-multi-user\.sh$'' + ''^src/nix/get-env\.sh$'' + ''^tests/functional/ca/build-dry\.sh$'' + ''^tests/functional/ca/build-with-garbage-path\.sh$'' + ''^tests/functional/ca/common\.sh$'' + ''^tests/functional/ca/concurrent-builds\.sh$'' + ''^tests/functional/ca/eval-store\.sh$'' + ''^tests/functional/ca/gc\.sh$'' + ''^tests/functional/ca/import-from-derivation\.sh$'' + ''^tests/functional/ca/new-build-cmd\.sh$'' + ''^tests/functional/ca/nix-shell\.sh$'' + ''^tests/functional/ca/post-hook\.sh$'' + ''^tests/functional/ca/recursive\.sh$'' + ''^tests/functional/ca/repl\.sh$'' + ''^tests/functional/ca/selfref-gc\.sh$'' + ''^tests/functional/ca/why-depends\.sh$'' + ''^tests/functional/characterisation-test-infra\.sh$'' + ''^tests/functional/common/vars-and-functions\.sh$'' + ''^tests/functional/completions\.sh$'' + ''^tests/functional/compute-levels\.sh$'' + ''^tests/functional/config\.sh$'' + ''^tests/functional/db-migration\.sh$'' + ''^tests/functional/debugger\.sh$'' + ''^tests/functional/dependencies\.builder0\.sh$'' + ''^tests/functional/dependencies\.sh$'' + ''^tests/functional/dump-db\.sh$'' + ''^tests/functional/dyn-drv/build-built-drv\.sh$'' + ''^tests/functional/dyn-drv/common\.sh$'' + ''^tests/functional/dyn-drv/dep-built-drv\.sh$'' + ''^tests/functional/dyn-drv/eval-outputOf\.sh$'' + ''^tests/functional/dyn-drv/old-daemon-error-hack\.sh$'' + ''^tests/functional/dyn-drv/recursive-mod-json\.sh$'' + ''^tests/functional/eval-store\.sh$'' + ''^tests/functional/export-graph\.sh$'' + ''^tests/functional/export\.sh$'' + ''^tests/functional/extra-sandbox-profile\.sh$'' + ''^tests/functional/fetchClosure\.sh$'' + ''^tests/functional/fetchGit\.sh$'' 
+ ''^tests/functional/fetchGitRefs\.sh$'' + ''^tests/functional/fetchGitSubmodules\.sh$'' + ''^tests/functional/fetchGitVerification\.sh$'' + ''^tests/functional/fetchMercurial\.sh$'' + ''^tests/functional/fixed\.builder1\.sh$'' + ''^tests/functional/fixed\.builder2\.sh$'' + ''^tests/functional/fixed\.sh$'' + ''^tests/functional/flakes/absolute-paths\.sh$'' + ''^tests/functional/flakes/check\.sh$'' + ''^tests/functional/flakes/config\.sh$'' + ''^tests/functional/flakes/flakes\.sh$'' + ''^tests/functional/flakes/follow-paths\.sh$'' + ''^tests/functional/flakes/prefetch\.sh$'' + ''^tests/functional/flakes/run\.sh$'' + ''^tests/functional/flakes/show\.sh$'' + ''^tests/functional/fmt\.sh$'' + ''^tests/functional/fmt\.simple\.sh$'' + ''^tests/functional/gc-auto\.sh$'' + ''^tests/functional/gc-concurrent\.builder\.sh$'' + ''^tests/functional/gc-concurrent\.sh$'' + ''^tests/functional/gc-concurrent2\.builder\.sh$'' + ''^tests/functional/gc-non-blocking\.sh$'' + ''^tests/functional/git-hashing/common\.sh$'' + ''^tests/functional/git-hashing/simple\.sh$'' + ''^tests/functional/hash-convert\.sh$'' + ''^tests/functional/impure-derivations\.sh$'' + ''^tests/functional/impure-eval\.sh$'' + ''^tests/functional/install-darwin\.sh$'' + ''^tests/functional/legacy-ssh-store\.sh$'' + ''^tests/functional/linux-sandbox\.sh$'' + ''^tests/functional/local-overlay-store/add-lower-inner\.sh$'' + ''^tests/functional/local-overlay-store/add-lower\.sh$'' + ''^tests/functional/local-overlay-store/bad-uris\.sh$'' + ''^tests/functional/local-overlay-store/build-inner\.sh$'' + ''^tests/functional/local-overlay-store/build\.sh$'' + ''^tests/functional/local-overlay-store/check-post-init-inner\.sh$'' + ''^tests/functional/local-overlay-store/check-post-init\.sh$'' + ''^tests/functional/local-overlay-store/common\.sh$'' + ''^tests/functional/local-overlay-store/delete-duplicate-inner\.sh$'' + ''^tests/functional/local-overlay-store/delete-duplicate\.sh$'' + ''^tests/functional/local-overlay-store/delete-refs-inner\.sh$'' + ''^tests/functional/local-overlay-store/delete-refs\.sh$'' + ''^tests/functional/local-overlay-store/gc-inner\.sh$'' + ''^tests/functional/local-overlay-store/gc\.sh$'' + ''^tests/functional/local-overlay-store/optimise-inner\.sh$'' + ''^tests/functional/local-overlay-store/optimise\.sh$'' + ''^tests/functional/local-overlay-store/redundant-add-inner\.sh$'' + ''^tests/functional/local-overlay-store/redundant-add\.sh$'' + ''^tests/functional/local-overlay-store/remount\.sh$'' + ''^tests/functional/local-overlay-store/stale-file-handle-inner\.sh$'' + ''^tests/functional/local-overlay-store/stale-file-handle\.sh$'' + ''^tests/functional/local-overlay-store/verify-inner\.sh$'' + ''^tests/functional/local-overlay-store/verify\.sh$'' + ''^tests/functional/logging\.sh$'' + ''^tests/functional/misc\.sh$'' + ''^tests/functional/multiple-outputs\.sh$'' + ''^tests/functional/nested-sandboxing\.sh$'' + ''^tests/functional/nested-sandboxing/command\.sh$'' + ''^tests/functional/nix-build\.sh$'' + ''^tests/functional/nix-channel\.sh$'' + ''^tests/functional/nix-collect-garbage-d\.sh$'' + ''^tests/functional/nix-copy-ssh-common\.sh$'' + ''^tests/functional/nix-copy-ssh-ng\.sh$'' + ''^tests/functional/nix-copy-ssh\.sh$'' + ''^tests/functional/nix-daemon-untrusting\.sh$'' + ''^tests/functional/nix-profile\.sh$'' + ''^tests/functional/nix-shell\.sh$'' + ''^tests/functional/nix_path\.sh$'' + ''^tests/functional/optimise-store\.sh$'' + ''^tests/functional/output-normalization\.sh$'' + 
''^tests/functional/parallel\.builder\.sh$'' + ''^tests/functional/parallel\.sh$'' + ''^tests/functional/pass-as-file\.sh$'' + ''^tests/functional/path-from-hash-part\.sh$'' + ''^tests/functional/path-info\.sh$'' + ''^tests/functional/placeholders\.sh$'' + ''^tests/functional/post-hook\.sh$'' + ''^tests/functional/pure-eval\.sh$'' + ''^tests/functional/push-to-store-old\.sh$'' + ''^tests/functional/push-to-store\.sh$'' + ''^tests/functional/read-only-store\.sh$'' + ''^tests/functional/readfile-context\.sh$'' + ''^tests/functional/recursive\.sh$'' + ''^tests/functional/referrers\.sh$'' + ''^tests/functional/remote-store\.sh$'' + ''^tests/functional/repair\.sh$'' + ''^tests/functional/restricted\.sh$'' + ''^tests/functional/search\.sh$'' + ''^tests/functional/secure-drv-outputs\.sh$'' + ''^tests/functional/selfref-gc\.sh$'' + ''^tests/functional/shell\.shebang\.sh$'' + ''^tests/functional/simple\.builder\.sh$'' + ''^tests/functional/supplementary-groups\.sh$'' + ''^tests/functional/toString-path\.sh$'' + ''^tests/functional/user-envs-migration\.sh$'' + ''^tests/functional/user-envs-test-case\.sh$'' + ''^tests/functional/user-envs\.builder\.sh$'' + ''^tests/functional/user-envs\.sh$'' + ''^tests/functional/why-depends\.sh$'' + ''^src/libutil-tests/data/git/check-data\.sh$'' + ]; + }; + # TODO: nixfmt, https://github.com/NixOS/nixfmt/issues/153 }; - # TODO: nixfmt, https://github.com/NixOS/nixfmt/issues/153 }; }; - }; # We'll be pulling from this in the main flake flake.getSystem = getSystem; diff --git a/packaging/components.nix b/packaging/components.nix index e1f661be8fb..d1bfe83bf0e 100644 --- a/packaging/components.nix +++ b/packaging/components.nix @@ -13,9 +13,11 @@ let versionSuffix = lib.optionalString (!officialRelease) "pre"; - fineVersionSuffix = lib.optionalString - (!officialRelease) - "pre${builtins.substring 0 8 (src.lastModifiedDate or src.lastModified or "19700101")}_${src.shortRev or "dirty"}"; + fineVersionSuffix = + lib.optionalString (!officialRelease) + "pre${ + builtins.substring 0 8 (src.lastModifiedDate or src.lastModified or "19700101") + }_${src.shortRev or "dirty"}"; fineVersion = baseVersion + fineVersionSuffix; in @@ -54,7 +56,9 @@ in nix-cli = callPackage ../src/nix/package.nix { version = fineVersion; }; - nix-functional-tests = callPackage ../src/nix-functional-tests/package.nix { version = fineVersion; }; + nix-functional-tests = callPackage ../src/nix-functional-tests/package.nix { + version = fineVersion; + }; nix-manual = callPackage ../doc/manual/package.nix { version = fineVersion; }; nix-internal-api-docs = callPackage ../src/internal-api-docs/package.nix { version = fineVersion; }; diff --git a/packaging/dependencies.nix b/packaging/dependencies.nix index acdbc9cfc79..afbc31fc6df 100644 --- a/packaging/dependencies.nix +++ b/packaging/dependencies.nix @@ -19,9 +19,7 @@ let root = ../.; - stdenv = if prevStdenv.isDarwin && prevStdenv.isx86_64 - then darwinStdenv - else prevStdenv; + stdenv = if prevStdenv.isDarwin && prevStdenv.isx86_64 then darwinStdenv else prevStdenv; # Fix the following error with the default x86_64-darwin SDK: # @@ -38,11 +36,14 @@ let # Indirection for Nixpkgs to override when package.nix files are vendored filesetToSource = lib.fileset.toSource; - /** Given a set of layers, create a mkDerivation-like function */ - mkPackageBuilder = exts: userFn: - stdenv.mkDerivation (lib.extends (lib.composeManyExtensions exts) userFn); + /** + Given a set of layers, create a mkDerivation-like function + */ + mkPackageBuilder = + exts: userFn: 
stdenv.mkDerivation (lib.extends (lib.composeManyExtensions exts) userFn); - localSourceLayer = finalAttrs: prevAttrs: + localSourceLayer = + finalAttrs: prevAttrs: let workDirPath = # Ideally we'd pick finalAttrs.workDir, but for now `mkDerivation` has @@ -51,8 +52,13 @@ let prevAttrs.workDir; workDirSubpath = lib.path.removePrefix root workDirPath; - sources = assert prevAttrs.fileset._type == "fileset"; prevAttrs.fileset; - src = lib.fileset.toSource { fileset = sources; inherit root; }; + sources = + assert prevAttrs.fileset._type == "fileset"; + prevAttrs.fileset; + src = lib.fileset.toSource { + fileset = sources; + inherit root; + }; in { @@ -64,117 +70,129 @@ let workDir = null; }; - mesonLayer = finalAttrs: prevAttrs: - { - # NOTE: - # As of https://github.com/NixOS/nixpkgs/blob/8baf8241cea0c7b30e0b8ae73474cb3de83c1a30/pkgs/by-name/me/meson/setup-hook.sh#L26, - # `mesonBuildType` defaults to `plain` if not specified. We want our Nix-built binaries to be optimized by default. - # More on build types here: https://mesonbuild.com/Builtin-options.html#details-for-buildtype. - mesonBuildType = "release"; - # NOTE: - # Users who are debugging Nix builds are expected to set the environment variable `mesonBuildType`, per the - # guidance in https://github.com/NixOS/nix/blob/8a3fc27f1b63a08ac983ee46435a56cf49ebaf4a/doc/manual/source/development/debugging.md?plain=1#L10. - # For this reason, we don't want to refer to `finalAttrs.mesonBuildType` here, but rather use the environment variable. - preConfigure = prevAttrs.preConfigure or "" + lib.optionalString ( - !stdenv.hostPlatform.isWindows - # build failure - && !stdenv.hostPlatform.isStatic - # LTO breaks exception handling on x86-64-darwin. - && stdenv.system != "x86_64-darwin" - ) '' - case "$mesonBuildType" in - release|minsize) appendToVar mesonFlags "-Db_lto=true" ;; - *) appendToVar mesonFlags "-Db_lto=false" ;; - esac - ''; - nativeBuildInputs = [ - pkgs.buildPackages.meson - pkgs.buildPackages.ninja - ] ++ prevAttrs.nativeBuildInputs or []; - mesonCheckFlags = prevAttrs.mesonCheckFlags or [] ++ [ - "--print-errorlogs" - ]; - }; + mesonLayer = finalAttrs: prevAttrs: { + # NOTE: + # As of https://github.com/NixOS/nixpkgs/blob/8baf8241cea0c7b30e0b8ae73474cb3de83c1a30/pkgs/by-name/me/meson/setup-hook.sh#L26, + # `mesonBuildType` defaults to `plain` if not specified. We want our Nix-built binaries to be optimized by default. + # More on build types here: https://mesonbuild.com/Builtin-options.html#details-for-buildtype. + mesonBuildType = "release"; + # NOTE: + # Users who are debugging Nix builds are expected to set the environment variable `mesonBuildType`, per the + # guidance in https://github.com/NixOS/nix/blob/8a3fc27f1b63a08ac983ee46435a56cf49ebaf4a/doc/manual/source/development/debugging.md?plain=1#L10. + # For this reason, we don't want to refer to `finalAttrs.mesonBuildType` here, but rather use the environment variable. + preConfigure = + prevAttrs.preConfigure or "" + + + lib.optionalString + ( + !stdenv.hostPlatform.isWindows + # build failure + && !stdenv.hostPlatform.isStatic + # LTO breaks exception handling on x86-64-darwin. 
+ && stdenv.system != "x86_64-darwin" + ) + '' + case "$mesonBuildType" in + release|minsize) appendToVar mesonFlags "-Db_lto=true" ;; + *) appendToVar mesonFlags "-Db_lto=false" ;; + esac + ''; + nativeBuildInputs = [ + pkgs.buildPackages.meson + pkgs.buildPackages.ninja + ] ++ prevAttrs.nativeBuildInputs or [ ]; + mesonCheckFlags = prevAttrs.mesonCheckFlags or [ ] ++ [ + "--print-errorlogs" + ]; + }; - mesonBuildLayer = finalAttrs: prevAttrs: - { - nativeBuildInputs = prevAttrs.nativeBuildInputs or [] ++ [ - pkgs.buildPackages.pkg-config - ]; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - env = prevAttrs.env or {} - // lib.optionalAttrs - (stdenv.isLinux - && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux") - && !(stdenv.hostPlatform.useLLVM or false)) - { LDFLAGS = "-fuse-ld=gold"; }; - }; + mesonBuildLayer = finalAttrs: prevAttrs: { + nativeBuildInputs = prevAttrs.nativeBuildInputs or [ ] ++ [ + pkgs.buildPackages.pkg-config + ]; + separateDebugInfo = !stdenv.hostPlatform.isStatic; + hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; + env = + prevAttrs.env or { } + // lib.optionalAttrs ( + stdenv.isLinux + && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux") + && !(stdenv.hostPlatform.useLLVM or false) + ) { LDFLAGS = "-fuse-ld=gold"; }; + }; - mesonLibraryLayer = finalAttrs: prevAttrs: - { - outputs = prevAttrs.outputs or [ "out" ] ++ [ "dev" ]; - }; + mesonLibraryLayer = finalAttrs: prevAttrs: { + outputs = prevAttrs.outputs or [ "out" ] ++ [ "dev" ]; + }; # Work around weird `--as-needed` linker behavior with BSD, see # https://github.com/mesonbuild/meson/issues/3593 - bsdNoLinkAsNeeded = finalAttrs: prevAttrs: + bsdNoLinkAsNeeded = + finalAttrs: prevAttrs: lib.optionalAttrs stdenv.hostPlatform.isBSD { - mesonFlags = [ (lib.mesonBool "b_asneeded" false) ] ++ prevAttrs.mesonFlags or []; + mesonFlags = [ (lib.mesonBool "b_asneeded" false) ] ++ prevAttrs.mesonFlags or [ ]; }; - miscGoodPractice = finalAttrs: prevAttrs: - { - strictDeps = prevAttrs.strictDeps or true; - enableParallelBuilding = true; - }; + miscGoodPractice = finalAttrs: prevAttrs: { + strictDeps = prevAttrs.strictDeps or true; + enableParallelBuilding = true; + }; in scope: { inherit stdenv; - aws-sdk-cpp = (pkgs.aws-sdk-cpp.override { - apis = [ "s3" "transfer" ]; - customMemoryManagement = false; - }).overrideAttrs { - # only a stripped down version is built, which takes a lot less resources - # to build, so we don't need a "big-parallel" machine. - requiredSystemFeatures = [ ]; - }; + aws-sdk-cpp = + (pkgs.aws-sdk-cpp.override { + apis = [ + "s3" + "transfer" + ]; + customMemoryManagement = false; + }).overrideAttrs + { + # only a stripped down version is built, which takes a lot less resources + # to build, so we don't need a "big-parallel" machine. + requiredSystemFeatures = [ ]; + }; boehmgc = pkgs.boehmgc.override { enableLargeConfig = true; }; # TODO Hack until https://github.com/NixOS/nixpkgs/issues/45462 is fixed. 
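The dependency overrides in this hunk (aws-sdk-cpp above, boost and libgit2 just below) all follow the same two-step idiom: `.override` changes the arguments the package recipe is called with, and `.overrideAttrs` then edits the resulting mkDerivation attributes. A minimal sketch of that shape, using an invented toy package and argument name so it stays independent of the patch:

```nix
# Sketch only: `withFeatureX` and the toy package are hypothetical; the point
# is the `.override` -> `.overrideAttrs` chain used for aws-sdk-cpp, boost and
# libgit2 in this file.
{ pkgs ? import <nixpkgs> { } }:

let
  # A toy package with one callPackage-level argument.
  myPkg = pkgs.callPackage (
    { stdenv, withFeatureX ? false }:
    stdenv.mkDerivation {
      pname = "override-demo";
      version = "0.1";
      dontUnpack = true;
      installPhase = ''echo "featureX: ${if withFeatureX then "on" else "off"}" > $out'';
    }
  ) { };
in
(myPkg.override {
  # `.override` replaces callPackage arguments, i.e. it re-evaluates the
  # recipe with different inputs (like `apis` and `customMemoryManagement`
  # for aws-sdk-cpp above).
  withFeatureX = true;
}).overrideAttrs
  (old: {
    # `.overrideAttrs` then edits the mkDerivation attributes of the result
    # (like clearing `requiredSystemFeatures` above), with access to the
    # previous values via `old`.
    requiredSystemFeatures = [ ];
  })
```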
- boost = (pkgs.boost.override { - extraB2Args = [ - "--with-container" - "--with-context" - "--with-coroutine" - ]; - }).overrideAttrs (old: { - # Need to remove `--with-*` to use `--with-libraries=...` - buildPhase = lib.replaceStrings [ "--without-python" ] [ "" ] old.buildPhase; - installPhase = lib.replaceStrings [ "--without-python" ] [ "" ] old.installPhase; - }); + boost = + (pkgs.boost.override { + extraB2Args = [ + "--with-container" + "--with-context" + "--with-coroutine" + ]; + }).overrideAttrs + (old: { + # Need to remove `--with-*` to use `--with-libraries=...` + buildPhase = lib.replaceStrings [ "--without-python" ] [ "" ] old.buildPhase; + installPhase = lib.replaceStrings [ "--without-python" ] [ "" ] old.installPhase; + }); libgit2 = pkgs.libgit2.overrideAttrs (attrs: { - cmakeFlags = attrs.cmakeFlags or [] - ++ [ "-DUSE_SSH=exec" ]; - nativeBuildInputs = attrs.nativeBuildInputs or [] + cmakeFlags = attrs.cmakeFlags or [ ] ++ [ "-DUSE_SSH=exec" ]; + nativeBuildInputs = + attrs.nativeBuildInputs or [ ] # gitMinimal does not build on Windows. See packbuilder patch. ++ lib.optionals (!stdenv.hostPlatform.isWindows) [ # Needed for `git apply`; see `prePatch` pkgs.buildPackages.gitMinimal ]; # Only `git apply` can handle git binary patches - prePatch = attrs.prePatch or "" + prePatch = + attrs.prePatch or "" + lib.optionalString (!stdenv.hostPlatform.isWindows) '' patch() { git apply } ''; - patches = attrs.patches or [] + patches = + attrs.patches or [ ] ++ [ ./patches/libgit2-mempack-thin-packfile.patch ] @@ -188,27 +206,24 @@ scope: { inherit resolvePath filesetToSource; - mkMesonDerivation = - mkPackageBuilder [ - miscGoodPractice - localSourceLayer - mesonLayer - ]; - mkMesonExecutable = - mkPackageBuilder [ - miscGoodPractice - bsdNoLinkAsNeeded - localSourceLayer - mesonLayer - mesonBuildLayer - ]; - mkMesonLibrary = - mkPackageBuilder [ - miscGoodPractice - bsdNoLinkAsNeeded - localSourceLayer - mesonLayer - mesonBuildLayer - mesonLibraryLayer - ]; + mkMesonDerivation = mkPackageBuilder [ + miscGoodPractice + localSourceLayer + mesonLayer + ]; + mkMesonExecutable = mkPackageBuilder [ + miscGoodPractice + bsdNoLinkAsNeeded + localSourceLayer + mesonLayer + mesonBuildLayer + ]; + mkMesonLibrary = mkPackageBuilder [ + miscGoodPractice + bsdNoLinkAsNeeded + localSourceLayer + mesonLayer + mesonBuildLayer + mesonLibraryLayer + ]; } diff --git a/packaging/dev-shell.nix b/packaging/dev-shell.nix index 30ac518d5f7..1651a86bee1 100644 --- a/packaging/dev-shell.nix +++ b/packaging/dev-shell.nix @@ -2,127 +2,135 @@ { pkgs }: -pkgs.nixComponents.nix-util.overrideAttrs (attrs: - -let - stdenv = pkgs.nixDependencies.stdenv; - buildCanExecuteHost = stdenv.buildPlatform.canExecute stdenv.hostPlatform; - modular = devFlake.getSystem stdenv.buildPlatform.system; - transformFlag = prefix: flag: - assert builtins.isString flag; - let - rest = builtins.substring 2 (builtins.stringLength flag) flag; - in +pkgs.nixComponents.nix-util.overrideAttrs ( + attrs: + + let + stdenv = pkgs.nixDependencies.stdenv; + buildCanExecuteHost = stdenv.buildPlatform.canExecute stdenv.hostPlatform; + modular = devFlake.getSystem stdenv.buildPlatform.system; + transformFlag = + prefix: flag: + assert builtins.isString flag; + let + rest = builtins.substring 2 (builtins.stringLength flag) flag; + in "-D${prefix}:${rest}"; - havePerl = stdenv.buildPlatform == stdenv.hostPlatform && stdenv.hostPlatform.isUnix; - ignoreCrossFile = flags: builtins.filter (flag: !(lib.strings.hasInfix "cross-file" flag)) flags; -in 
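The `mkMesonDerivation`, `mkMesonExecutable`, and `mkMesonLibrary` builders assembled just above come from the `mkPackageBuilder` helper in this same hunk, which composes overlay-style "layers" with `lib.composeManyExtensions` before handing the result to `stdenv.mkDerivation`. A standalone sketch of that composition, with made-up layer names (only the mechanism mirrors the patch):

```nix
# Standalone sketch of the layer composition behind mkPackageBuilder.
# The two layers below are invented for illustration; only the composition
# mechanism (lib.extends + lib.composeManyExtensions) mirrors the hunk above.
{ pkgs ? import <nixpkgs> { } }:

let
  inherit (pkgs) lib stdenv;

  mkPackageBuilder =
    exts: userFn: stdenv.mkDerivation (lib.extends (lib.composeManyExtensions exts) userFn);

  # A layer is a function finalAttrs: prevAttrs: { ... }, like miscGoodPractice above.
  goodPracticeLayer = finalAttrs: prevAttrs: {
    strictDeps = prevAttrs.strictDeps or true;
    enableParallelBuilding = true;
  };

  # Layers listed later see earlier layers' results through prevAttrs,
  # so appending to lists (as mesonLayer does) composes cleanly.
  extraInputsLayer = finalAttrs: prevAttrs: {
    nativeBuildInputs = (prevAttrs.nativeBuildInputs or [ ]) ++ [ pkgs.buildPackages.ninja ];
  };

  mkDemoPackage = mkPackageBuilder [
    goodPracticeLayer
    extraInputsLayer
  ];
in
mkDemoPackage (finalAttrs: {
  pname = "layer-demo";
  version = "0.1";
  dontUnpack = true;
  installPhase = "touch $out";
})
```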
{ - pname = "shell-for-" + attrs.pname; - - # Remove the version suffix to avoid unnecessary attempts to substitute in nix develop - version = lib.fileContents ../.version; - name = attrs.pname; - - installFlags = "sysconfdir=$(out)/etc"; - shellHook = '' - PATH=$prefix/bin:$PATH - unset PYTHONPATH - export MANPATH=$out/share/man:$MANPATH - - # Make bash completion work. - XDG_DATA_DIRS+=:$out/share - - # Make the default phases do the right thing. - # FIXME: this wouldn't be needed if the ninja package set buildPhase() instead of $buildPhase. - # FIXME: mesonConfigurePhase shouldn't cd to the build directory. It would be better to pass '-C ' to ninja. - - cdToBuildDir() { - if [[ ! -e build.ninja ]]; then - cd build - fi - } - - configurePhase() { - mesonConfigurePhase - } - - buildPhase() { - cdToBuildDir - ninjaBuildPhase - } - - checkPhase() { - cdToBuildDir - mesonCheckPhase - } - - installPhase() { - cdToBuildDir - ninjaInstallPhase - } - ''; - - # We use this shell with the local checkout, not unpackPhase. - src = null; - - env = { - # Needed for Meson to find Boost. - # https://github.com/NixOS/nixpkgs/issues/86131. - BOOST_INCLUDEDIR = "${lib.getDev pkgs.nixDependencies.boost}/include"; - BOOST_LIBRARYDIR = "${lib.getLib pkgs.nixDependencies.boost}/lib"; - # For `make format`, to work without installing pre-commit - _NIX_PRE_COMMIT_HOOKS_CONFIG = - "${(pkgs.formats.yaml { }).generate "pre-commit-config.yaml" modular.pre-commit.settings.rawConfig}"; - }; - - mesonFlags = - map (transformFlag "libutil") (ignoreCrossFile pkgs.nixComponents.nix-util.mesonFlags) - ++ map (transformFlag "libstore") (ignoreCrossFile pkgs.nixComponents.nix-store.mesonFlags) - ++ map (transformFlag "libfetchers") (ignoreCrossFile pkgs.nixComponents.nix-fetchers.mesonFlags) - ++ lib.optionals havePerl (map (transformFlag "perl") (ignoreCrossFile pkgs.nixComponents.nix-perl-bindings.mesonFlags)) - ++ map (transformFlag "libexpr") (ignoreCrossFile pkgs.nixComponents.nix-expr.mesonFlags) - ++ map (transformFlag "libcmd") (ignoreCrossFile pkgs.nixComponents.nix-cmd.mesonFlags) - ; - - nativeBuildInputs = attrs.nativeBuildInputs or [] - ++ pkgs.nixComponents.nix-util.nativeBuildInputs - ++ pkgs.nixComponents.nix-store.nativeBuildInputs - ++ pkgs.nixComponents.nix-fetchers.nativeBuildInputs - ++ pkgs.nixComponents.nix-expr.nativeBuildInputs - ++ lib.optionals havePerl pkgs.nixComponents.nix-perl-bindings.nativeBuildInputs - ++ lib.optionals buildCanExecuteHost pkgs.nixComponents.nix-manual.externalNativeBuildInputs - ++ pkgs.nixComponents.nix-internal-api-docs.nativeBuildInputs - ++ pkgs.nixComponents.nix-external-api-docs.nativeBuildInputs - ++ pkgs.nixComponents.nix-functional-tests.externalNativeBuildInputs - ++ lib.optional - (!buildCanExecuteHost - # Hack around https://github.com/nixos/nixpkgs/commit/bf7ad8cfbfa102a90463433e2c5027573b462479 - && !(stdenv.hostPlatform.isWindows && stdenv.buildPlatform.isDarwin) - && stdenv.hostPlatform.emulatorAvailable pkgs.buildPackages - && lib.meta.availableOn stdenv.buildPlatform (stdenv.hostPlatform.emulator pkgs.buildPackages)) - pkgs.buildPackages.mesonEmulatorHook - ++ [ - pkgs.buildPackages.cmake - pkgs.buildPackages.shellcheck - pkgs.buildPackages.changelog-d - modular.pre-commit.settings.package - (pkgs.writeScriptBin "pre-commit-hooks-install" - modular.pre-commit.settings.installationScript) - ] - # TODO: Remove the darwin check once - # https://github.com/NixOS/nixpkgs/pull/291814 is available - ++ lib.optional (stdenv.cc.isClang && 
!stdenv.buildPlatform.isDarwin) pkgs.buildPackages.bear - ++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) (lib.hiPrio pkgs.buildPackages.clang-tools); - - buildInputs = attrs.buildInputs or [] - ++ pkgs.nixComponents.nix-util.buildInputs - ++ pkgs.nixComponents.nix-store.buildInputs - ++ pkgs.nixComponents.nix-store-tests.externalBuildInputs - ++ pkgs.nixComponents.nix-fetchers.buildInputs - ++ pkgs.nixComponents.nix-expr.buildInputs - ++ pkgs.nixComponents.nix-expr.externalPropagatedBuildInputs - ++ pkgs.nixComponents.nix-cmd.buildInputs - ++ lib.optionals havePerl pkgs.nixComponents.nix-perl-bindings.externalBuildInputs - ++ lib.optional havePerl pkgs.perl - ; -}) + havePerl = stdenv.buildPlatform == stdenv.hostPlatform && stdenv.hostPlatform.isUnix; + ignoreCrossFile = flags: builtins.filter (flag: !(lib.strings.hasInfix "cross-file" flag)) flags; + in + { + pname = "shell-for-" + attrs.pname; + + # Remove the version suffix to avoid unnecessary attempts to substitute in nix develop + version = lib.fileContents ../.version; + name = attrs.pname; + + installFlags = "sysconfdir=$(out)/etc"; + shellHook = '' + PATH=$prefix/bin:$PATH + unset PYTHONPATH + export MANPATH=$out/share/man:$MANPATH + + # Make bash completion work. + XDG_DATA_DIRS+=:$out/share + + # Make the default phases do the right thing. + # FIXME: this wouldn't be needed if the ninja package set buildPhase() instead of $buildPhase. + # FIXME: mesonConfigurePhase shouldn't cd to the build directory. It would be better to pass '-C ' to ninja. + + cdToBuildDir() { + if [[ ! -e build.ninja ]]; then + cd build + fi + } + + configurePhase() { + mesonConfigurePhase + } + + buildPhase() { + cdToBuildDir + ninjaBuildPhase + } + + checkPhase() { + cdToBuildDir + mesonCheckPhase + } + + installPhase() { + cdToBuildDir + ninjaInstallPhase + } + ''; + + # We use this shell with the local checkout, not unpackPhase. + src = null; + + env = { + # Needed for Meson to find Boost. + # https://github.com/NixOS/nixpkgs/issues/86131. 
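The `transformFlag` helper in this dev-shell.nix hunk re-scopes a Meson option flag to a named subproject, so the per-component `mesonFlags` collected for the combined shell just below do not collide. A small illustration of the string transformation, evaluated standalone (the flag value is made up, not taken from the patch):

```nix
# Illustration of transformFlag from the dev-shell.nix hunk above.
# "-Dfoo=bar" is an arbitrary example flag, not taken from the patch.
let
  transformFlag =
    prefix: flag:
    assert builtins.isString flag;
    let
      # Drop the leading "-D" and re-prefix with "<subproject>:".
      rest = builtins.substring 2 (builtins.stringLength flag) flag;
    in
    "-D${prefix}:${rest}";
in
{
  example = transformFlag "libutil" "-Dfoo=bar";
  # => "-Dlibutil:foo=bar", i.e. the option now targets the libutil subproject.
}
```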
+ BOOST_INCLUDEDIR = "${lib.getDev pkgs.nixDependencies.boost}/include"; + BOOST_LIBRARYDIR = "${lib.getLib pkgs.nixDependencies.boost}/lib"; + # For `make format`, to work without installing pre-commit + _NIX_PRE_COMMIT_HOOKS_CONFIG = "${(pkgs.formats.yaml { }).generate "pre-commit-config.yaml" + modular.pre-commit.settings.rawConfig + }"; + }; + + mesonFlags = + map (transformFlag "libutil") (ignoreCrossFile pkgs.nixComponents.nix-util.mesonFlags) + ++ map (transformFlag "libstore") (ignoreCrossFile pkgs.nixComponents.nix-store.mesonFlags) + ++ map (transformFlag "libfetchers") (ignoreCrossFile pkgs.nixComponents.nix-fetchers.mesonFlags) + ++ lib.optionals havePerl ( + map (transformFlag "perl") (ignoreCrossFile pkgs.nixComponents.nix-perl-bindings.mesonFlags) + ) + ++ map (transformFlag "libexpr") (ignoreCrossFile pkgs.nixComponents.nix-expr.mesonFlags) + ++ map (transformFlag "libcmd") (ignoreCrossFile pkgs.nixComponents.nix-cmd.mesonFlags); + + nativeBuildInputs = + attrs.nativeBuildInputs or [ ] + ++ pkgs.nixComponents.nix-util.nativeBuildInputs + ++ pkgs.nixComponents.nix-store.nativeBuildInputs + ++ pkgs.nixComponents.nix-fetchers.nativeBuildInputs + ++ pkgs.nixComponents.nix-expr.nativeBuildInputs + ++ lib.optionals havePerl pkgs.nixComponents.nix-perl-bindings.nativeBuildInputs + ++ lib.optionals buildCanExecuteHost pkgs.nixComponents.nix-manual.externalNativeBuildInputs + ++ pkgs.nixComponents.nix-internal-api-docs.nativeBuildInputs + ++ pkgs.nixComponents.nix-external-api-docs.nativeBuildInputs + ++ pkgs.nixComponents.nix-functional-tests.externalNativeBuildInputs + ++ lib.optional ( + !buildCanExecuteHost + # Hack around https://github.com/nixos/nixpkgs/commit/bf7ad8cfbfa102a90463433e2c5027573b462479 + && !(stdenv.hostPlatform.isWindows && stdenv.buildPlatform.isDarwin) + && stdenv.hostPlatform.emulatorAvailable pkgs.buildPackages + && lib.meta.availableOn stdenv.buildPlatform (stdenv.hostPlatform.emulator pkgs.buildPackages) + ) pkgs.buildPackages.mesonEmulatorHook + ++ [ + pkgs.buildPackages.cmake + pkgs.buildPackages.shellcheck + pkgs.buildPackages.changelog-d + modular.pre-commit.settings.package + (pkgs.writeScriptBin "pre-commit-hooks-install" modular.pre-commit.settings.installationScript) + ] + # TODO: Remove the darwin check once + # https://github.com/NixOS/nixpkgs/pull/291814 is available + ++ lib.optional (stdenv.cc.isClang && !stdenv.buildPlatform.isDarwin) pkgs.buildPackages.bear + ++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) ( + lib.hiPrio pkgs.buildPackages.clang-tools + ); + + buildInputs = + attrs.buildInputs or [ ] + ++ pkgs.nixComponents.nix-util.buildInputs + ++ pkgs.nixComponents.nix-store.buildInputs + ++ pkgs.nixComponents.nix-store-tests.externalBuildInputs + ++ pkgs.nixComponents.nix-fetchers.buildInputs + ++ pkgs.nixComponents.nix-expr.buildInputs + ++ pkgs.nixComponents.nix-expr.externalPropagatedBuildInputs + ++ pkgs.nixComponents.nix-cmd.buildInputs + ++ lib.optionals havePerl pkgs.nixComponents.nix-perl-bindings.externalBuildInputs + ++ lib.optional havePerl pkgs.perl; + } +) diff --git a/packaging/everything.nix b/packaging/everything.nix index 7ca878d8d53..2b47c31bbf5 100644 --- a/packaging/everything.nix +++ b/packaging/everything.nix @@ -42,27 +42,31 @@ }: let - libs = { - inherit - nix-util - nix-util-c - nix-store - nix-store-c - nix-fetchers - nix-expr - nix-expr-c - nix-flake - nix-flake-c - nix-main - nix-main-c - nix-cmd - ; - } // lib.optionalAttrs (!stdenv.hostPlatform.isStatic && 
stdenv.buildPlatform.canExecute stdenv.hostPlatform) { - # Currently fails in static build - inherit - nix-perl-bindings - ; - }; + libs = + { + inherit + nix-util + nix-util-c + nix-store + nix-store-c + nix-fetchers + nix-expr + nix-expr-c + nix-flake + nix-flake-c + nix-main + nix-main-c + nix-cmd + ; + } + // lib.optionalAttrs + (!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform) + { + # Currently fails in static build + inherit + nix-perl-bindings + ; + }; dev = stdenv.mkDerivation (finalAttrs: { name = "nix-${nix-cli.version}-dev"; @@ -77,10 +81,9 @@ let ''; passthru = { tests = { - pkg-config = - testers.hasPkgConfigModules { - package = finalAttrs.finalPackage; - }; + pkg-config = testers.hasPkgConfigModules { + package = finalAttrs.finalPackage; + }; }; # If we were to fully emulate output selection here, we'd confuse the Nix CLIs, @@ -123,70 +126,84 @@ in ]; meta.mainProgram = "nix"; -}).overrideAttrs (finalAttrs: prevAttrs: { - doCheck = true; - doInstallCheck = true; - - checkInputs = [ - # Make sure the unit tests have passed - nix-util-tests.tests.run - nix-store-tests.tests.run - nix-expr-tests.tests.run - nix-fetchers-tests.tests.run - nix-flake-tests.tests.run - - # Make sure the functional tests have passed - nix-functional-tests - - # dev bundle is ok - # (checkInputs must be empty paths??) - (runCommand "check-pkg-config" { checked = dev.tests.pkg-config; } "mkdir $out") - ] ++ lib.optionals (!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform) [ - # Perl currently fails in static build - # TODO: Split out tests into a separate derivation? - nix-perl-bindings - ]; - passthru = prevAttrs.passthru // { - inherit (nix-cli) version; - - /** - These are the libraries that are part of the Nix project. They are used - by the Nix CLI and other tools. - - If you need to use these libraries in your project, we recommend to use - the `-c` C API libraries exclusively, if possible. - - We also recommend that you build the complete package to ensure that the unit tests pass. - You could do this in CI, or by passing it in an unused environment variable. e.g in a `mkDerivation` call: - - ```nix - buildInputs = [ nix.libs.nix-util-c nix.libs.nix-store-c ]; - # Make sure the nix libs we use are ok - unusedInputsForTests = [ nix ]; - disallowedReferences = nix.all; - ``` - */ - inherit libs; - - tests = prevAttrs.passthru.tests or {} // { - # TODO: create a proper fixpoint and: - # pkg-config = - # testers.hasPkgConfigModules { - # package = finalPackage; - # }; - }; - - /** - A derivation referencing the `dev` outputs of the Nix libraries. - */ - inherit dev; - inherit devdoc; - doc = nix-manual; - outputs = [ "out" "dev" "devdoc" "doc" ]; - all = lib.attrValues (lib.genAttrs finalAttrs.passthru.outputs (outName: finalAttrs.finalPackage.${outName})); - }; - meta = prevAttrs.meta // { - description = "The Nix package manager"; - pkgConfigModules = dev.meta.pkgConfigModules; - }; -}) +}).overrideAttrs + ( + finalAttrs: prevAttrs: { + doCheck = true; + doInstallCheck = true; + + checkInputs = + [ + # Make sure the unit tests have passed + nix-util-tests.tests.run + nix-store-tests.tests.run + nix-expr-tests.tests.run + nix-fetchers-tests.tests.run + nix-flake-tests.tests.run + + # Make sure the functional tests have passed + nix-functional-tests + + # dev bundle is ok + # (checkInputs must be empty paths??) 
+ (runCommand "check-pkg-config" { checked = dev.tests.pkg-config; } "mkdir $out") + ] + ++ lib.optionals + (!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform) + [ + # Perl currently fails in static build + # TODO: Split out tests into a separate derivation? + nix-perl-bindings + ]; + passthru = prevAttrs.passthru // { + inherit (nix-cli) version; + + /** + These are the libraries that are part of the Nix project. They are used + by the Nix CLI and other tools. + + If you need to use these libraries in your project, we recommend to use + the `-c` C API libraries exclusively, if possible. + + We also recommend that you build the complete package to ensure that the unit tests pass. + You could do this in CI, or by passing it in an unused environment variable. e.g in a `mkDerivation` call: + + ```nix + buildInputs = [ nix.libs.nix-util-c nix.libs.nix-store-c ]; + # Make sure the nix libs we use are ok + unusedInputsForTests = [ nix ]; + disallowedReferences = nix.all; + ``` + */ + inherit libs; + + tests = prevAttrs.passthru.tests or { } // { + # TODO: create a proper fixpoint and: + # pkg-config = + # testers.hasPkgConfigModules { + # package = finalPackage; + # }; + }; + + /** + A derivation referencing the `dev` outputs of the Nix libraries. + */ + inherit dev; + inherit devdoc; + doc = nix-manual; + outputs = [ + "out" + "dev" + "devdoc" + "doc" + ]; + all = lib.attrValues ( + lib.genAttrs finalAttrs.passthru.outputs (outName: finalAttrs.finalPackage.${outName}) + ); + }; + meta = prevAttrs.meta // { + description = "The Nix package manager"; + pkgConfigModules = dev.meta.pkgConfigModules; + }; + } + ) diff --git a/packaging/hydra.nix b/packaging/hydra.nix index 5b1e4755948..764898515c9 100644 --- a/packaging/hydra.nix +++ b/packaging/hydra.nix @@ -1,22 +1,25 @@ -{ inputs -, binaryTarball -, forAllCrossSystems -, forAllSystems -, lib -, linux64BitSystems -, nixpkgsFor -, self -, officialRelease +{ + inputs, + binaryTarball, + forAllCrossSystems, + forAllSystems, + lib, + linux64BitSystems, + nixpkgsFor, + self, + officialRelease, }: let inherit (inputs) nixpkgs nixpkgs-regression; - installScriptFor = tarballs: + installScriptFor = + tarballs: nixpkgsFor.x86_64-linux.native.callPackage ../scripts/installer.nix { inherit tarballs; }; - testNixVersions = pkgs: daemon: + testNixVersions = + pkgs: daemon: pkgs.nixComponents.nix-functional-tests.override { pname = "nix-daemon-compat-tests"; version = "${pkgs.nix.version}-with-daemon-${daemon.version}"; @@ -54,44 +57,70 @@ let in { # Binary package for various platforms. 
- build = forAllPackages (pkgName: - forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.${pkgName})); + build = forAllPackages ( + pkgName: forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.${pkgName}) + ); - shellInputs = removeAttrs - (forAllSystems (system: self.devShells.${system}.default.inputDerivation)) - [ "i686-linux" ]; + shellInputs = removeAttrs (forAllSystems ( + system: self.devShells.${system}.default.inputDerivation + )) [ "i686-linux" ]; - buildStatic = forAllPackages (pkgName: - lib.genAttrs linux64BitSystems (system: nixpkgsFor.${system}.static.nixComponents.${pkgName})); + buildStatic = forAllPackages ( + pkgName: + lib.genAttrs linux64BitSystems (system: nixpkgsFor.${system}.static.nixComponents.${pkgName}) + ); - buildCross = forAllPackages (pkgName: + buildCross = forAllPackages ( + pkgName: # Hack to avoid non-evaling package - (if pkgName == "nix-functional-tests" then lib.flip builtins.removeAttrs ["x86_64-w64-mingw32"] else lib.id) - (forAllCrossSystems (crossSystem: - lib.genAttrs [ "x86_64-linux" ] (system: nixpkgsFor.${system}.cross.${crossSystem}.nixComponents.${pkgName})))); - - buildNoGc = let - components = forAllSystems (system: - nixpkgsFor.${system}.native.nixComponents.overrideScope (self: super: { - nix-expr = super.nix-expr.override { enableGC = false; }; - }) - ); - in forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName})); + ( + if pkgName == "nix-functional-tests" then + lib.flip builtins.removeAttrs [ "x86_64-w64-mingw32" ] + else + lib.id + ) + ( + forAllCrossSystems ( + crossSystem: + lib.genAttrs [ "x86_64-linux" ] ( + system: nixpkgsFor.${system}.cross.${crossSystem}.nixComponents.${pkgName} + ) + ) + ) + ); + + buildNoGc = + let + components = forAllSystems ( + system: + nixpkgsFor.${system}.native.nixComponents.overrideScope ( + self: super: { + nix-expr = super.nix-expr.override { enableGC = false; }; + } + ) + ); + in + forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName})); buildNoTests = forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.nix-cli); # Toggles some settings for better coverage. Windows needs these # library combinations, and Debian build Nix with GNU readline too. - buildReadlineNoMarkdown = let - components = forAllSystems (system: - nixpkgsFor.${system}.native.nixComponents.overrideScope (self: super: { - nix-cmd = super.nix-cmd.override { - enableMarkdown = false; - readlineFlavor = "readline"; - }; - }) - ); - in forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName})); + buildReadlineNoMarkdown = + let + components = forAllSystems ( + system: + nixpkgsFor.${system}.native.nixComponents.overrideScope ( + self: super: { + nix-cmd = super.nix-cmd.override { + enableMarkdown = false; + readlineFlavor = "readline"; + }; + } + ) + ); + in + forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName})); # Perl bindings for various platforms. perlBindings = forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.nix-perl-bindings); @@ -99,13 +128,18 @@ in # Binary tarball for various platforms, containing a Nix store # with the closure of 'nix' package, and the second half of # the installation script. 
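The hydra.nix hunk above builds its job matrix by nesting `forAllPackages`, `forAllSystems`, and `lib.genAttrs`, so each job set becomes an attribute tree keyed first by package name and then by system. A reduced sketch of that shape, with shortened stand-in package and system lists (the real lists are defined elsewhere in the flake):

```nix
# Reduced sketch of the hydraJobs matrix shape produced above.
# The package and system lists here are shortened stand-ins; the real
# forAllPackages / forAllSystems come from the surrounding flake code.
let
  lib = (import <nixpkgs> { }).lib;

  packages = [ "nix-util" "nix-store" ];
  systems = [ "x86_64-linux" "aarch64-darwin" ];

  forAllPackages = lib.genAttrs packages;
  forAllSystems = lib.genAttrs systems;

  # Stand-in for nixpkgsFor.${system}.native.nixComponents.${pkgName}.
  fakeComponent = pkgName: system: "drv:${pkgName}:${system}";
in
# Same nesting as `build = forAllPackages (pkgName: forAllSystems (system: ...))`:
forAllPackages (pkgName: forAllSystems (system: fakeComponent pkgName system))
# => {
#      nix-util  = { x86_64-linux = "drv:nix-util:x86_64-linux";  aarch64-darwin = ...; };
#      nix-store = { x86_64-linux = "drv:nix-store:x86_64-linux"; aarch64-darwin = ...; };
#    }
```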
- binaryTarball = forAllSystems (system: binaryTarball nixpkgsFor.${system}.native.nix nixpkgsFor.${system}.native); - - binaryTarballCross = lib.genAttrs [ "x86_64-linux" ] (system: - forAllCrossSystems (crossSystem: - binaryTarball - nixpkgsFor.${system}.cross.${crossSystem}.nix - nixpkgsFor.${system}.cross.${crossSystem})); + binaryTarball = forAllSystems ( + system: binaryTarball nixpkgsFor.${system}.native.nix nixpkgsFor.${system}.native + ); + + binaryTarballCross = lib.genAttrs [ "x86_64-linux" ] ( + system: + forAllCrossSystems ( + crossSystem: + binaryTarball nixpkgsFor.${system}.cross.${crossSystem}.nix + nixpkgsFor.${system}.cross.${crossSystem} + ) + ); # The first half of the installation script. This is uploaded # to https://nixos.org/nix/install. It downloads the binary @@ -124,9 +158,12 @@ in self.hydraJobs.binaryTarballCross."x86_64-linux"."riscv64-unknown-linux-gnu" ]; - installerScriptForGHA = forAllSystems (system: nixpkgsFor.${system}.native.callPackage ../scripts/installer.nix { - tarballs = [ self.hydraJobs.binaryTarball.${system} ]; - }); + installerScriptForGHA = forAllSystems ( + system: + nixpkgsFor.${system}.native.callPackage ../scripts/installer.nix { + tarballs = [ self.hydraJobs.binaryTarball.${system} ]; + } + ); # docker image with Nix inside dockerImage = lib.genAttrs linux64BitSystems (system: self.packages.${system}.dockerImage); @@ -147,16 +184,24 @@ in external-api-docs = nixpkgsFor.x86_64-linux.native.nixComponents.nix-external-api-docs; # System tests. - tests = import ../tests/nixos { inherit lib nixpkgs nixpkgsFor self; } // { - - # Make sure that nix-env still produces the exact same result - # on a particular version of Nixpkgs. - evalNixpkgs = - let - inherit (nixpkgsFor.x86_64-linux.native) runCommand nix; - in - runCommand "eval-nixos" { buildInputs = [ nix ]; } - '' + tests = + import ../tests/nixos { + inherit + lib + nixpkgs + nixpkgsFor + self + ; + } + // { + + # Make sure that nix-env still produces the exact same result + # on a particular version of Nixpkgs. + evalNixpkgs = + let + inherit (nixpkgsFor.x86_64-linux.native) runCommand nix; + in + runCommand "eval-nixos" { buildInputs = [ nix ]; } '' type -p nix-env # Note: we're filtering out nixos-install-tools because https://github.com/NixOS/nixpkgs/pull/153594#issuecomment-1020530593. ( @@ -167,36 +212,36 @@ in mkdir $out ''; - nixpkgsLibTests = - forAllSystems (system: - import (nixpkgs + "/lib/tests/test-with-nix.nix") - { - lib = nixpkgsFor.${system}.native.lib; - nix = self.packages.${system}.nix-cli; - pkgs = nixpkgsFor.${system}.native; - } + nixpkgsLibTests = forAllSystems ( + system: + import (nixpkgs + "/lib/tests/test-with-nix.nix") { + lib = nixpkgsFor.${system}.native.lib; + nix = self.packages.${system}.nix-cli; + pkgs = nixpkgsFor.${system}.native; + } ); - }; + }; metrics.nixpkgs = import "${nixpkgs-regression}/pkgs/top-level/metrics.nix" { pkgs = nixpkgsFor.x86_64-linux.native; nixpkgs = nixpkgs-regression; }; - installTests = forAllSystems (system: - let pkgs = nixpkgsFor.${system}.native; in - pkgs.runCommand "install-tests" - { - againstSelf = testNixVersions pkgs pkgs.nix; - againstCurrentLatest = - # FIXME: temporarily disable this on macOS because of #3605. 
- if system == "x86_64-linux" - then testNixVersions pkgs pkgs.nixVersions.latest - else null; - # Disabled because the latest stable version doesn't handle - # `NIX_DAEMON_SOCKET_PATH` which is required for the tests to work - # againstLatestStable = testNixVersions pkgs pkgs.nixStable; - } "touch $out"); + installTests = forAllSystems ( + system: + let + pkgs = nixpkgsFor.${system}.native; + in + pkgs.runCommand "install-tests" { + againstSelf = testNixVersions pkgs pkgs.nix; + againstCurrentLatest = + # FIXME: temporarily disable this on macOS because of #3605. + if system == "x86_64-linux" then testNixVersions pkgs pkgs.nixVersions.latest else null; + # Disabled because the latest stable version doesn't handle + # `NIX_DAEMON_SOCKET_PATH` which is required for the tests to work + # againstLatestStable = testNixVersions pkgs pkgs.nixStable; + } "touch $out" + ); installerTests = import ../tests/installer { binaryTarballs = self.hydraJobs.binaryTarball; diff --git a/scripts/binary-tarball.nix b/scripts/binary-tarball.nix index 9de90b7fb56..580e3859fe2 100644 --- a/scripts/binary-tarball.nix +++ b/scripts/binary-tarball.nix @@ -1,14 +1,18 @@ -{ runCommand -, system -, buildPackages -, cacert -, nix +{ + runCommand, + system, + buildPackages, + cacert, + nix, }: let installerClosureInfo = buildPackages.closureInfo { - rootPaths = [ nix cacert ]; + rootPaths = [ + nix + cacert + ]; }; inherit (nix) version; diff --git a/scripts/installer.nix b/scripts/installer.nix index cc7759c2c8e..e171f36f99f 100644 --- a/scripts/installer.nix +++ b/scripts/installer.nix @@ -1,36 +1,42 @@ -{ lib -, runCommand -, nix -, tarballs +{ + lib, + runCommand, + nix, + tarballs, }: -runCommand "installer-script" { - buildInputs = [ nix ]; -} '' - mkdir -p $out/nix-support - - # Converts /nix/store/50p3qk8k...-nix-2.4pre20201102_550e11f/bin/nix to 50p3qk8k.../bin/nix. - tarballPath() { - # Remove the store prefix - local path=''${1#${builtins.storeDir}/} - # Get the path relative to the derivation root - local rest=''${path#*/} - # Get the derivation hash - local drvHash=''${path%%-*} - echo "$drvHash/$rest" +runCommand "installer-script" + { + buildInputs = [ nix ]; } + '' + mkdir -p $out/nix-support + + # Converts /nix/store/50p3qk8k...-nix-2.4pre20201102_550e11f/bin/nix to 50p3qk8k.../bin/nix. 
+ tarballPath() { + # Remove the store prefix + local path=''${1#${builtins.storeDir}/} + # Get the path relative to the derivation root + local rest=''${path#*/} + # Get the derivation hash + local drvHash=''${path%%-*} + echo "$drvHash/$rest" + } - substitute ${./install.in} $out/install \ - ${lib.concatMapStrings - (tarball: let - inherit (tarball.stdenv.hostPlatform) system; - in '' \ - --replace '@tarballHash_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${tarball}/*.tar.xz) \ - --replace '@tarballPath_${system}@' $(tarballPath ${tarball}/*.tar.xz) \ - '' - ) - tarballs - } --replace '@nixVersion@' ${nix.version} + substitute ${./install.in} $out/install \ + ${ + lib.concatMapStrings ( + tarball: + let + inherit (tarball.stdenv.hostPlatform) system; + in + '' + \ + --replace '@tarballHash_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${tarball}/*.tar.xz) \ + --replace '@tarballPath_${system}@' $(tarballPath ${tarball}/*.tar.xz) \ + '' + ) tarballs + } --replace '@nixVersion@' ${nix.version} - echo "file installer $out/install" >> $out/nix-support/hydra-build-products -'' + echo "file installer $out/install" >> $out/nix-support/hydra-build-products + '' diff --git a/src/external-api-docs/package.nix b/src/external-api-docs/package.nix index 57c5138cfdb..b194e16d460 100644 --- a/src/external-api-docs/package.nix +++ b/src/external-api-docs/package.nix @@ -1,11 +1,12 @@ -{ lib -, mkMesonDerivation +{ + lib, + mkMesonDerivation, -, doxygen + doxygen, -# Configuration Options + # Configuration Options -, version + version, }: let @@ -39,11 +40,10 @@ mkMesonDerivation (finalAttrs: { doxygen ]; - preConfigure = - '' - chmod u+w ./.version - echo ${finalAttrs.version} > ./.version - ''; + preConfigure = '' + chmod u+w ./.version + echo ${finalAttrs.version} > ./.version + ''; postInstall = '' mkdir -p ''${!outputDoc}/nix-support diff --git a/src/internal-api-docs/package.nix b/src/internal-api-docs/package.nix index 993a257a69f..6c4f354aee5 100644 --- a/src/internal-api-docs/package.nix +++ b/src/internal-api-docs/package.nix @@ -1,11 +1,12 @@ -{ lib -, mkMesonDerivation +{ + lib, + mkMesonDerivation, -, doxygen + doxygen, -# Configuration Options + # Configuration Options -, version + version, }: let @@ -17,27 +18,28 @@ mkMesonDerivation (finalAttrs: { inherit version; workDir = ./.; - fileset = let - cpp = fileset.fileFilter (file: file.hasExt "cc" || file.hasExt "hh"); - in fileset.unions [ - ./.version - ../../.version - ./meson.build - ./doxygen.cfg.in - # Source is not compiled, but still must be available for Doxygen - # to gather comments. - (cpp ../.) - ]; + fileset = + let + cpp = fileset.fileFilter (file: file.hasExt "cc" || file.hasExt "hh"); + in + fileset.unions [ + ./.version + ../../.version + ./meson.build + ./doxygen.cfg.in + # Source is not compiled, but still must be available for Doxygen + # to gather comments. + (cpp ../.) 
+ ]; nativeBuildInputs = [ doxygen ]; - preConfigure = - '' - chmod u+w ./.version - echo ${finalAttrs.version} > ./.version - ''; + preConfigure = '' + chmod u+w ./.version + echo ${finalAttrs.version} > ./.version + ''; postInstall = '' mkdir -p ''${!outputDoc}/nix-support diff --git a/src/libcmd/package.nix b/src/libcmd/package.nix index 5cafb4dc100..d155d9f1e62 100644 --- a/src/libcmd/package.nix +++ b/src/libcmd/package.nix @@ -1,32 +1,33 @@ -{ lib -, stdenv -, mkMesonLibrary +{ + lib, + stdenv, + mkMesonLibrary, -, nix-util -, nix-store -, nix-fetchers -, nix-expr -, nix-flake -, nix-main -, editline -, readline -, lowdown -, nlohmann_json + nix-util, + nix-store, + nix-fetchers, + nix-expr, + nix-flake, + nix-main, + editline, + readline, + lowdown, + nlohmann_json, -# Configuration Options + # Configuration Options -, version + version, -# Whether to enable Markdown rendering in the Nix binary. -, enableMarkdown ? !stdenv.hostPlatform.isWindows + # Whether to enable Markdown rendering in the Nix binary. + enableMarkdown ? !stdenv.hostPlatform.isWindows, -# Which interactive line editor library to use for Nix's repl. -# -# Currently supported choices are: -# -# - editline (default) -# - readline -, readlineFlavor ? if stdenv.hostPlatform.isWindows then "readline" else "editline" + # Which interactive line editor library to use for Nix's repl. + # + # Currently supported choices are: + # + # - editline (default) + # - readline + readlineFlavor ? if stdenv.hostPlatform.isWindows then "readline" else "editline", }: let diff --git a/src/libexpr-c/package.nix b/src/libexpr-c/package.nix index 5047f3e2e9a..ad1ea371c2d 100644 --- a/src/libexpr-c/package.nix +++ b/src/libexpr-c/package.nix @@ -1,12 +1,13 @@ -{ lib -, mkMesonLibrary +{ + lib, + mkMesonLibrary, -, nix-store-c -, nix-expr + nix-store-c, + nix-expr, -# Configuration Options + # Configuration Options -, version + version, }: let diff --git a/src/libexpr-test-support/package.nix b/src/libexpr-test-support/package.nix index 48118fa0c75..5628d606a45 100644 --- a/src/libexpr-test-support/package.nix +++ b/src/libexpr-test-support/package.nix @@ -1,15 +1,16 @@ -{ lib -, mkMesonLibrary +{ + lib, + mkMesonLibrary, -, nix-store-test-support -, nix-expr -, nix-expr-c + nix-store-test-support, + nix-expr, + nix-expr-c, -, rapidcheck + rapidcheck, -# Configuration Options + # Configuration Options -, version + version, }: let diff --git a/src/libexpr-tests/package.nix b/src/libexpr-tests/package.nix index a4a3bb0e7ec..bb5acb7c873 100644 --- a/src/libexpr-tests/package.nix +++ b/src/libexpr-tests/package.nix @@ -1,20 +1,21 @@ -{ lib -, buildPackages -, stdenv -, mkMesonExecutable +{ + lib, + buildPackages, + stdenv, + mkMesonExecutable, -, nix-expr -, nix-expr-c -, nix-expr-test-support + nix-expr, + nix-expr-c, + nix-expr-test-support, -, rapidcheck -, gtest -, runCommand + rapidcheck, + gtest, + runCommand, -# Configuration Options + # Configuration Options -, version -, resolvePath + version, + resolvePath, }: let @@ -58,16 +59,22 @@ mkMesonExecutable (finalAttrs: { passthru = { tests = { - run = runCommand "${finalAttrs.pname}-run" { - meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; - } (lib.optionalString stdenv.hostPlatform.isWindows '' - export HOME="$PWD/home-dir" - mkdir -p "$HOME" - '' + '' - export _NIX_TEST_UNIT_DATA=${resolvePath ./data} - ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} - touch $out - ''); + run = + runCommand "${finalAttrs.pname}-run" + { + meta.broken = 
!stdenv.hostPlatform.emulatorAvailable buildPackages; + } + ( + lib.optionalString stdenv.hostPlatform.isWindows '' + export HOME="$PWD/home-dir" + mkdir -p "$HOME" + '' + + '' + export _NIX_TEST_UNIT_DATA=${resolvePath ./data} + ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} + touch $out + '' + ); }; }; diff --git a/src/libexpr/call-flake.nix b/src/libexpr/call-flake.nix index 964ba25219e..9b38644bb71 100644 --- a/src/libexpr/call-flake.nix +++ b/src/libexpr/call-flake.nix @@ -20,77 +20,77 @@ let # Resolve a input spec into a node name. An input spec is # either a node name, or a 'follows' path from the root # node. - resolveInput = inputSpec: - if builtins.isList inputSpec - then getInputByPath lockFile.root inputSpec - else inputSpec; + resolveInput = + inputSpec: if builtins.isList inputSpec then getInputByPath lockFile.root inputSpec else inputSpec; # Follow an input path (e.g. ["dwarffs" "nixpkgs"]) from the # root node, returning the final node. - getInputByPath = nodeName: path: - if path == [] - then nodeName + getInputByPath = + nodeName: path: + if path == [ ] then + nodeName else getInputByPath # Since this could be a 'follows' input, call resolveInput. (resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path}) (builtins.tail path); - allNodes = - builtins.mapAttrs - (key: node: - let - - parentNode = allNodes.${getInputByPath lockFile.root node.parent}; - - sourceInfo = - if overrides ? ${key} - then - overrides.${key}.sourceInfo - else if node.locked.type == "path" && builtins.substring 0 1 node.locked.path != "/" - then - parentNode.sourceInfo // { - outPath = parentNode.outPath + ("/" + node.locked.path); - } - else - # FIXME: remove obsolete node.info. - # Note: lock file entries are always final. - fetchTreeFinal (node.info or {} // removeAttrs node.locked ["dir"]); - - subdir = overrides.${key}.dir or node.locked.dir or ""; - - outPath = sourceInfo + ((if subdir == "" then "" else "/") + subdir); - - flake = import (outPath + "/flake.nix"); - - inputs = builtins.mapAttrs - (inputName: inputSpec: allNodes.${resolveInput inputSpec}) - (node.inputs or {}); - - outputs = flake.outputs (inputs // { self = result; }); - - result = - outputs - # We add the sourceInfo attribute for its metadata, as they are - # relevant metadata for the flake. However, the outPath of the - # sourceInfo does not necessarily match the outPath of the flake, - # as the flake may be in a subdirectory of a source. - # This is shadowed in the next // - // sourceInfo - // { - # This shadows the sourceInfo.outPath - inherit outPath; - - inherit inputs; inherit outputs; inherit sourceInfo; _type = "flake"; - }; - - in - if node.flake or true then - assert builtins.isFunction flake.outputs; - result - else - sourceInfo - ) - lockFile.nodes; - -in allNodes.${lockFile.root} + allNodes = builtins.mapAttrs ( + key: node: + let + + parentNode = allNodes.${getInputByPath lockFile.root node.parent}; + + sourceInfo = + if overrides ? ${key} then + overrides.${key}.sourceInfo + else if node.locked.type == "path" && builtins.substring 0 1 node.locked.path != "/" then + parentNode.sourceInfo + // { + outPath = parentNode.outPath + ("/" + node.locked.path); + } + else + # FIXME: remove obsolete node.info. + # Note: lock file entries are always final. 
+ fetchTreeFinal (node.info or { } // removeAttrs node.locked [ "dir" ]); + + subdir = overrides.${key}.dir or node.locked.dir or ""; + + outPath = sourceInfo + ((if subdir == "" then "" else "/") + subdir); + + flake = import (outPath + "/flake.nix"); + + inputs = builtins.mapAttrs (inputName: inputSpec: allNodes.${resolveInput inputSpec}) ( + node.inputs or { } + ); + + outputs = flake.outputs (inputs // { self = result; }); + + result = + outputs + # We add the sourceInfo attribute for its metadata, as they are + # relevant metadata for the flake. However, the outPath of the + # sourceInfo does not necessarily match the outPath of the flake, + # as the flake may be in a subdirectory of a source. + # This is shadowed in the next // + // sourceInfo + // { + # This shadows the sourceInfo.outPath + inherit outPath; + + inherit inputs; + inherit outputs; + inherit sourceInfo; + _type = "flake"; + }; + + in + if node.flake or true then + assert builtins.isFunction flake.outputs; + result + else + sourceInfo + ) lockFile.nodes; + +in +allNodes.${lockFile.root} diff --git a/src/libexpr/fetchurl.nix b/src/libexpr/fetchurl.nix index 85a01d16179..72b3b00dffc 100644 --- a/src/libexpr/fetchurl.nix +++ b/src/libexpr/fetchurl.nix @@ -1,40 +1,72 @@ -{ system ? "" # obsolete -, url -, hash ? "" # an SRI hash - -# Legacy hash specification -, md5 ? "", sha1 ? "", sha256 ? "", sha512 ? "" -, outputHash ? - if hash != "" then hash else if sha512 != "" then sha512 else if sha1 != "" then sha1 else if md5 != "" then md5 else sha256 -, outputHashAlgo ? - if hash != "" then "" else if sha512 != "" then "sha512" else if sha1 != "" then "sha1" else if md5 != "" then "md5" else "sha256" - -, executable ? false -, unpack ? false -, name ? baseNameOf (toString url) -, impure ? false +{ + system ? "", # obsolete + url, + hash ? "", # an SRI hash + + # Legacy hash specification + md5 ? "", + sha1 ? "", + sha256 ? "", + sha512 ? "", + outputHash ? + if hash != "" then + hash + else if sha512 != "" then + sha512 + else if sha1 != "" then + sha1 + else if md5 != "" then + md5 + else + sha256, + outputHashAlgo ? + if hash != "" then + "" + else if sha512 != "" then + "sha512" + else if sha1 != "" then + "sha1" + else if md5 != "" then + "md5" + else + "sha256", + + executable ? false, + unpack ? false, + name ? baseNameOf (toString url), + impure ? false, }: -derivation ({ - builder = "builtin:fetchurl"; +derivation ( + { + builder = "builtin:fetchurl"; - # New-style output content requirements. - outputHashMode = if unpack || executable then "recursive" else "flat"; + # New-style output content requirements. + outputHashMode = if unpack || executable then "recursive" else "flat"; - inherit name url executable unpack; + inherit + name + url + executable + unpack + ; - system = "builtin"; + system = "builtin"; - # No need to double the amount of network traffic - preferLocalBuild = true; + # No need to double the amount of network traffic + preferLocalBuild = true; - # This attribute does nothing; it's here to avoid changing evaluation results. - impureEnvVars = [ - "http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy" - ]; + # This attribute does nothing; it's here to avoid changing evaluation results. + impureEnvVars = [ + "http_proxy" + "https_proxy" + "ftp_proxy" + "all_proxy" + "no_proxy" + ]; - # To make "nix-prefetch-url" work. - urls = [ url ]; -} // (if impure - then { __impure = true; } - else { inherit outputHashAlgo outputHash; })) + # To make "nix-prefetch-url" work. 
+ urls = [ url ]; + } + // (if impure then { __impure = true; } else { inherit outputHashAlgo outputHash; }) +) diff --git a/src/libexpr/imported-drv-to-derivation.nix b/src/libexpr/imported-drv-to-derivation.nix index eab8b050e8f..e2cf7fd2652 100644 --- a/src/libexpr/imported-drv-to-derivation.nix +++ b/src/libexpr/imported-drv-to-derivation.nix @@ -1,21 +1,27 @@ -attrs @ { drvPath, outputs, name, ... }: +attrs@{ + drvPath, + outputs, + name, + ... +}: let - commonAttrs = (builtins.listToAttrs outputsList) // - { all = map (x: x.value) outputsList; - inherit drvPath name; - type = "derivation"; - }; + commonAttrs = (builtins.listToAttrs outputsList) // { + all = map (x: x.value) outputsList; + inherit drvPath name; + type = "derivation"; + }; - outputToAttrListElement = outputName: - { name = outputName; - value = commonAttrs // { - outPath = builtins.getAttr outputName attrs; - inherit outputName; - }; + outputToAttrListElement = outputName: { + name = outputName; + value = commonAttrs // { + outPath = builtins.getAttr outputName attrs; + inherit outputName; }; - + }; + outputsList = map outputToAttrListElement outputs; - -in (builtins.head outputsList).value + +in +(builtins.head outputsList).value diff --git a/src/libexpr/package.nix b/src/libexpr/package.nix index 3d5b78e35f2..afd01c3846e 100644 --- a/src/libexpr/package.nix +++ b/src/libexpr/package.nix @@ -1,33 +1,34 @@ -{ lib -, stdenv -, mkMesonLibrary - -, bison -, flex -, cmake # for resolving toml11 dep - -, nix-util -, nix-store -, nix-fetchers -, boost -, boehmgc -, nlohmann_json -, toml11 - -# Configuration Options - -, version - -# Whether to use garbage collection for the Nix language evaluator. -# -# If it is disabled, we just leak memory, but this is not as bad as it -# sounds so long as evaluation just takes places within short-lived -# processes. (When the process exits, the memory is reclaimed; it is -# only leaked *within* the process.) -# -# Temporarily disabled on Windows because the `GC_throw_bad_alloc` -# symbol is missing during linking. -, enableGC ? !stdenv.hostPlatform.isWindows +{ + lib, + stdenv, + mkMesonLibrary, + + bison, + flex, + cmake, # for resolving toml11 dep + + nix-util, + nix-store, + nix-fetchers, + boost, + boehmgc, + nlohmann_json, + toml11, + + # Configuration Options + + version, + + # Whether to use garbage collection for the Nix language evaluator. + # + # If it is disabled, we just leak memory, but this is not as bad as it + # sounds so long as evaluation just takes places within short-lived + # processes. (When the process exits, the memory is reclaimed; it is + # only leaked *within* the process.) + # + # Temporarily disabled on Windows because the `GC_throw_bad_alloc` + # symbol is missing during linking. + enableGC ? !stdenv.hostPlatform.isWindows, }: let @@ -51,10 +52,7 @@ mkMesonLibrary (finalAttrs: { (fileset.fileFilter (file: file.hasExt "hh") ./.) ./lexer.l ./parser.y - (fileset.difference - (fileset.fileFilter (file: file.hasExt "nix") ./.) - ./package.nix - ) + (fileset.difference (fileset.fileFilter (file: file.hasExt "nix") ./.) ./package.nix) ]; nativeBuildInputs = [ diff --git a/src/libexpr/primops/derivation.nix b/src/libexpr/primops/derivation.nix index f329ff71e32..dbb8c218688 100644 --- a/src/libexpr/primops/derivation.nix +++ b/src/libexpr/primops/derivation.nix @@ -26,27 +26,34 @@ Note that `derivation` is very bare-bones, and provides almost no commands during the build. 
Most likely, you'll want to use functions like `stdenv.mkDerivation` in Nixpkgs to set up a basic environment. */ -drvAttrs @ { outputs ? [ "out" ], ... }: +drvAttrs@{ + outputs ? [ "out" ], + ... +}: let strict = derivationStrict drvAttrs; - commonAttrs = drvAttrs // (builtins.listToAttrs outputsList) // - { all = map (x: x.value) outputsList; + commonAttrs = + drvAttrs + // (builtins.listToAttrs outputsList) + // { + all = map (x: x.value) outputsList; inherit drvAttrs; }; - outputToAttrListElement = outputName: - { name = outputName; - value = commonAttrs // { - outPath = builtins.getAttr outputName strict; - drvPath = strict.drvPath; - type = "derivation"; - inherit outputName; - }; + outputToAttrListElement = outputName: { + name = outputName; + value = commonAttrs // { + outPath = builtins.getAttr outputName strict; + drvPath = strict.drvPath; + type = "derivation"; + inherit outputName; }; + }; outputsList = map outputToAttrListElement outputs; -in (builtins.head outputsList).value +in +(builtins.head outputsList).value diff --git a/src/libfetchers-tests/package.nix b/src/libfetchers-tests/package.nix index 5336672a222..f2680e9b3c1 100644 --- a/src/libfetchers-tests/package.nix +++ b/src/libfetchers-tests/package.nix @@ -1,19 +1,20 @@ -{ lib -, buildPackages -, stdenv -, mkMesonExecutable +{ + lib, + buildPackages, + stdenv, + mkMesonExecutable, -, nix-fetchers -, nix-store-test-support + nix-fetchers, + nix-store-test-support, -, rapidcheck -, gtest -, runCommand + rapidcheck, + gtest, + runCommand, -# Configuration Options + # Configuration Options -, version -, resolvePath + version, + resolvePath, }: let @@ -56,16 +57,22 @@ mkMesonExecutable (finalAttrs: { passthru = { tests = { - run = runCommand "${finalAttrs.pname}-run" { - meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; - } (lib.optionalString stdenv.hostPlatform.isWindows '' - export HOME="$PWD/home-dir" - mkdir -p "$HOME" - '' + '' - export _NIX_TEST_UNIT_DATA=${resolvePath ./data} - ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} - touch $out - ''); + run = + runCommand "${finalAttrs.pname}-run" + { + meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; + } + ( + lib.optionalString stdenv.hostPlatform.isWindows '' + export HOME="$PWD/home-dir" + mkdir -p "$HOME" + '' + + '' + export _NIX_TEST_UNIT_DATA=${resolvePath ./data} + ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} + touch $out + '' + ); }; }; diff --git a/src/libfetchers/package.nix b/src/libfetchers/package.nix index d4ca1855503..b0aecd04979 100644 --- a/src/libfetchers/package.nix +++ b/src/libfetchers/package.nix @@ -1,14 +1,15 @@ -{ lib -, mkMesonLibrary +{ + lib, + mkMesonLibrary, -, nix-util -, nix-store -, nlohmann_json -, libgit2 + nix-util, + nix-store, + nlohmann_json, + libgit2, -# Configuration Options + # Configuration Options -, version + version, }: let diff --git a/src/libflake-c/package.nix b/src/libflake-c/package.nix index dcd6c496609..f0615a42798 100644 --- a/src/libflake-c/package.nix +++ b/src/libflake-c/package.nix @@ -1,13 +1,14 @@ -{ lib -, mkMesonLibrary +{ + lib, + mkMesonLibrary, -, nix-store-c -, nix-expr-c -, nix-flake + nix-store-c, + nix-expr-c, + nix-flake, -# Configuration Options + # Configuration Options -, version + version, }: let diff --git a/src/libflake-tests/package.nix b/src/libflake-tests/package.nix index 51b68ad581f..f9d9b0bc0c6 100644 --- a/src/libflake-tests/package.nix +++ b/src/libflake-tests/package.nix @@ 
-1,20 +1,21 @@ -{ lib -, buildPackages -, stdenv -, mkMesonExecutable +{ + lib, + buildPackages, + stdenv, + mkMesonExecutable, -, nix-flake -, nix-flake-c -, nix-expr-test-support + nix-flake, + nix-flake-c, + nix-expr-test-support, -, rapidcheck -, gtest -, runCommand + rapidcheck, + gtest, + runCommand, -# Configuration Options + # Configuration Options -, version -, resolvePath + version, + resolvePath, }: let @@ -58,17 +59,23 @@ mkMesonExecutable (finalAttrs: { passthru = { tests = { - run = runCommand "${finalAttrs.pname}-run" { - meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; - } (lib.optionalString stdenv.hostPlatform.isWindows '' - export HOME="$PWD/home-dir" - mkdir -p "$HOME" - '' + '' - export _NIX_TEST_UNIT_DATA=${resolvePath ./data} - export NIX_CONFIG="extra-experimental-features = flakes" - ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} - touch $out - ''); + run = + runCommand "${finalAttrs.pname}-run" + { + meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; + } + ( + lib.optionalString stdenv.hostPlatform.isWindows '' + export HOME="$PWD/home-dir" + mkdir -p "$HOME" + '' + + '' + export _NIX_TEST_UNIT_DATA=${resolvePath ./data} + export NIX_CONFIG="extra-experimental-features = flakes" + ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} + touch $out + '' + ); }; }; diff --git a/src/libflake/package.nix b/src/libflake/package.nix index 3fc96a20e58..ebd38e140d3 100644 --- a/src/libflake/package.nix +++ b/src/libflake/package.nix @@ -1,15 +1,16 @@ -{ lib -, mkMesonLibrary +{ + lib, + mkMesonLibrary, -, nix-util -, nix-store -, nix-fetchers -, nix-expr -, nlohmann_json + nix-util, + nix-store, + nix-fetchers, + nix-expr, + nlohmann_json, -# Configuration Options + # Configuration Options -, version + version, }: let diff --git a/src/libmain-c/package.nix b/src/libmain-c/package.nix index b96901bb46b..cf710e03b0d 100644 --- a/src/libmain-c/package.nix +++ b/src/libmain-c/package.nix @@ -1,14 +1,15 @@ -{ lib -, mkMesonLibrary +{ + lib, + mkMesonLibrary, -, nix-util-c -, nix-store -, nix-store-c -, nix-main + nix-util-c, + nix-store, + nix-store-c, + nix-main, -# Configuration Options + # Configuration Options -, version + version, }: let diff --git a/src/libmain/package.nix b/src/libmain/package.nix index 9a5b9e8c2df..046b505dfd4 100644 --- a/src/libmain/package.nix +++ b/src/libmain/package.nix @@ -1,14 +1,15 @@ -{ lib -, mkMesonLibrary +{ + lib, + mkMesonLibrary, -, openssl + openssl, -, nix-util -, nix-store + nix-util, + nix-store, -# Configuration Options + # Configuration Options -, version + version, }: let diff --git a/src/libstore-c/package.nix b/src/libstore-c/package.nix index c2413c3890d..89abeaab870 100644 --- a/src/libstore-c/package.nix +++ b/src/libstore-c/package.nix @@ -1,12 +1,13 @@ -{ lib -, mkMesonLibrary +{ + lib, + mkMesonLibrary, -, nix-util-c -, nix-store + nix-util-c, + nix-store, -# Configuration Options + # Configuration Options -, version + version, }: let diff --git a/src/libstore-test-support/package.nix b/src/libstore-test-support/package.nix index 5d3f41b3e8b..7cc29795c19 100644 --- a/src/libstore-test-support/package.nix +++ b/src/libstore-test-support/package.nix @@ -1,15 +1,16 @@ -{ lib -, mkMesonLibrary +{ + lib, + mkMesonLibrary, -, nix-util-test-support -, nix-store -, nix-store-c + nix-util-test-support, + nix-store, + nix-store-c, -, rapidcheck + rapidcheck, -# Configuration Options + # Configuration Options -, version + version, 
}: let diff --git a/src/libstore-tests/package.nix b/src/libstore-tests/package.nix index 3acf4e25c2c..670386c4a6f 100644 --- a/src/libstore-tests/package.nix +++ b/src/libstore-tests/package.nix @@ -1,21 +1,22 @@ -{ lib -, buildPackages -, stdenv -, mkMesonExecutable +{ + lib, + buildPackages, + stdenv, + mkMesonExecutable, -, nix-store -, nix-store-c -, nix-store-test-support -, sqlite + nix-store, + nix-store-c, + nix-store-test-support, + sqlite, -, rapidcheck -, gtest -, runCommand + rapidcheck, + gtest, + runCommand, -# Configuration Options + # Configuration Options -, version -, filesetToSource + version, + filesetToSource, }: let @@ -64,26 +65,33 @@ mkMesonExecutable (finalAttrs: { passthru = { tests = { - run = let - # Some data is shared with the functional tests: they create it, - # we consume it. - data = filesetToSource { - root = ../..; - fileset = lib.fileset.unions [ - ./data - ../../tests/functional/derivation - ]; - }; - in runCommand "${finalAttrs.pname}-run" { - meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; - } (lib.optionalString stdenv.hostPlatform.isWindows '' - export HOME="$PWD/home-dir" - mkdir -p "$HOME" - '' + '' - export _NIX_TEST_UNIT_DATA=${data + "/src/libstore-tests/data"} - ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} - touch $out - ''); + run = + let + # Some data is shared with the functional tests: they create it, + # we consume it. + data = filesetToSource { + root = ../..; + fileset = lib.fileset.unions [ + ./data + ../../tests/functional/derivation + ]; + }; + in + runCommand "${finalAttrs.pname}-run" + { + meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; + } + ( + lib.optionalString stdenv.hostPlatform.isWindows '' + export HOME="$PWD/home-dir" + mkdir -p "$HOME" + '' + + '' + export _NIX_TEST_UNIT_DATA=${data + "/src/libstore-tests/data"} + ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} + touch $out + '' + ); }; }; diff --git a/src/libstore/package.nix b/src/libstore/package.nix index 4fbaea4acc5..c982b44f0b7 100644 --- a/src/libstore/package.nix +++ b/src/libstore/package.nix @@ -1,25 +1,26 @@ -{ lib -, stdenv -, mkMesonLibrary +{ + lib, + stdenv, + mkMesonLibrary, -, unixtools -, darwin + unixtools, + darwin, -, nix-util -, boost -, curl -, aws-sdk-cpp -, libseccomp -, nlohmann_json -, sqlite + nix-util, + boost, + curl, + aws-sdk-cpp, + libseccomp, + nlohmann_json, + sqlite, -, busybox-sandbox-shell ? null + busybox-sandbox-shell ? null, -# Configuration Options + # Configuration Options -, version + version, -, embeddedSandboxShell ? stdenv.hostPlatform.isStatic + embeddedSandboxShell ? stdenv.hostPlatform.isStatic, }: let @@ -48,19 +49,20 @@ mkMesonLibrary (finalAttrs: { (fileset.fileFilter (file: file.hasExt "sql") ./.) 
]; - nativeBuildInputs = - lib.optional embeddedSandboxShell unixtools.hexdump; + nativeBuildInputs = lib.optional embeddedSandboxShell unixtools.hexdump; - buildInputs = [ - boost - curl - sqlite - ] ++ lib.optional stdenv.hostPlatform.isLinux libseccomp + buildInputs = + [ + boost + curl + sqlite + ] + ++ lib.optional stdenv.hostPlatform.isLinux libseccomp # There have been issues building these dependencies ++ lib.optional stdenv.hostPlatform.isDarwin darwin.apple_sdk.libs.sandbox - ++ lib.optional (stdenv.hostPlatform == stdenv.buildPlatform && (stdenv.isLinux || stdenv.isDarwin)) - aws-sdk-cpp - ; + ++ lib.optional ( + stdenv.hostPlatform == stdenv.buildPlatform && (stdenv.isLinux || stdenv.isDarwin) + ) aws-sdk-cpp; propagatedBuildInputs = [ nix-util @@ -75,12 +77,14 @@ mkMesonLibrary (finalAttrs: { echo ${version} > ../../.version ''; - mesonFlags = [ - (lib.mesonEnable "seccomp-sandboxing" stdenv.hostPlatform.isLinux) - (lib.mesonBool "embedded-sandbox-shell" embeddedSandboxShell) - ] ++ lib.optionals stdenv.hostPlatform.isLinux [ - (lib.mesonOption "sandbox-shell" "${busybox-sandbox-shell}/bin/busybox") - ]; + mesonFlags = + [ + (lib.mesonEnable "seccomp-sandboxing" stdenv.hostPlatform.isLinux) + (lib.mesonBool "embedded-sandbox-shell" embeddedSandboxShell) + ] + ++ lib.optionals stdenv.hostPlatform.isLinux [ + (lib.mesonOption "sandbox-shell" "${busybox-sandbox-shell}/bin/busybox") + ]; env = { # Needed for Meson to find Boost. diff --git a/src/libutil-c/package.nix b/src/libutil-c/package.nix index f80e0b7f0a2..72f57d6f9c6 100644 --- a/src/libutil-c/package.nix +++ b/src/libutil-c/package.nix @@ -1,11 +1,12 @@ -{ lib -, mkMesonLibrary +{ + lib, + mkMesonLibrary, -, nix-util + nix-util, -# Configuration Options + # Configuration Options -, version + version, }: let diff --git a/src/libutil-test-support/package.nix b/src/libutil-test-support/package.nix index a8a239717a6..33cd5217def 100644 --- a/src/libutil-test-support/package.nix +++ b/src/libutil-test-support/package.nix @@ -1,14 +1,15 @@ -{ lib -, mkMesonLibrary +{ + lib, + mkMesonLibrary, -, nix-util -, nix-util-c + nix-util, + nix-util-c, -, rapidcheck + rapidcheck, -# Configuration Options + # Configuration Options -, version + version, }: let diff --git a/src/libutil-tests/package.nix b/src/libutil-tests/package.nix index 28769e11522..d89c544539e 100644 --- a/src/libutil-tests/package.nix +++ b/src/libutil-tests/package.nix @@ -1,19 +1,20 @@ -{ lib -, buildPackages -, stdenv -, mkMesonExecutable +{ + lib, + buildPackages, + stdenv, + mkMesonExecutable, -, nix-util -, nix-util-c -, nix-util-test-support + nix-util, + nix-util-c, + nix-util-test-support, -, rapidcheck -, gtest -, runCommand + rapidcheck, + gtest, + runCommand, -# Configuration Options + # Configuration Options -, version + version, }: let @@ -57,16 +58,22 @@ mkMesonExecutable (finalAttrs: { passthru = { tests = { - run = runCommand "${finalAttrs.pname}-run" { - meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; - } (lib.optionalString stdenv.hostPlatform.isWindows '' - export HOME="$PWD/home-dir" - mkdir -p "$HOME" - '' + '' - export _NIX_TEST_UNIT_DATA=${./data} - ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} - touch $out - ''); + run = + runCommand "${finalAttrs.pname}-run" + { + meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; + } + ( + lib.optionalString stdenv.hostPlatform.isWindows '' + export HOME="$PWD/home-dir" + mkdir -p "$HOME" + '' + + '' + export 
_NIX_TEST_UNIT_DATA=${./data} + ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} + touch $out + '' + ); }; }; diff --git a/src/libutil/package.nix b/src/libutil/package.nix index 679872a75c5..586119a6e5d 100644 --- a/src/libutil/package.nix +++ b/src/libutil/package.nix @@ -1,18 +1,19 @@ -{ lib -, stdenv -, mkMesonLibrary +{ + lib, + stdenv, + mkMesonLibrary, -, boost -, brotli -, libarchive -, libcpuid -, libsodium -, nlohmann_json -, openssl + boost, + brotli, + libarchive, + libcpuid, + libsodium, + nlohmann_json, + openssl, -# Configuration Options + # Configuration Options -, version + version, }: let @@ -43,8 +44,7 @@ mkMesonLibrary (finalAttrs: { brotli libsodium openssl - ] ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid - ; + ] ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid; propagatedBuildInputs = [ boost diff --git a/src/nix-channel/unpack-channel.nix b/src/nix-channel/unpack-channel.nix index 10515bc8b91..84e324a4d89 100644 --- a/src/nix-channel/unpack-channel.nix +++ b/src/nix-channel/unpack-channel.nix @@ -1,4 +1,8 @@ -{ name, channelName, src }: +{ + name, + channelName, + src, +}: derivation { builder = "builtin:unpack-channel"; diff --git a/src/nix-env/buildenv.nix b/src/nix-env/buildenv.nix index 0bac4c44b48..c8955a94e99 100644 --- a/src/nix-env/buildenv.nix +++ b/src/nix-env/buildenv.nix @@ -8,13 +8,15 @@ derivation { inherit manifest; # !!! grmbl, need structured data for passing this in a clean way. - derivations = - map (d: - [ (d.meta.active or "true") - (d.meta.priority or 5) - (builtins.length d.outputs) - ] ++ map (output: builtins.getAttr output d) d.outputs) - derivations; + derivations = map ( + d: + [ + (d.meta.active or "true") + (d.meta.priority or 5) + (builtins.length d.outputs) + ] + ++ map (output: builtins.getAttr output d) d.outputs + ) derivations; # Building user environments remotely just causes huge amounts of # network traffic, so don't do that. 
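The `derivations` mapping in `buildenv.nix` above flattens each input derivation into a plain list because, as its comment laments, there is no structured way to pass this data to the builtin builder. A sketch of the value it produces for a single hypothetical two-output package (the store paths are placeholders):

```nix
# Same mapping as in buildenv.nix, applied to one fake derivation attrset.
map (
  d:
  [
    (d.meta.active or "true")
    (d.meta.priority or 5)
    (builtins.length d.outputs)
  ]
  ++ map (output: builtins.getAttr output d) d.outputs
) [
  {
    meta = { };
    outputs = [
      "out"
      "dev"
    ];
    out = "/nix/store/...-hello"; # placeholder
    dev = "/nix/store/...-hello-dev"; # placeholder
  }
]
# => [ [ "true" 5 2 "/nix/store/...-hello" "/nix/store/...-hello-dev" ] ]
```

Each entry is therefore: active flag, priority, number of outputs, then the output paths in order.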
diff --git a/src/nix/package.nix b/src/nix/package.nix index 171621af917..89c52c3bb05 100644 --- a/src/nix/package.nix +++ b/src/nix/package.nix @@ -1,14 +1,15 @@ -{ lib -, mkMesonExecutable +{ + lib, + mkMesonExecutable, -, nix-store -, nix-expr -, nix-main -, nix-cmd + nix-store, + nix-expr, + nix-main, + nix-cmd, -# Configuration Options + # Configuration Options -, version + version, }: let @@ -20,64 +21,67 @@ mkMesonExecutable (finalAttrs: { inherit version; workDir = ./.; - fileset = fileset.unions ([ - ../../nix-meson-build-support - ./nix-meson-build-support - ../../.version - ./.version - ./meson.build - ./meson.options + fileset = fileset.unions ( + [ + ../../nix-meson-build-support + ./nix-meson-build-support + ../../.version + ./.version + ./meson.build + ./meson.options - # Symbolic links to other dirs - ## exes - ./build-remote - ./doc - ./nix-build - ./nix-channel - ./nix-collect-garbage - ./nix-copy-closure - ./nix-env - ./nix-instantiate - ./nix-store - ## dirs - ./scripts - ../../scripts - ./misc - ../../misc + # Symbolic links to other dirs + ## exes + ./build-remote + ./doc + ./nix-build + ./nix-channel + ./nix-collect-garbage + ./nix-copy-closure + ./nix-env + ./nix-instantiate + ./nix-store + ## dirs + ./scripts + ../../scripts + ./misc + ../../misc - # Doc nix files for --help - ../../doc/manual/generate-manpage.nix - ../../doc/manual/utils.nix - ../../doc/manual/generate-settings.nix - ../../doc/manual/generate-store-info.nix + # Doc nix files for --help + ../../doc/manual/generate-manpage.nix + ../../doc/manual/utils.nix + ../../doc/manual/generate-settings.nix + ../../doc/manual/generate-store-info.nix - # Other files to be included as string literals - ../nix-channel/unpack-channel.nix - ../nix-env/buildenv.nix - ./get-env.sh - ./help-stores.md - ../../doc/manual/source/store/types/index.md.in - ./profiles.md - ../../doc/manual/source/command-ref/files/profiles.md + # Other files to be included as string literals + ../nix-channel/unpack-channel.nix + ../nix-env/buildenv.nix + ./get-env.sh + ./help-stores.md + ../../doc/manual/source/store/types/index.md.in + ./profiles.md + ../../doc/manual/source/command-ref/files/profiles.md - # Files - ] ++ lib.concatMap - (dir: [ - (fileset.fileFilter (file: file.hasExt "cc") dir) - (fileset.fileFilter (file: file.hasExt "hh") dir) - (fileset.fileFilter (file: file.hasExt "md") dir) - ]) - [ - ./. - ../build-remote - ../nix-build - ../nix-channel - ../nix-collect-garbage - ../nix-copy-closure - ../nix-env - ../nix-instantiate - ../nix-store + # Files ] + ++ + lib.concatMap + (dir: [ + (fileset.fileFilter (file: file.hasExt "cc") dir) + (fileset.fileFilter (file: file.hasExt "hh") dir) + (fileset.fileFilter (file: file.hasExt "md") dir) + ]) + [ + ./. 
+ ../build-remote + ../nix-build + ../nix-channel + ../nix-collect-garbage + ../nix-copy-closure + ../nix-env + ../nix-instantiate + ../nix-store + ] ); buildInputs = [ diff --git a/src/perl/package.nix b/src/perl/package.nix index 5ee0df13c9d..d95d13aa921 100644 --- a/src/perl/package.nix +++ b/src/perl/package.nix @@ -1,76 +1,82 @@ -{ lib -, stdenv -, mkMesonDerivation -, pkg-config -, perl -, perlPackages -, nix-store -, version -, curl -, bzip2 -, libsodium +{ + lib, + stdenv, + mkMesonDerivation, + pkg-config, + perl, + perlPackages, + nix-store, + version, + curl, + bzip2, + libsodium, }: let inherit (lib) fileset; in -perl.pkgs.toPerlModule (mkMesonDerivation (finalAttrs: { - pname = "nix-perl"; - inherit version; +perl.pkgs.toPerlModule ( + mkMesonDerivation (finalAttrs: { + pname = "nix-perl"; + inherit version; - workDir = ./.; - fileset = fileset.unions ([ - ./.version - ../../.version - ./MANIFEST - ./lib - ./meson.build - ./meson.options - ] ++ lib.optionals finalAttrs.doCheck [ - ./.yath.rc.in - ./t - ]); + workDir = ./.; + fileset = fileset.unions ( + [ + ./.version + ../../.version + ./MANIFEST + ./lib + ./meson.build + ./meson.options + ] + ++ lib.optionals finalAttrs.doCheck [ + ./.yath.rc.in + ./t + ] + ); - nativeBuildInputs = [ - pkg-config - perl - curl - ]; + nativeBuildInputs = [ + pkg-config + perl + curl + ]; - buildInputs = [ - nix-store - ] ++ finalAttrs.passthru.externalBuildInputs; + buildInputs = [ + nix-store + ] ++ finalAttrs.passthru.externalBuildInputs; - # Hack for sake of the dev shell - passthru.externalBuildInputs = [ - bzip2 - libsodium - ]; + # Hack for sake of the dev shell + passthru.externalBuildInputs = [ + bzip2 + libsodium + ]; - # `perlPackages.Test2Harness` is marked broken for Darwin - doCheck = !stdenv.isDarwin; + # `perlPackages.Test2Harness` is marked broken for Darwin + doCheck = !stdenv.isDarwin; - nativeCheckInputs = [ - perlPackages.Test2Harness - ]; + nativeCheckInputs = [ + perlPackages.Test2Harness + ]; - preConfigure = - # "Inline" .version so its not a symlink, and includes the suffix - '' - chmod u+w .version - echo ${finalAttrs.version} > .version - ''; + preConfigure = + # "Inline" .version so its not a symlink, and includes the suffix + '' + chmod u+w .version + echo ${finalAttrs.version} > .version + ''; - mesonFlags = [ - (lib.mesonOption "dbi_path" "${perlPackages.DBI}/${perl.libPrefix}") - (lib.mesonOption "dbd_sqlite_path" "${perlPackages.DBDSQLite}/${perl.libPrefix}") - (lib.mesonEnable "tests" finalAttrs.doCheck) - ]; + mesonFlags = [ + (lib.mesonOption "dbi_path" "${perlPackages.DBI}/${perl.libPrefix}") + (lib.mesonOption "dbd_sqlite_path" "${perlPackages.DBDSQLite}/${perl.libPrefix}") + (lib.mesonEnable "tests" finalAttrs.doCheck) + ]; - mesonCheckFlags = [ - "--print-errorlogs" - ]; + mesonCheckFlags = [ + "--print-errorlogs" + ]; - strictDeps = false; -})) + strictDeps = false; + }) +) diff --git a/tests/functional/big-derivation-attr.nix b/tests/functional/big-derivation-attr.nix index 35c1187f665..d370486d6c4 100644 --- a/tests/functional/big-derivation-attr.nix +++ b/tests/functional/big-derivation-attr.nix @@ -1,6 +1,25 @@ let sixteenBytes = "0123456789abcdef"; - times16 = s: builtins.concatStringsSep "" [s s s s s s s s s s s s s s s s]; + times16 = + s: + builtins.concatStringsSep "" [ + s + s + s + s + s + s + s + s + s + s + s + s + s + s + s + s + ]; exp = n: x: if n == 1 then x else times16 (exp (n - 1) x); sixteenMegabyte = exp 6 sixteenBytes; in diff --git a/tests/functional/build-hook-ca-fixed.nix 
b/tests/functional/build-hook-ca-fixed.nix index 0ce6d9b128b..3d2643c1321 100644 --- a/tests/functional/build-hook-ca-fixed.nix +++ b/tests/functional/build-hook-ca-fixed.nix @@ -4,24 +4,39 @@ with import ./config.nix; let - mkDerivation = args: - derivation ({ - inherit system; - builder = busybox; - args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" '' - if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi; - eval "$buildCommand" - '')]; - outputHashMode = "recursive"; - outputHashAlgo = "sha256"; - } // removeAttrs args ["builder" "meta" "passthru"]) - // { meta = args.meta or {}; passthru = args.passthru or {}; }; + mkDerivation = + args: + derivation ( + { + inherit system; + builder = busybox; + args = [ + "sh" + "-e" + args.builder or (builtins.toFile "builder-${args.name}.sh" '' + if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi; + eval "$buildCommand" + '') + ]; + outputHashMode = "recursive"; + outputHashAlgo = "sha256"; + } + // removeAttrs args [ + "builder" + "meta" + "passthru" + ] + ) + // { + meta = args.meta or { }; + passthru = args.passthru or { }; + }; input1 = mkDerivation { shell = busybox; name = "build-remote-input-1"; buildCommand = "echo hi-input1; echo FOO > $out"; - requiredSystemFeatures = ["foo"]; + requiredSystemFeatures = [ "foo" ]; outputHash = "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="; }; @@ -29,7 +44,7 @@ let shell = busybox; name = "build-remote-input-2"; buildCommand = "echo hi; echo BAR > $out"; - requiredSystemFeatures = ["bar"]; + requiredSystemFeatures = [ "bar" ]; outputHash = "sha256-XArauVH91AVwP9hBBQNlkX9ccuPpSYx9o0zeIHb6e+Q="; }; @@ -41,21 +56,20 @@ let read x < ${input2} echo $x BAZ > $out ''; - requiredSystemFeatures = ["baz"]; + requiredSystemFeatures = [ "baz" ]; outputHash = "sha256-daKAcPp/+BYMQsVi/YYMlCKoNAxCNDsaivwSHgQqD2s="; }; in - mkDerivation { - shell = busybox; - name = "build-remote"; - passthru = { inherit input1 input2 input3; }; - buildCommand = - '' - read x < ${input1} - read y < ${input3} - echo "$x $y" > $out - ''; - outputHash = "sha256-5SxbkUw6xe2l9TE1uwCvTtTDysD1vhRor38OtDF0LqQ="; - } +mkDerivation { + shell = busybox; + name = "build-remote"; + passthru = { inherit input1 input2 input3; }; + buildCommand = '' + read x < ${input1} + read y < ${input3} + echo "$x $y" > $out + ''; + outputHash = "sha256-5SxbkUw6xe2l9TE1uwCvTtTDysD1vhRor38OtDF0LqQ="; +} diff --git a/tests/functional/build-hook.nix b/tests/functional/build-hook.nix index 99a13aee483..45a2a84d6d4 100644 --- a/tests/functional/build-hook.nix +++ b/tests/functional/build-hook.nix @@ -1,39 +1,61 @@ -{ busybox, contentAddressed ? false }: +{ + busybox, + contentAddressed ? 
false, +}: with import ./config.nix; let - caArgs = if contentAddressed then { - outputHashMode = "recursive"; - outputHashAlgo = "sha256"; - __contentAddressed = true; - } else {}; - - mkDerivation = args: - derivation ({ - inherit system; - builder = busybox; - args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" '' - if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi; - eval "$buildCommand" - '')]; - } // removeAttrs args ["builder" "meta" "passthru"] - // caArgs) - // { meta = args.meta or {}; passthru = args.passthru or {}; }; + caArgs = + if contentAddressed then + { + outputHashMode = "recursive"; + outputHashAlgo = "sha256"; + __contentAddressed = true; + } + else + { }; + + mkDerivation = + args: + derivation ( + { + inherit system; + builder = busybox; + args = [ + "sh" + "-e" + args.builder or (builtins.toFile "builder-${args.name}.sh" '' + if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi; + eval "$buildCommand" + '') + ]; + } + // removeAttrs args [ + "builder" + "meta" + "passthru" + ] + // caArgs + ) + // { + meta = args.meta or { }; + passthru = args.passthru or { }; + }; input1 = mkDerivation { shell = busybox; name = "build-remote-input-1"; buildCommand = "echo hi-input1; echo FOO > $out"; - requiredSystemFeatures = ["foo"]; + requiredSystemFeatures = [ "foo" ]; }; input2 = mkDerivation { shell = busybox; name = "build-remote-input-2"; buildCommand = "echo hi; echo BAR > $out"; - requiredSystemFeatures = ["bar"]; + requiredSystemFeatures = [ "bar" ]; }; input3 = mkDerivation { @@ -44,19 +66,18 @@ let read x < ${input2} echo $x BAZ > $out ''; - requiredSystemFeatures = ["baz"]; + requiredSystemFeatures = [ "baz" ]; }; in - mkDerivation { - shell = busybox; - name = "build-remote"; - passthru = { inherit input1 input2 input3; }; - buildCommand = - '' - read x < ${input1} - read y < ${input3} - echo "$x $y" > $out - ''; - } +mkDerivation { + shell = busybox; + name = "build-remote"; + passthru = { inherit input1 input2 input3; }; + buildCommand = '' + read x < ${input1} + read y < ${input3} + echo "$x $y" > $out + ''; +} diff --git a/tests/functional/ca-shell.nix b/tests/functional/ca-shell.nix index 36e1d1526f3..69ce6b6f17e 100644 --- a/tests/functional/ca-shell.nix +++ b/tests/functional/ca-shell.nix @@ -1 +1,5 @@ -{ inNixShell ? false, ... }@args: import ./shell.nix (args // { contentAddressed = true; }) +{ + inNixShell ? false, + ... +}@args: +import ./shell.nix (args // { contentAddressed = true; }) diff --git a/tests/functional/ca/content-addressed.nix b/tests/functional/ca/content-addressed.nix index 2559c562f92..6ed9c185b62 100644 --- a/tests/functional/ca/content-addressed.nix +++ b/tests/functional/ca/content-addressed.nix @@ -1,13 +1,21 @@ with import ./config.nix; -let mkCADerivation = args: mkDerivation ({ - __contentAddressed = true; - outputHashMode = "recursive"; - outputHashAlgo = "sha256"; -} // args); +let + mkCADerivation = + args: + mkDerivation ( + { + __contentAddressed = true; + outputHashMode = "recursive"; + outputHashAlgo = "sha256"; + } + // args + ); in -{ seed ? 0 }: +{ + seed ? 0, +}: # A simple content-addressed derivation. 
# The derivation can be arbitrarily modified by passing a different `seed`, # but the output will always be the same @@ -23,7 +31,11 @@ rec { }; rootCA = mkCADerivation { name = "rootCA"; - outputs = [ "out" "dev" "foo" ]; + outputs = [ + "out" + "dev" + "foo" + ]; buildCommand = '' echo "building a CA derivation" echo "The seed is ${toString seed}" diff --git a/tests/functional/ca/flake.nix b/tests/functional/ca/flake.nix index 332c92a6792..28a27c4b31d 100644 --- a/tests/functional/ca/flake.nix +++ b/tests/functional/ca/flake.nix @@ -1,3 +1,3 @@ { - outputs = { self }: import ./content-addressed.nix {}; + outputs = { self }: import ./content-addressed.nix { }; } diff --git a/tests/functional/ca/nondeterministic.nix b/tests/functional/ca/nondeterministic.nix index d6d099a3e0e..2af26f0ac2e 100644 --- a/tests/functional/ca/nondeterministic.nix +++ b/tests/functional/ca/nondeterministic.nix @@ -1,10 +1,16 @@ with import ./config.nix; -let mkCADerivation = args: mkDerivation ({ - __contentAddressed = true; - outputHashMode = "recursive"; - outputHashAlgo = "sha256"; -} // args); +let + mkCADerivation = + args: + mkDerivation ( + { + __contentAddressed = true; + outputHashMode = "recursive"; + outputHashAlgo = "sha256"; + } + // args + ); in rec { @@ -15,13 +21,15 @@ rec { echo $(date) > $out/current-time ''; }; - dep = seed: mkCADerivation { - name = "dep"; - inherit seed; - buildCommand = '' - echo ${currentTime} > $out - ''; - }; + dep = + seed: + mkCADerivation { + name = "dep"; + inherit seed; + buildCommand = '' + echo ${currentTime} > $out + ''; + }; dep1 = dep 1; dep2 = dep 2; toplevel = mkCADerivation { @@ -32,4 +40,3 @@ rec { ''; }; } - diff --git a/tests/functional/ca/racy.nix b/tests/functional/ca/racy.nix index 555a1548464..cbc0e1643a7 100644 --- a/tests/functional/ca/racy.nix +++ b/tests/functional/ca/racy.nix @@ -1,7 +1,6 @@ # A derivation that would certainly fail if several builders tried to # build it at once. 
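The `seed` argument used by these content-addressed test fixtures exploits the defining property of CA derivations: changing an attribute that does not affect the build result produces a different `.drv`, yet the realised output path stays the same. A rough sketch in the same style, assuming the functional-test `config.nix` environment:

```nix
# Two derivations differing only in an output-irrelevant `seed` attribute:
# distinct .drv files, identical content-addressed outputs once built.
with import ./config.nix;

let
  mkCADerivation =
    args:
    mkDerivation (
      {
        __contentAddressed = true;
        outputHashMode = "recursive";
        outputHashAlgo = "sha256";
      }
      // args
    );
  demo =
    seed:
    mkCADerivation {
      name = "ca-demo";
      inherit seed;
      buildCommand = ''
        echo hello > $out
      '';
    };
in
(demo 1).drvPath == (demo 2).drvPath # false, but both build to the same store path
```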
- with import ./config.nix; mkDerivation { diff --git a/tests/functional/check-refs.nix b/tests/functional/check-refs.nix index 89690e456c1..471d9575360 100644 --- a/tests/functional/check-refs.nix +++ b/tests/functional/check-refs.nix @@ -2,11 +2,16 @@ with import ./config.nix; rec { - dep = import ./dependencies.nix {}; + dep = import ./dependencies.nix { }; - makeTest = nr: args: mkDerivation ({ - name = "check-refs-" + toString nr; - } // args); + makeTest = + nr: args: + mkDerivation ( + { + name = "check-refs-" + toString nr; + } + // args + ); src = builtins.toFile "aux-ref" "bla bla"; @@ -22,31 +27,31 @@ rec { test3 = makeTest 3 { builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $dep $out/link"; - allowedReferences = []; + allowedReferences = [ ]; inherit dep; }; test4 = makeTest 4 { builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $dep $out/link"; - allowedReferences = [dep]; + allowedReferences = [ dep ]; inherit dep; }; test5 = makeTest 5 { builder = builtins.toFile "builder.sh" "mkdir $out"; - allowedReferences = []; + allowedReferences = [ ]; inherit dep; }; test6 = makeTest 6 { builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $out $out/link"; - allowedReferences = []; + allowedReferences = [ ]; inherit dep; }; test7 = makeTest 7 { builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $out $out/link"; - allowedReferences = ["out"]; + allowedReferences = [ "out" ]; inherit dep; }; @@ -58,19 +63,19 @@ rec { test9 = makeTest 9 { builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $dep $out/link"; inherit dep; - disallowedReferences = [dep]; + disallowedReferences = [ dep ]; }; test10 = makeTest 10 { builder = builtins.toFile "builder.sh" "mkdir $out; echo $test5; ln -s $dep $out/link"; inherit dep test5; - disallowedReferences = [test5]; + disallowedReferences = [ test5 ]; }; test11 = makeTest 11 { __structuredAttrs = true; unsafeDiscardReferences.out = true; - outputChecks.out.allowedReferences = []; + outputChecks.out.allowedReferences = [ ]; buildCommand = ''echo ${dep} > "''${outputs[out]}"''; }; diff --git a/tests/functional/check-reqs.nix b/tests/functional/check-reqs.nix index 41436cb48e0..3cca761846a 100644 --- a/tests/functional/check-reqs.nix +++ b/tests/functional/check-reqs.nix @@ -22,36 +22,48 @@ rec { ''; }; - makeTest = nr: allowreqs: mkDerivation { - name = "check-reqs-" + toString nr; - inherit deps; - builder = builtins.toFile "builder.sh" '' - mkdir $out - ln -s $deps $out/depdir1 - ''; - allowedRequisites = allowreqs; - }; + makeTest = + nr: allowreqs: + mkDerivation { + name = "check-reqs-" + toString nr; + inherit deps; + builder = builtins.toFile "builder.sh" '' + mkdir $out + ln -s $deps $out/depdir1 + ''; + allowedRequisites = allowreqs; + }; # When specifying all the requisites, the build succeeds. - test1 = makeTest 1 [ dep1 dep2 deps ]; + test1 = makeTest 1 [ + dep1 + dep2 + deps + ]; # But missing anything it fails. 
- test2 = makeTest 2 [ dep2 deps ]; - test3 = makeTest 3 [ dep1 deps ]; + test2 = makeTest 2 [ + dep2 + deps + ]; + test3 = makeTest 3 [ + dep1 + deps + ]; test4 = makeTest 4 [ deps ]; - test5 = makeTest 5 []; + test5 = makeTest 5 [ ]; test6 = mkDerivation { name = "check-reqs"; inherit deps; builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $deps $out/depdir1"; - disallowedRequisites = [dep1]; + disallowedRequisites = [ dep1 ]; }; test7 = mkDerivation { name = "check-reqs"; inherit deps; builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $deps $out/depdir1"; - disallowedRequisites = [test1]; + disallowedRequisites = [ test1 ]; }; } diff --git a/tests/functional/check.nix b/tests/functional/check.nix index ddab8eea9cb..d83c28ca2ee 100644 --- a/tests/functional/check.nix +++ b/tests/functional/check.nix @@ -1,4 +1,6 @@ -{checkBuildId ? 0}: +{ + checkBuildId ? 0, +}: with import ./config.nix; @@ -6,41 +8,38 @@ with import ./config.nix; nondeterministic = mkDerivation { inherit checkBuildId; name = "nondeterministic"; - buildCommand = - '' - mkdir $out - date +%s.%N > $out/date - echo "CHECK_TMPDIR=$TMPDIR" - echo "checkBuildId=$checkBuildId" - echo "$checkBuildId" > $TMPDIR/checkBuildId - ''; + buildCommand = '' + mkdir $out + date +%s.%N > $out/date + echo "CHECK_TMPDIR=$TMPDIR" + echo "checkBuildId=$checkBuildId" + echo "$checkBuildId" > $TMPDIR/checkBuildId + ''; }; deterministic = mkDerivation { inherit checkBuildId; name = "deterministic"; - buildCommand = - '' - mkdir $out - echo date > $out/date - echo "CHECK_TMPDIR=$TMPDIR" - echo "checkBuildId=$checkBuildId" - echo "$checkBuildId" > $TMPDIR/checkBuildId - ''; + buildCommand = '' + mkdir $out + echo date > $out/date + echo "CHECK_TMPDIR=$TMPDIR" + echo "checkBuildId=$checkBuildId" + echo "$checkBuildId" > $TMPDIR/checkBuildId + ''; }; failed = mkDerivation { inherit checkBuildId; name = "failed"; - buildCommand = - '' - mkdir $out - echo date > $out/date - echo "CHECK_TMPDIR=$TMPDIR" - echo "checkBuildId=$checkBuildId" - echo "$checkBuildId" > $TMPDIR/checkBuildId - false - ''; + buildCommand = '' + mkdir $out + echo date > $out/date + echo "CHECK_TMPDIR=$TMPDIR" + echo "checkBuildId=$checkBuildId" + echo "$checkBuildId" > $TMPDIR/checkBuildId + false + ''; }; hashmismatch = import { diff --git a/tests/functional/dependencies.nix b/tests/functional/dependencies.nix index 4ff29227fd3..570ea743135 100644 --- a/tests/functional/dependencies.nix +++ b/tests/functional/dependencies.nix @@ -1,4 +1,6 @@ -{ hashInvalidator ? "" }: +{ + hashInvalidator ? 
"", +}: with import ./config.nix; let diff --git a/tests/functional/derivation/advanced-attributes-defaults.nix b/tests/functional/derivation/advanced-attributes-defaults.nix index 51a8d0e7e1a..d466003b00d 100644 --- a/tests/functional/derivation/advanced-attributes-defaults.nix +++ b/tests/functional/derivation/advanced-attributes-defaults.nix @@ -2,5 +2,8 @@ derivation { name = "advanced-attributes-defaults"; system = "my-system"; builder = "/bin/bash"; - args = [ "-c" "echo hello > $out" ]; + args = [ + "-c" + "echo hello > $out" + ]; } diff --git a/tests/functional/derivation/advanced-attributes-structured-attrs-defaults.nix b/tests/functional/derivation/advanced-attributes-structured-attrs-defaults.nix index 0c13a76911f..3c6ad4900d6 100644 --- a/tests/functional/derivation/advanced-attributes-structured-attrs-defaults.nix +++ b/tests/functional/derivation/advanced-attributes-structured-attrs-defaults.nix @@ -2,7 +2,13 @@ derivation { name = "advanced-attributes-structured-attrs-defaults"; system = "my-system"; builder = "/bin/bash"; - args = [ "-c" "echo hello > $out" ]; - outputs = [ "out" "dev" ]; + args = [ + "-c" + "echo hello > $out" + ]; + outputs = [ + "out" + "dev" + ]; __structuredAttrs = true; } diff --git a/tests/functional/derivation/advanced-attributes-structured-attrs.nix b/tests/functional/derivation/advanced-attributes-structured-attrs.nix index 0044b65fd41..4c596be45e9 100644 --- a/tests/functional/derivation/advanced-attributes-structured-attrs.nix +++ b/tests/functional/derivation/advanced-attributes-structured-attrs.nix @@ -4,42 +4,58 @@ let inherit system; name = "foo"; builder = "/bin/bash"; - args = ["-c" "echo foo > $out"]; + args = [ + "-c" + "echo foo > $out" + ]; }; bar = derivation { inherit system; name = "bar"; builder = "/bin/bash"; - args = ["-c" "echo bar > $out"]; + args = [ + "-c" + "echo bar > $out" + ]; }; in derivation { inherit system; name = "advanced-attributes-structured-attrs"; builder = "/bin/bash"; - args = [ "-c" "echo hello > $out" ]; + args = [ + "-c" + "echo hello > $out" + ]; __sandboxProfile = "sandcastle"; __noChroot = true; - __impureHostDeps = ["/usr/bin/ditto"]; - impureEnvVars = ["UNICORN"]; + __impureHostDeps = [ "/usr/bin/ditto" ]; + impureEnvVars = [ "UNICORN" ]; __darwinAllowLocalNetworking = true; - outputs = [ "out" "bin" "dev" ]; + outputs = [ + "out" + "bin" + "dev" + ]; __structuredAttrs = true; outputChecks = { out = { - allowedReferences = [foo]; - allowedRequisites = [foo]; + allowedReferences = [ foo ]; + allowedRequisites = [ foo ]; }; bin = { - disallowedReferences = [bar]; - disallowedRequisites = [bar]; + disallowedReferences = [ bar ]; + disallowedRequisites = [ bar ]; }; dev = { maxSize = 789; maxClosureSize = 5909; }; }; - requiredSystemFeatures = ["rainbow" "uid-range"]; + requiredSystemFeatures = [ + "rainbow" + "uid-range" + ]; preferLocalBuild = true; allowSubstitutes = false; } diff --git a/tests/functional/derivation/advanced-attributes.nix b/tests/functional/derivation/advanced-attributes.nix index ff680c5677f..7f365ce65e2 100644 --- a/tests/functional/derivation/advanced-attributes.nix +++ b/tests/functional/derivation/advanced-attributes.nix @@ -4,30 +4,42 @@ let inherit system; name = "foo"; builder = "/bin/bash"; - args = ["-c" "echo foo > $out"]; + args = [ + "-c" + "echo foo > $out" + ]; }; bar = derivation { inherit system; name = "bar"; builder = "/bin/bash"; - args = ["-c" "echo bar > $out"]; + args = [ + "-c" + "echo bar > $out" + ]; }; in derivation { inherit system; name = 
"advanced-attributes"; builder = "/bin/bash"; - args = [ "-c" "echo hello > $out" ]; + args = [ + "-c" + "echo hello > $out" + ]; __sandboxProfile = "sandcastle"; __noChroot = true; - __impureHostDeps = ["/usr/bin/ditto"]; - impureEnvVars = ["UNICORN"]; + __impureHostDeps = [ "/usr/bin/ditto" ]; + impureEnvVars = [ "UNICORN" ]; __darwinAllowLocalNetworking = true; - allowedReferences = [foo]; - allowedRequisites = [foo]; - disallowedReferences = [bar]; - disallowedRequisites = [bar]; - requiredSystemFeatures = ["rainbow" "uid-range"]; + allowedReferences = [ foo ]; + allowedRequisites = [ foo ]; + disallowedReferences = [ bar ]; + disallowedRequisites = [ bar ]; + requiredSystemFeatures = [ + "rainbow" + "uid-range" + ]; preferLocalBuild = true; allowSubstitutes = false; } diff --git a/tests/functional/dyn-drv/recursive-mod-json.nix b/tests/functional/dyn-drv/recursive-mod-json.nix index c6a24ca4f3b..2d46e4e2e02 100644 --- a/tests/functional/dyn-drv/recursive-mod-json.nix +++ b/tests/functional/dyn-drv/recursive-mod-json.nix @@ -1,6 +1,8 @@ with import ./config.nix; -let innerName = "foo"; in +let + innerName = "foo"; +in mkDerivation rec { name = "${innerName}.drv"; diff --git a/tests/functional/export-graph.nix b/tests/functional/export-graph.nix index 64fe36bd1ef..5078eec8319 100644 --- a/tests/functional/export-graph.nix +++ b/tests/functional/export-graph.nix @@ -2,28 +2,33 @@ with import ./config.nix; rec { - printRefs = - '' - echo $exportReferencesGraph - while read path; do - read drv - read nrRefs - echo "$path has $nrRefs references" - echo "$path" >> $out - for ((n = 0; n < $nrRefs; n++)); do read ref; echo "ref $ref"; test -e "$ref"; done - done < refs - ''; + printRefs = '' + echo $exportReferencesGraph + while read path; do + read drv + read nrRefs + echo "$path has $nrRefs references" + echo "$path" >> $out + for ((n = 0; n < $nrRefs; n++)); do read ref; echo "ref $ref"; test -e "$ref"; done + done < refs + ''; foo."bar.runtimeGraph" = mkDerivation { name = "dependencies"; builder = builtins.toFile "build-graph-builder" "${printRefs}"; - exportReferencesGraph = ["refs" (import ./dependencies.nix {})]; + exportReferencesGraph = [ + "refs" + (import ./dependencies.nix { }) + ]; }; foo."bar.buildGraph" = mkDerivation { name = "dependencies"; builder = builtins.toFile "build-graph-builder" "${printRefs}"; - exportReferencesGraph = ["refs" (import ./dependencies.nix {}).drvPath]; + exportReferencesGraph = [ + "refs" + (import ./dependencies.nix { }).drvPath + ]; }; } diff --git a/tests/functional/failing.nix b/tests/functional/failing.nix index d25e2d6b62b..8abae1856cf 100644 --- a/tests/functional/failing.nix +++ b/tests/functional/failing.nix @@ -2,16 +2,29 @@ with import ./config.nix; let - mkDerivation = args: - derivation ({ - inherit system; - builder = busybox; - args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" '' - if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi; - eval "$buildCommand" - '')]; - } // removeAttrs args ["builder" "meta"]) - // { meta = args.meta or {}; }; + mkDerivation = + args: + derivation ( + { + inherit system; + builder = busybox; + args = [ + "sh" + "-e" + args.builder or (builtins.toFile "builder-${args.name}.sh" '' + if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi; + eval "$buildCommand" + '') + ]; + } + // removeAttrs args [ + "builder" + "meta" + ] + ) + // { + meta = args.meta or { }; + }; in { diff --git a/tests/functional/filter-source.nix b/tests/functional/filter-source.nix 
index 9071636394a..7bad263f842 100644 --- a/tests/functional/filter-source.nix +++ b/tests/functional/filter-source.nix @@ -4,9 +4,12 @@ mkDerivation { name = "filter"; builder = builtins.toFile "builder" "ln -s $input $out"; input = - let filter = path: type: - type != "symlink" - && baseNameOf path != "foo" - && !((import ./lang/lib.nix).hasSuffix ".bak" (baseNameOf path)); - in builtins.filterSource filter ((builtins.getEnv "TEST_ROOT") + "/filterin"); + let + filter = + path: type: + type != "symlink" + && baseNameOf path != "foo" + && !((import ./lang/lib.nix).hasSuffix ".bak" (baseNameOf path)); + in + builtins.filterSource filter ((builtins.getEnv "TEST_ROOT") + "/filterin"); } diff --git a/tests/functional/fixed.nix b/tests/functional/fixed.nix index a920a21671f..4097a63741f 100644 --- a/tests/functional/fixed.nix +++ b/tests/functional/fixed.nix @@ -2,15 +2,20 @@ with import ./config.nix; rec { - f2 = dummy: builder: mode: algo: hash: mkDerivation { - name = "fixed"; - inherit builder; - outputHashMode = mode; - outputHashAlgo = algo; - outputHash = hash; - inherit dummy; - impureEnvVars = ["IMPURE_VAR1" "IMPURE_VAR2"]; - }; + f2 = + dummy: builder: mode: algo: hash: + mkDerivation { + name = "fixed"; + inherit builder; + outputHashMode = mode; + outputHashAlgo = algo; + outputHash = hash; + inherit dummy; + impureEnvVars = [ + "IMPURE_VAR1" + "IMPURE_VAR2" + ]; + }; f = f2 ""; @@ -37,7 +42,8 @@ rec { ]; sameAsAdd = - f ./fixed.builder2.sh "recursive" "sha256" "1ixr6yd3297ciyp9im522dfxpqbkhcw0pylkb2aab915278fqaik"; + f ./fixed.builder2.sh "recursive" "sha256" + "1ixr6yd3297ciyp9im522dfxpqbkhcw0pylkb2aab915278fqaik"; bad = [ (f ./fixed.builder1.sh "flat" "md5" "0ddd8be4b179a529afa5f2ffae4b9858") diff --git a/tests/functional/fod-failing.nix b/tests/functional/fod-failing.nix index 37c04fe12f8..0de676c1536 100644 --- a/tests/functional/fod-failing.nix +++ b/tests/functional/fod-failing.nix @@ -2,38 +2,34 @@ with import ./config.nix; rec { x1 = mkDerivation { name = "x1"; - builder = builtins.toFile "builder.sh" - '' - echo $name > $out - ''; + builder = builtins.toFile "builder.sh" '' + echo $name > $out + ''; outputHashMode = "recursive"; outputHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="; }; x2 = mkDerivation { name = "x2"; - builder = builtins.toFile "builder.sh" - '' - echo $name > $out - ''; + builder = builtins.toFile "builder.sh" '' + echo $name > $out + ''; outputHashMode = "recursive"; outputHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="; }; x3 = mkDerivation { name = "x3"; - builder = builtins.toFile "builder.sh" - '' - echo $name > $out - ''; + builder = builtins.toFile "builder.sh" '' + echo $name > $out + ''; outputHashMode = "recursive"; outputHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="; }; x4 = mkDerivation { name = "x4"; inherit x2 x3; - builder = builtins.toFile "builder.sh" - '' - echo $x2 $x3 - exit 1 - ''; + builder = builtins.toFile "builder.sh" '' + echo $x2 $x3 + exit 1 + ''; }; } diff --git a/tests/functional/gc-concurrent.nix b/tests/functional/gc-concurrent.nix index 0aba1f98307..d7483d88f12 100644 --- a/tests/functional/gc-concurrent.nix +++ b/tests/functional/gc-concurrent.nix @@ -1,6 +1,8 @@ with import ./config.nix; -{ lockFifo ? null }: +{ + lockFifo ? 
null, +}: rec { diff --git a/tests/functional/hash-check.nix b/tests/functional/hash-check.nix index 4a8e9b8a8df..7a48a620b79 100644 --- a/tests/functional/hash-check.nix +++ b/tests/functional/hash-check.nix @@ -4,14 +4,22 @@ let { name = "dependencies-input-1"; system = "i086-msdos"; builder = "/bar/sh"; - args = ["-e" "-x" ./dummy]; + args = [ + "-e" + "-x" + ./dummy + ]; }; input2 = derivation { name = "dependencies-input-2"; system = "i086-msdos"; builder = "/bar/sh"; - args = ["-e" "-x" ./dummy]; + args = [ + "-e" + "-x" + ./dummy + ]; outputHashMode = "recursive"; outputHashAlgo = "md5"; outputHash = "ffffffffffffffffffffffffffffffff"; @@ -21,9 +29,13 @@ let { name = "dependencies"; system = "i086-msdos"; builder = "/bar/sh"; - args = ["-e" "-x" (./dummy + "/FOOBAR/../.")]; + args = [ + "-e" + "-x" + (./dummy + "/FOOBAR/../.") + ]; input1 = input1 + "/."; inherit input2; }; -} \ No newline at end of file +} diff --git a/tests/functional/hermetic.nix b/tests/functional/hermetic.nix index d1dccdff3d5..a5071466474 100644 --- a/tests/functional/hermetic.nix +++ b/tests/functional/hermetic.nix @@ -1,31 +1,51 @@ -{ busybox -, seed -# If we want the final derivation output to have references to its -# dependencies. Some tests need/want this, other don't. -, withFinalRefs ? false +{ + busybox, + seed, + # If we want the final derivation output to have references to its + # dependencies. Some tests need/want this, other don't. + withFinalRefs ? false, }: with import ./config.nix; let contentAddressedByDefault = builtins.getEnv "NIX_TESTS_CA_BY_DEFAULT" == "1"; - caArgs = if contentAddressedByDefault then { - __contentAddressed = true; - outputHashMode = "recursive"; - outputHashAlgo = "sha256"; - } else {}; + caArgs = + if contentAddressedByDefault then + { + __contentAddressed = true; + outputHashMode = "recursive"; + outputHashAlgo = "sha256"; + } + else + { }; - mkDerivation = args: - derivation ({ - inherit system; - builder = busybox; - args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" '' - if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi; - eval "$buildCommand" - '')]; - } // removeAttrs args ["builder" "meta" "passthru"] - // caArgs) - // { meta = args.meta or {}; passthru = args.passthru or {}; }; + mkDerivation = + args: + derivation ( + { + inherit system; + builder = busybox; + args = [ + "sh" + "-e" + args.builder or (builtins.toFile "builder-${args.name}.sh" '' + if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi; + eval "$buildCommand" + '') + ]; + } + // removeAttrs args [ + "builder" + "meta" + "passthru" + ] + // caArgs + ) + // { + meta = args.meta or { }; + passthru = args.passthru or { }; + }; input1 = mkDerivation { shell = busybox; @@ -51,14 +71,15 @@ let in - mkDerivation { - shell = busybox; - name = "hermetic"; - passthru = { inherit input1 input2 input3; }; - buildCommand = - '' - read x < ${input1} - read y < ${input3} - echo ${if (builtins.trace withFinalRefs withFinalRefs) then "${input1} ${input3}" else ""} "$x $y" > $out - ''; - } +mkDerivation { + shell = busybox; + name = "hermetic"; + passthru = { inherit input1 input2 input3; }; + buildCommand = '' + read x < ${input1} + read y < ${input3} + echo ${ + if (builtins.trace withFinalRefs withFinalRefs) then "${input1} ${input3}" else "" + } "$x $y" > $out + ''; +} diff --git a/tests/functional/ifd.nix b/tests/functional/ifd.nix index d0b9b54add0..b8c04f72cac 100644 --- a/tests/functional/ifd.nix +++ b/tests/functional/ifd.nix @@ -1,10 +1,8 @@ with 
import ./config.nix; -import ( - mkDerivation { - name = "foo"; - bla = import ./dependencies.nix {}; - buildCommand = " +import (mkDerivation { + name = "foo"; + bla = import ./dependencies.nix { }; + buildCommand = " echo \\\"hi\\\" > $out "; - } -) +}) diff --git a/tests/functional/import-from-derivation.nix b/tests/functional/import-from-derivation.nix index 770dd86cf73..600f448a6f9 100644 --- a/tests/functional/import-from-derivation.nix +++ b/tests/functional/import-from-derivation.nix @@ -3,10 +3,9 @@ with import ; rec { bar = mkDerivation { name = "bar"; - builder = builtins.toFile "builder.sh" - '' - echo 'builtins.add 123 456' > $out - ''; + builder = builtins.toFile "builder.sh" '' + echo 'builtins.add 123 456' > $out + ''; }; value = @@ -16,19 +15,17 @@ rec { result = mkDerivation { name = "foo"; - builder = builtins.toFile "builder.sh" - '' - echo -n FOO${toString value} > $out - ''; + builder = builtins.toFile "builder.sh" '' + echo -n FOO${toString value} > $out + ''; }; addPath = mkDerivation { name = "add-path"; src = builtins.filterSource (path: type: true) result; - builder = builtins.toFile "builder.sh" - '' - echo -n BLA$(cat $src) > $out - ''; + builder = builtins.toFile "builder.sh" '' + echo -n BLA$(cat $src) > $out + ''; }; step1 = mkDerivation { diff --git a/tests/functional/impure-derivations.nix b/tests/functional/impure-derivations.nix index 98547e6c1d6..806f20577d3 100644 --- a/tests/functional/impure-derivations.nix +++ b/tests/functional/impure-derivations.nix @@ -4,60 +4,58 @@ rec { impure = mkDerivation { name = "impure"; - outputs = [ "out" "stuff" ]; - buildCommand = - '' - echo impure - x=$(< $TEST_ROOT/counter) - mkdir $out $stuff - echo $x > $out/n - ln -s $out/n $stuff/bla - printf $((x + 1)) > $TEST_ROOT/counter - ''; + outputs = [ + "out" + "stuff" + ]; + buildCommand = '' + echo impure + x=$(< $TEST_ROOT/counter) + mkdir $out $stuff + echo $x > $out/n + ln -s $out/n $stuff/bla + printf $((x + 1)) > $TEST_ROOT/counter + ''; __impure = true; impureEnvVars = [ "TEST_ROOT" ]; }; impureOnImpure = mkDerivation { name = "impure-on-impure"; - buildCommand = - '' - echo impure-on-impure - x=$(< ${impure}/n) - mkdir $out - printf X$x > $out/n - ln -s ${impure.stuff} $out/symlink - ln -s $out $out/self - ''; + buildCommand = '' + echo impure-on-impure + x=$(< ${impure}/n) + mkdir $out + printf X$x > $out/n + ln -s ${impure.stuff} $out/symlink + ln -s $out $out/self + ''; __impure = true; }; # This is not allowed. 
inputAddressed = mkDerivation { name = "input-addressed"; - buildCommand = - '' - cat ${impure} > $out - ''; + buildCommand = '' + cat ${impure} > $out + ''; }; contentAddressed = mkDerivation { name = "content-addressed"; - buildCommand = - '' - echo content-addressed - x=$(< ${impureOnImpure}/n) - printf ''${x:0:1} > $out - ''; + buildCommand = '' + echo content-addressed + x=$(< ${impureOnImpure}/n) + printf ''${x:0:1} > $out + ''; outputHashMode = "recursive"; outputHash = "sha256-eBYxcgkuWuiqs4cKNgKwkb3vY/HR0vVsJnqe8itJGcQ="; }; inputAddressedAfterCA = mkDerivation { name = "input-addressed-after-ca"; - buildCommand = - '' - cat ${contentAddressed} > $out - ''; + buildCommand = '' + cat ${contentAddressed} > $out + ''; }; } diff --git a/tests/functional/lang-gc/issue-11141-gc-coroutine-test.nix b/tests/functional/lang-gc/issue-11141-gc-coroutine-test.nix index 4f311af75d7..6dae5c155dd 100644 --- a/tests/functional/lang-gc/issue-11141-gc-coroutine-test.nix +++ b/tests/functional/lang-gc/issue-11141-gc-coroutine-test.nix @@ -1,4 +1,3 @@ - # Run: # GC_INITIAL_HEAP_SIZE=$[1024 * 1024] NIX_SHOW_STATS=1 nix eval -f gc-coroutine-test.nix -vvvv @@ -11,55 +10,56 @@ let # Generate a tree of numbers, n deep, such that the numbers add up to (1 + salt) * 10^n. # The salting makes the numbers all different, increasing the likelihood of catching # any memory corruptions that might be caused by the GC or otherwise. - garbage = salt: n: - if n == 0 - then [(1 + salt)] - else [ - (garbage (10 * salt + 1) (n - 1)) - (garbage (10 * salt - 1) (n - 1)) - (garbage (10 * salt + 2) (n - 1)) - (garbage (10 * salt - 2) (n - 1)) - (garbage (10 * salt + 3) (n - 1)) - (garbage (10 * salt - 3) (n - 1)) - (garbage (10 * salt + 4) (n - 1)) - (garbage (10 * salt - 4) (n - 1)) - (garbage (10 * salt + 5) (n - 1)) - (garbage (10 * salt - 5) (n - 1)) - ]; + garbage = + salt: n: + if n == 0 then + [ (1 + salt) ] + else + [ + (garbage (10 * salt + 1) (n - 1)) + (garbage (10 * salt - 1) (n - 1)) + (garbage (10 * salt + 2) (n - 1)) + (garbage (10 * salt - 2) (n - 1)) + (garbage (10 * salt + 3) (n - 1)) + (garbage (10 * salt - 3) (n - 1)) + (garbage (10 * salt + 4) (n - 1)) + (garbage (10 * salt - 4) (n - 1)) + (garbage (10 * salt + 5) (n - 1)) + (garbage (10 * salt - 5) (n - 1)) + ]; - pow = base: n: - if n == 0 - then 1 - else base * (pow base (n - 1)); + pow = base: n: if n == 0 then 1 else base * (pow base (n - 1)); - sumNestedLists = l: - if isList l - then foldl' (a: b: a + sumNestedLists b) 0 l - else l; + sumNestedLists = l: if isList l then foldl' (a: b: a + sumNestedLists b) 0 l else l; in - assert sumNestedLists (garbage 0 3) == pow 10 3; - assert sumNestedLists (garbage 0 6) == pow 10 6; - builtins.foldl' - (a: b: - assert - "${ - builtins.path { - path = ./src; - filter = path: type: - # We're not doing common subexpression elimination, so this reallocates - # the fairly big tree over and over, producing a lot of garbage during - # source filtering, whose filter runs in a coroutine. - assert sumNestedLists (garbage 0 3) == pow 10 3; - true; - } - }" - == "${./src}"; +assert sumNestedLists (garbage 0 3) == pow 10 3; +assert sumNestedLists (garbage 0 6) == pow 10 6; +builtins.foldl' + ( + a: b: + assert + "${builtins.path { + path = ./src; + filter = + path: type: + # We're not doing common subexpression elimination, so this reallocates + # the fairly big tree over and over, producing a lot of garbage during + # source filtering, whose filter runs in a coroutine. 
+ assert sumNestedLists (garbage 0 3) == pow 10 3; + true; + }}" == "${./src}"; - # These asserts don't seem necessary, as the lambda value get corrupted first - assert a.okay; - assert b.okay; - { okay = true; } - ) + # These asserts don't seem necessary, as the lambda value get corrupted first + assert a.okay; + assert b.okay; + { + okay = true; + } + ) + { okay = true; } + [ + { okay = true; } + { okay = true; } { okay = true; } - [ { okay = true; } { okay = true; } { okay = true; } ] + ] diff --git a/tests/functional/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.nix b/tests/functional/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.nix index dbde264dfae..a1c3461cf48 100644 --- a/tests/functional/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.nix +++ b/tests/functional/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.nix @@ -3,16 +3,23 @@ let name = "fail"; builder = "/bin/false"; system = "x86_64-linux"; - outputs = [ "out" "foo" ]; + outputs = [ + "out" + "foo" + ]; }; drv1 = derivation { name = "fail-2"; builder = "/bin/false"; system = "x86_64-linux"; - outputs = [ "out" "foo" ]; + outputs = [ + "out" + "foo" + ]; }; combo-path = "${drv0.drvPath}${drv1.drvPath}"; -in builtins.addDrvOutputDependencies combo-path +in +builtins.addDrvOutputDependencies combo-path diff --git a/tests/functional/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.nix b/tests/functional/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.nix index e379e1d9598..6aab61c4068 100644 --- a/tests/functional/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.nix +++ b/tests/functional/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.nix @@ -3,7 +3,11 @@ let name = "fail"; builder = "/bin/false"; system = "x86_64-linux"; - outputs = [ "out" "foo" ]; + outputs = [ + "out" + "foo" + ]; }; -in builtins.addDrvOutputDependencies drv.outPath +in +builtins.addDrvOutputDependencies drv.outPath diff --git a/tests/functional/lang/eval-fail-addErrorContext-example.nix b/tests/functional/lang/eval-fail-addErrorContext-example.nix index 996b2468849..96a9cef84e7 100644 --- a/tests/functional/lang/eval-fail-addErrorContext-example.nix +++ b/tests/functional/lang/eval-fail-addErrorContext-example.nix @@ -1,9 +1,9 @@ let - countDown = n: - if n == 0 - then throw "kaboom" + countDown = + n: + if n == 0 then + throw "kaboom" else - builtins.addErrorContext - "while counting down; n = ${toString n}" - ("x" + countDown (n - 1)); -in countDown 10 + builtins.addErrorContext "while counting down; n = ${toString n}" ("x" + countDown (n - 1)); +in +countDown 10 diff --git a/tests/functional/lang/eval-fail-assert-equal-attrs-names-2.nix b/tests/functional/lang/eval-fail-assert-equal-attrs-names-2.nix index 8e7ac9cf2be..4bce2645612 100644 --- a/tests/functional/lang/eval-fail-assert-equal-attrs-names-2.nix +++ b/tests/functional/lang/eval-fail-assert-equal-attrs-names-2.nix @@ -1,2 +1,8 @@ -assert { a = true; } == { a = true; b = true; }; +assert + { + a = true; + } == { + a = true; + b = true; + }; throw "unreachable" diff --git a/tests/functional/lang/eval-fail-assert-equal-attrs-names.nix b/tests/functional/lang/eval-fail-assert-equal-attrs-names.nix index e2f53a85ad6..f9956999fa4 100644 --- a/tests/functional/lang/eval-fail-assert-equal-attrs-names.nix +++ b/tests/functional/lang/eval-fail-assert-equal-attrs-names.nix @@ -1,2 +1,8 @@ -assert { a = true; b = true; } == { a = true; }; +assert + { + a = true; + b = true; + } == { + a = true; + }; throw 
"unreachable" diff --git a/tests/functional/lang/eval-fail-assert-equal-derivations-extra.nix b/tests/functional/lang/eval-fail-assert-equal-derivations-extra.nix index fd8bc3f26ca..14a782a7743 100644 --- a/tests/functional/lang/eval-fail-assert-equal-derivations-extra.nix +++ b/tests/functional/lang/eval-fail-assert-equal-derivations-extra.nix @@ -1,5 +1,14 @@ assert - { foo = { type = "derivation"; outPath = "/nix/store/0"; }; } - == - { foo = { type = "derivation"; outPath = "/nix/store/1"; devious = true; }; }; -throw "unreachable" \ No newline at end of file + { + foo = { + type = "derivation"; + outPath = "/nix/store/0"; + }; + } == { + foo = { + type = "derivation"; + outPath = "/nix/store/1"; + devious = true; + }; + }; +throw "unreachable" diff --git a/tests/functional/lang/eval-fail-assert-equal-derivations.nix b/tests/functional/lang/eval-fail-assert-equal-derivations.nix index c648eae374b..0f6748c58bf 100644 --- a/tests/functional/lang/eval-fail-assert-equal-derivations.nix +++ b/tests/functional/lang/eval-fail-assert-equal-derivations.nix @@ -1,5 +1,15 @@ assert - { foo = { type = "derivation"; outPath = "/nix/store/0"; ignored = abort "not ignored"; }; } - == - { foo = { type = "derivation"; outPath = "/nix/store/1"; ignored = abort "not ignored"; }; }; -throw "unreachable" \ No newline at end of file + { + foo = { + type = "derivation"; + outPath = "/nix/store/0"; + ignored = abort "not ignored"; + }; + } == { + foo = { + type = "derivation"; + outPath = "/nix/store/1"; + ignored = abort "not ignored"; + }; + }; +throw "unreachable" diff --git a/tests/functional/lang/eval-fail-assert-equal-function-direct.nix b/tests/functional/lang/eval-fail-assert-equal-function-direct.nix index 68e5e390823..cd15c4a36d8 100644 --- a/tests/functional/lang/eval-fail-assert-equal-function-direct.nix +++ b/tests/functional/lang/eval-fail-assert-equal-function-direct.nix @@ -1,7 +1,4 @@ # Note: functions in nested structures, e.g. attributes, may be optimized away by pointer identity optimization. # This only compares a direct comparison and makes no claims about functions in nested structures. 
-assert - (x: x) - == - (x: x); -abort "unreachable" \ No newline at end of file +assert (x: x) == (x: x); +abort "unreachable" diff --git a/tests/functional/lang/eval-fail-assert-equal-list-length.nix b/tests/functional/lang/eval-fail-assert-equal-list-length.nix index 6d40f4d8e83..bd74ccccd34 100644 --- a/tests/functional/lang/eval-fail-assert-equal-list-length.nix +++ b/tests/functional/lang/eval-fail-assert-equal-list-length.nix @@ -1,2 +1,6 @@ -assert [ 1 0 ] == [ 10 ]; -throw "unreachable" \ No newline at end of file +assert + [ + 1 + 0 + ] == [ 10 ]; +throw "unreachable" diff --git a/tests/functional/lang/eval-fail-assert-equal-paths.nix b/tests/functional/lang/eval-fail-assert-equal-paths.nix index ef0b6702466..647e891b8ac 100644 --- a/tests/functional/lang/eval-fail-assert-equal-paths.nix +++ b/tests/functional/lang/eval-fail-assert-equal-paths.nix @@ -1,2 +1,2 @@ assert ./foo == ./bar; -throw "unreachable" \ No newline at end of file +throw "unreachable" diff --git a/tests/functional/lang/eval-fail-assert-nested-bool.nix b/tests/functional/lang/eval-fail-assert-nested-bool.nix index 2285769839e..c75fe06106b 100644 --- a/tests/functional/lang/eval-fail-assert-nested-bool.nix +++ b/tests/functional/lang/eval-fail-assert-nested-bool.nix @@ -1,6 +1,3 @@ -assert - { a.b = [ { c.d = true; } ]; } - == - { a.b = [ { c.d = false; } ]; }; +assert { a.b = [ { c.d = true; } ]; } == { a.b = [ { c.d = false; } ]; }; -abort "unreachable" \ No newline at end of file +abort "unreachable" diff --git a/tests/functional/lang/eval-fail-assert.nix b/tests/functional/lang/eval-fail-assert.nix index 3b7a1e8bf0c..7cb77504507 100644 --- a/tests/functional/lang/eval-fail-assert.nix +++ b/tests/functional/lang/eval-fail-assert.nix @@ -1,5 +1,8 @@ let { - x = arg: assert arg == "y"; 123; + x = + arg: + assert arg == "y"; + 123; body = x "x"; -} \ No newline at end of file +} diff --git a/tests/functional/lang/eval-fail-attr-name-type.nix b/tests/functional/lang/eval-fail-attr-name-type.nix index a0e76004a39..fb6ccdd41d5 100644 --- a/tests/functional/lang/eval-fail-attr-name-type.nix +++ b/tests/functional/lang/eval-fail-attr-name-type.nix @@ -1,7 +1,7 @@ let attrs = { - puppy.doggy = {}; + puppy.doggy = { }; }; key = 1; in - attrs.puppy.${key} +attrs.puppy.${key} diff --git a/tests/functional/lang/eval-fail-attrset-merge-drops-later-rec.nix b/tests/functional/lang/eval-fail-attrset-merge-drops-later-rec.nix index fdb314b9193..b6b56bf7d42 100644 --- a/tests/functional/lang/eval-fail-attrset-merge-drops-later-rec.nix +++ b/tests/functional/lang/eval-fail-attrset-merge-drops-later-rec.nix @@ -1 +1,8 @@ -{ a.b = 1; a = rec { c = d + 2; d = 3; }; }.c +{ + a.b = 1; + a = rec { + c = d + 2; + d = 3; + }; +} +.c diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-4.nix b/tests/functional/lang/eval-fail-bad-string-interpolation-4.nix index 457b5f06a88..e8349bbdff3 100644 --- a/tests/functional/lang/eval-fail-bad-string-interpolation-4.nix +++ b/tests/functional/lang/eval-fail-bad-string-interpolation-4.nix @@ -1,6 +1,16 @@ let # Basically a "billion laughs" attack, but toned down to simulated `pkgs`. - ha = x: y: { a = x y; b = x y; c = x y; d = x y; e = x y; f = x y; g = x y; h = x y; j = x y; }; + ha = x: y: { + a = x y; + b = x y; + c = x y; + d = x y; + e = x y; + f = x y; + g = x y; + h = x y; + j = x y; + }; has = ha (ha (ha (ha (x: x)))) "ha"; # A large structure that has already been evaluated. 
pkgs = builtins.deepSeq has has; diff --git a/tests/functional/lang/eval-fail-dup-dynamic-attrs.nix b/tests/functional/lang/eval-fail-dup-dynamic-attrs.nix index 7ea17f6c878..93cceefa48e 100644 --- a/tests/functional/lang/eval-fail-dup-dynamic-attrs.nix +++ b/tests/functional/lang/eval-fail-dup-dynamic-attrs.nix @@ -1,4 +1,8 @@ { - set = { "${"" + "b"}" = 1; }; - set = { "${"b" + ""}" = 2; }; + set = { + "${"" + "b"}" = 1; + }; + set = { + "${"b" + ""}" = 2; + }; } diff --git a/tests/functional/lang/eval-fail-duplicate-traces.nix b/tests/functional/lang/eval-fail-duplicate-traces.nix index 17ce374ece7..90526f6d48c 100644 --- a/tests/functional/lang/eval-fail-duplicate-traces.nix +++ b/tests/functional/lang/eval-fail-duplicate-traces.nix @@ -1,9 +1,6 @@ # Check that we only omit duplicate stack traces when there's a bunch of them. # Here, there's only a couple duplicate entries, so we output them all. let - throwAfter = n: - if n > 0 - then throwAfter (n - 1) - else throw "Uh oh!"; + throwAfter = n: if n > 0 then throwAfter (n - 1) else throw "Uh oh!"; in - throwAfter 2 +throwAfter 2 diff --git a/tests/functional/lang/eval-fail-fetchurl-baseName-attrs-name.nix b/tests/functional/lang/eval-fail-fetchurl-baseName-attrs-name.nix index 5838055390d..dcaf7202b11 100644 --- a/tests/functional/lang/eval-fail-fetchurl-baseName-attrs-name.nix +++ b/tests/functional/lang/eval-fail-fetchurl-baseName-attrs-name.nix @@ -1 +1,4 @@ -builtins.fetchurl { url = "https://example.com/foo.tar.gz"; name = "~wobble~"; } +builtins.fetchurl { + url = "https://example.com/foo.tar.gz"; + name = "~wobble~"; +} diff --git a/tests/functional/lang/eval-fail-flake-ref-to-string-negative-integer.nix b/tests/functional/lang/eval-fail-flake-ref-to-string-negative-integer.nix index e0208eb2519..9cc9ef6295b 100644 --- a/tests/functional/lang/eval-fail-flake-ref-to-string-negative-integer.nix +++ b/tests/functional/lang/eval-fail-flake-ref-to-string-negative-integer.nix @@ -1,7 +1,12 @@ -let n = -1; in builtins.seq n (builtins.flakeRefToString { - type = "github"; - owner = "NixOS"; - repo = n; - ref = "23.05"; - dir = "lib"; -}) +let + n = -1; +in +builtins.seq n ( + builtins.flakeRefToString { + type = "github"; + owner = "NixOS"; + repo = n; + ref = "23.05"; + dir = "lib"; + } +) diff --git a/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.nix b/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.nix index 1620cc76eeb..f85486d441e 100644 --- a/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.nix +++ b/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.nix @@ -1,5 +1,5 @@ # Tests that the result of applying op is forced even if the value is never used -builtins.foldl' - (_: f: f null) - null - [ (_: throw "Not the final value, but is still forced!") (_: 23) ] +builtins.foldl' (_: f: f null) null [ + (_: throw "Not the final value, but is still forced!") + (_: 23) +] diff --git a/tests/functional/lang/eval-fail-hashfile-missing.nix b/tests/functional/lang/eval-fail-hashfile-missing.nix index ce098b82380..0f2872b7155 100644 --- a/tests/functional/lang/eval-fail-hashfile-missing.nix +++ b/tests/functional/lang/eval-fail-hashfile-missing.nix @@ -1,5 +1,16 @@ let - paths = [ ./this-file-is-definitely-not-there-7392097 "/and/neither/is/this/37293620" ]; + paths = [ + ./this-file-is-definitely-not-there-7392097 + "/and/neither/is/this/37293620" + ]; in - toString (builtins.concatLists (map (hash: map (builtins.hashFile hash) paths) ["md5" "sha1" "sha256" "sha512"])) - 
+toString ( + builtins.concatLists ( + map (hash: map (builtins.hashFile hash) paths) [ + "md5" + "sha1" + "sha256" + "sha512" + ] + ) +) diff --git a/tests/functional/lang/eval-fail-list.nix b/tests/functional/lang/eval-fail-list.nix index fa749f2f740..14eb4efa9f6 100644 --- a/tests/functional/lang/eval-fail-list.nix +++ b/tests/functional/lang/eval-fail-list.nix @@ -1 +1 @@ -8++1 +8 ++ 1 diff --git a/tests/functional/lang/eval-fail-missing-arg.nix b/tests/functional/lang/eval-fail-missing-arg.nix index c4be9797c53..9037aa40a54 100644 --- a/tests/functional/lang/eval-fail-missing-arg.nix +++ b/tests/functional/lang/eval-fail-missing-arg.nix @@ -1 +1,12 @@ -({x, y, z}: x + y + z) {x = "foo"; z = "bar";} +( + { + x, + y, + z, + }: + x + y + z +) + { + x = "foo"; + z = "bar"; + } diff --git a/tests/functional/lang/eval-fail-mutual-recursion.nix b/tests/functional/lang/eval-fail-mutual-recursion.nix index d090d3158a3..421e464dd86 100644 --- a/tests/functional/lang/eval-fail-mutual-recursion.nix +++ b/tests/functional/lang/eval-fail-mutual-recursion.nix @@ -19,18 +19,22 @@ # - a few frames of A (skip the rest) # - a few frames of B (skip the rest, _and_ skip the remaining frames of A) let - throwAfterB = recurse: n: - if n > 0 - then throwAfterB recurse (n - 1) - else if recurse - then throwAfterA false 10 - else throw "Uh oh!"; + throwAfterB = + recurse: n: + if n > 0 then + throwAfterB recurse (n - 1) + else if recurse then + throwAfterA false 10 + else + throw "Uh oh!"; - throwAfterA = recurse: n: - if n > 0 - then throwAfterA recurse (n - 1) - else if recurse - then throwAfterB true 10 - else throw "Uh oh!"; + throwAfterA = + recurse: n: + if n > 0 then + throwAfterA recurse (n - 1) + else if recurse then + throwAfterB true 10 + else + throw "Uh oh!"; in - throwAfterA true 10 +throwAfterA true 10 diff --git a/tests/functional/lang/eval-fail-nested-list-items.nix b/tests/functional/lang/eval-fail-nested-list-items.nix index af45b1dd49a..d0aa1b5d3b9 100644 --- a/tests/functional/lang/eval-fail-nested-list-items.nix +++ b/tests/functional/lang/eval-fail-nested-list-items.nix @@ -8,4 +8,27 @@ # # error: cannot coerce a list to a string: [ [ 1 2 3 4 5 6 7 8 ] [ 1 «4294967290 items elided» ] ] -"" + (let v = [ [ 1 2 3 4 5 6 7 8 ] [1 2 3 4]]; in builtins.deepSeq v v) +"" ++ ( + let + v = [ + [ + 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + ] + [ + 1 + 2 + 3 + 4 + ] + ]; + in + builtins.deepSeq v v +) diff --git a/tests/functional/lang/eval-fail-not-throws.nix b/tests/functional/lang/eval-fail-not-throws.nix index a74ce4ebeea..2e024738b68 100644 --- a/tests/functional/lang/eval-fail-not-throws.nix +++ b/tests/functional/lang/eval-fail-not-throws.nix @@ -1 +1 @@ -! 
(throw "uh oh!") +!(throw "uh oh!") diff --git a/tests/functional/lang/eval-fail-overflowing-add.nix b/tests/functional/lang/eval-fail-overflowing-add.nix index 24258fc200e..9e1e8aa7571 100644 --- a/tests/functional/lang/eval-fail-overflowing-add.nix +++ b/tests/functional/lang/eval-fail-overflowing-add.nix @@ -1,4 +1,5 @@ let a = 9223372036854775807; b = 1; -in a + b +in +a + b diff --git a/tests/functional/lang/eval-fail-overflowing-div.nix b/tests/functional/lang/eval-fail-overflowing-div.nix index 44fbe9d7e31..e21b0b2e57d 100644 --- a/tests/functional/lang/eval-fail-overflowing-div.nix +++ b/tests/functional/lang/eval-fail-overflowing-div.nix @@ -4,4 +4,5 @@ let # of range intMin = -9223372036854775807 - 1; b = -1; -in builtins.seq intMin (builtins.seq b (intMin / b)) +in +builtins.seq intMin (builtins.seq b (intMin / b)) diff --git a/tests/functional/lang/eval-fail-overflowing-mul.nix b/tests/functional/lang/eval-fail-overflowing-mul.nix index 6081d9c7b14..95b1375bb01 100644 --- a/tests/functional/lang/eval-fail-overflowing-mul.nix +++ b/tests/functional/lang/eval-fail-overflowing-mul.nix @@ -1,3 +1,4 @@ let a = 4294967297; -in a * a * a +in +a * a * a diff --git a/tests/functional/lang/eval-fail-overflowing-sub.nix b/tests/functional/lang/eval-fail-overflowing-sub.nix index 229b8c6d264..4f0203a6da5 100644 --- a/tests/functional/lang/eval-fail-overflowing-sub.nix +++ b/tests/functional/lang/eval-fail-overflowing-sub.nix @@ -1,4 +1,5 @@ let a = -9223372036854775807; b = 2; -in a - b +in +a - b diff --git a/tests/functional/lang/eval-fail-recursion.nix b/tests/functional/lang/eval-fail-recursion.nix index 075b5ed066b..88718a6e507 100644 --- a/tests/functional/lang/eval-fail-recursion.nix +++ b/tests/functional/lang/eval-fail-recursion.nix @@ -1 +1,4 @@ -let a = {} // a; in a.foo +let + a = { } // a; +in +a.foo diff --git a/tests/functional/lang/eval-fail-remove.nix b/tests/functional/lang/eval-fail-remove.nix index 539e0eb0a6f..9de066abe73 100644 --- a/tests/functional/lang/eval-fail-remove.nix +++ b/tests/functional/lang/eval-fail-remove.nix @@ -1,5 +1,8 @@ let { - attrs = {x = 123; y = 456;}; + attrs = { + x = 123; + y = 456; + }; - body = (removeAttrs attrs ["x"]).x; -} \ No newline at end of file + body = (removeAttrs attrs [ "x" ]).x; +} diff --git a/tests/functional/lang/eval-fail-scope-5.nix b/tests/functional/lang/eval-fail-scope-5.nix index f89a65a99be..ef6f1bb640e 100644 --- a/tests/functional/lang/eval-fail-scope-5.nix +++ b/tests/functional/lang/eval-fail-scope-5.nix @@ -3,8 +3,13 @@ let { x = "a"; y = "b"; - f = {x ? y, y ? x}: x + y; - - body = f {}; + f = + { + x ? y, + y ? 
x, + }: + x + y; + + body = f { }; } diff --git a/tests/functional/lang/eval-fail-undeclared-arg.nix b/tests/functional/lang/eval-fail-undeclared-arg.nix index cafdf163627..aca4511bbff 100644 --- a/tests/functional/lang/eval-fail-undeclared-arg.nix +++ b/tests/functional/lang/eval-fail-undeclared-arg.nix @@ -1 +1,5 @@ -({x, z}: x + z) {x = "foo"; y = "bla"; z = "bar";} +({ x, z }: x + z) { + x = "foo"; + y = "bla"; + z = "bar"; +} diff --git a/tests/functional/lang/eval-fail-using-set-as-attr-name.nix b/tests/functional/lang/eval-fail-using-set-as-attr-name.nix index 48e071a41cf..96390e35f6a 100644 --- a/tests/functional/lang/eval-fail-using-set-as-attr-name.nix +++ b/tests/functional/lang/eval-fail-using-set-as-attr-name.nix @@ -1,5 +1,7 @@ let - attr = {foo = "bar";}; - key = {}; + attr = { + foo = "bar"; + }; + key = { }; in - attr.${key} +attr.${key} diff --git a/tests/functional/lang/eval-okay-any-all.nix b/tests/functional/lang/eval-okay-any-all.nix index a3f26ea2aa8..643d36cb704 100644 --- a/tests/functional/lang/eval-okay-any-all.nix +++ b/tests/functional/lang/eval-okay-any-all.nix @@ -1,11 +1,34 @@ with builtins; -[ (any (x: x == 1) []) - (any (x: x == 1) [2 3 4]) - (any (x: x == 1) [1 2 3 4]) - (any (x: x == 1) [4 3 2 1]) - (all (x: x == 1) []) - (all (x: x == 1) [1]) - (all (x: x == 1) [1 2 3]) - (all (x: x == 1) [1 1 1]) +[ + (any (x: x == 1) [ ]) + (any (x: x == 1) [ + 2 + 3 + 4 + ]) + (any (x: x == 1) [ + 1 + 2 + 3 + 4 + ]) + (any (x: x == 1) [ + 4 + 3 + 2 + 1 + ]) + (all (x: x == 1) [ ]) + (all (x: x == 1) [ 1 ]) + (all (x: x == 1) [ + 1 + 2 + 3 + ]) + (all (x: x == 1) [ + 1 + 1 + 1 + ]) ] diff --git a/tests/functional/lang/eval-okay-arithmetic.nix b/tests/functional/lang/eval-okay-arithmetic.nix index 7e9e6a0b666..8160b4d84ca 100644 --- a/tests/functional/lang/eval-okay-arithmetic.nix +++ b/tests/functional/lang/eval-okay-arithmetic.nix @@ -2,58 +2,59 @@ with import ./lib.nix; let { - /* Supposedly tail recursive version: + /* + Supposedly tail recursive version: - range_ = accum: first: last: - if first == last then ([first] ++ accum) - else range_ ([first] ++ accum) (builtins.add first 1) last; + range_ = accum: first: last: + if first == last then ([first] ++ accum) + else range_ ([first] ++ accum) (builtins.add first 1) last; - range = range_ []; + range = range_ []; */ x = 12; err = abort "urgh"; - body = sum - [ (sum (range 1 50)) - (123 + 456) - (0 + -10 + -(-11) + -x) - (10 - 7 - -2) - (10 - (6 - -1)) - (10 - 1 + 2) - (3 * 4 * 5) - (56088 / 123 / 2) - (3 + 4 * const 5 0 - 6 / id 2) - - (builtins.bitAnd 12 10) # 0b1100 & 0b1010 = 8 - (builtins.bitOr 12 10) # 0b1100 | 0b1010 = 14 - (builtins.bitXor 12 10) # 0b1100 ^ 0b1010 = 6 - - (if 3 < 7 then 1 else err) - (if 7 < 3 then err else 1) - (if 3 < 3 then err else 1) - - (if 3 <= 7 then 1 else err) - (if 7 <= 3 then err else 1) - (if 3 <= 3 then 1 else err) - - (if 3 > 7 then err else 1) - (if 7 > 3 then 1 else err) - (if 3 > 3 then err else 1) - - (if 3 >= 7 then err else 1) - (if 7 >= 3 then 1 else err) - (if 3 >= 3 then 1 else err) - - (if 2 > 1 == 1 < 2 then 1 else err) - (if 1 + 2 * 3 >= 7 then 1 else err) - (if 1 + 2 * 3 < 7 then err else 1) - - # Not integer, but so what. 
- (if "aa" < "ab" then 1 else err) - (if "aa" < "aa" then err else 1) - (if "foo" < "foobar" then 1 else err) - ]; + body = sum [ + (sum (range 1 50)) + (123 + 456) + (0 + -10 + -(-11) + -x) + (10 - 7 - -2) + (10 - (6 - -1)) + (10 - 1 + 2) + (3 * 4 * 5) + (56088 / 123 / 2) + (3 + 4 * const 5 0 - 6 / id 2) + + (builtins.bitAnd 12 10) # 0b1100 & 0b1010 = 8 + (builtins.bitOr 12 10) # 0b1100 | 0b1010 = 14 + (builtins.bitXor 12 10) # 0b1100 ^ 0b1010 = 6 + + (if 3 < 7 then 1 else err) + (if 7 < 3 then err else 1) + (if 3 < 3 then err else 1) + + (if 3 <= 7 then 1 else err) + (if 7 <= 3 then err else 1) + (if 3 <= 3 then 1 else err) + + (if 3 > 7 then err else 1) + (if 7 > 3 then 1 else err) + (if 3 > 3 then err else 1) + + (if 3 >= 7 then err else 1) + (if 7 >= 3 then 1 else err) + (if 3 >= 3 then 1 else err) + + (if 2 > 1 == 1 < 2 then 1 else err) + (if 1 + 2 * 3 >= 7 then 1 else err) + (if 1 + 2 * 3 < 7 then err else 1) + + # Not integer, but so what. + (if "aa" < "ab" then 1 else err) + (if "aa" < "aa" then err else 1) + (if "foo" < "foobar" then 1 else err) + ]; } diff --git a/tests/functional/lang/eval-okay-attrnames.nix b/tests/functional/lang/eval-okay-attrnames.nix index e5b26e9f2e3..085e78084b0 100644 --- a/tests/functional/lang/eval-okay-attrnames.nix +++ b/tests/functional/lang/eval-okay-attrnames.nix @@ -2,10 +2,21 @@ with import ./lib.nix; let - attrs = {y = "y"; x = "x"; foo = "foo";} // rec {x = "newx"; bar = x;}; + attrs = + { + y = "y"; + x = "x"; + foo = "foo"; + } + // rec { + x = "newx"; + bar = x; + }; names = builtins.attrNames attrs; values = map (name: builtins.getAttr name attrs) names; -in assert values == builtins.attrValues attrs; concat values +in +assert values == builtins.attrValues attrs; +concat values diff --git a/tests/functional/lang/eval-okay-attrs.nix b/tests/functional/lang/eval-okay-attrs.nix index 810b31a5da9..787b9a933cf 100644 --- a/tests/functional/lang/eval-okay-attrs.nix +++ b/tests/functional/lang/eval-okay-attrs.nix @@ -1,5 +1,20 @@ let { - as = { x = 123; y = 456; } // { z = 789; } // { z = 987; }; + as = + { + x = 123; + y = 456; + } + // { + z = 789; + } + // { + z = 987; + }; - body = if as ? a then as.a else assert as ? z; as.z; + body = + if as ? a then + as.a + else + assert as ? 
z; + as.z; } diff --git a/tests/functional/lang/eval-okay-attrs2.nix b/tests/functional/lang/eval-okay-attrs2.nix index 9e06b83ac1f..0896f9cf1e1 100644 --- a/tests/functional/lang/eval-okay-attrs2.nix +++ b/tests/functional/lang/eval-okay-attrs2.nix @@ -1,10 +1,23 @@ let { - as = { x = 123; y = 456; } // { z = 789; } // { z = 987; }; + as = + { + x = 123; + y = 456; + } + // { + z = 789; + } + // { + z = 987; + }; A = "a"; Z = "z"; - body = if builtins.hasAttr A as - then builtins.getAttr A as - else assert builtins.hasAttr Z as; builtins.getAttr Z as; + body = + if builtins.hasAttr A as then + builtins.getAttr A as + else + assert builtins.hasAttr Z as; + builtins.getAttr Z as; } diff --git a/tests/functional/lang/eval-okay-attrs3.nix b/tests/functional/lang/eval-okay-attrs3.nix index f29de11fe66..cab345337dd 100644 --- a/tests/functional/lang/eval-okay-attrs3.nix +++ b/tests/functional/lang/eval-okay-attrs3.nix @@ -1,22 +1,22 @@ let - config = - { - services.sshd.enable = true; - services.sshd.port = 22; - services.httpd.port = 80; - hostName = "itchy"; - a.b.c.d.e.f.g.h.i.j.k.l.m.n.o.p.q.r.s.t.u.v.w.x.y.z = "x"; - foo = { - a = "a"; - b.c = "c"; - }; + config = { + services.sshd.enable = true; + services.sshd.port = 22; + services.httpd.port = 80; + hostName = "itchy"; + a.b.c.d.e.f.g.h.i.j.k.l.m.n.o.p.q.r.s.t.u.v.w.x.y.z = "x"; + foo = { + a = "a"; + b.c = "c"; }; + }; in - if config.services.sshd.enable - then "foo ${toString config.services.sshd.port} ${toString config.services.httpd.port} ${config.hostName}" - + "${config.a.b.c.d.e.f.g.h.i.j.k.l.m.n.o.p.q.r.s.t.u.v.w.x.y.z}" - + "${config.foo.a}" - + "${config.foo.b.c}" - else "bar" +if config.services.sshd.enable then + "foo ${toString config.services.sshd.port} ${toString config.services.httpd.port} ${config.hostName}" + + "${config.a.b.c.d.e.f.g.h.i.j.k.l.m.n.o.p.q.r.s.t.u.v.w.x.y.z}" + + "${config.foo.a}" + + "${config.foo.b.c}" +else + "bar" diff --git a/tests/functional/lang/eval-okay-attrs4.nix b/tests/functional/lang/eval-okay-attrs4.nix index 43ec81210f3..3e43e4bae4f 100644 --- a/tests/functional/lang/eval-okay-attrs4.nix +++ b/tests/functional/lang/eval-okay-attrs4.nix @@ -1,7 +1,20 @@ let - as = { x.y.z = 123; a.b.c = 456; }; + as = { + x.y.z = 123; + a.b.c = 456; + }; bs = null; -in [ (as ? x) (as ? y) (as ? x.y.z) (as ? x.y.z.a) (as ? x.y.a) (as ? a.b.c) (bs ? x) (bs ? x.y.z) ] +in +[ + (as ? x) + (as ? y) + (as ? x.y.z) + (as ? x.y.z.a) + (as ? x.y.a) + (as ? a.b.c) + (bs ? x) + (bs ? x.y.z) +] diff --git a/tests/functional/lang/eval-okay-attrs6.nix b/tests/functional/lang/eval-okay-attrs6.nix index 2e5c85483be..76c94af785a 100644 --- a/tests/functional/lang/eval-okay-attrs6.nix +++ b/tests/functional/lang/eval-okay-attrs6.nix @@ -1,4 +1,6 @@ rec { "${"foo"}" = "bar"; - __overrides = { bar = "qux"; }; + __overrides = { + bar = "qux"; + }; } diff --git a/tests/functional/lang/eval-okay-autoargs.nix b/tests/functional/lang/eval-okay-autoargs.nix index 815f51b1d67..bc82c569b48 100644 --- a/tests/functional/lang/eval-okay-autoargs.nix +++ b/tests/functional/lang/eval-okay-autoargs.nix @@ -4,12 +4,17 @@ let in -{ xyzzy2 ? xyzzy # mutually recursive args -, xyzzy ? "blaat" # will be overridden by --argstr -, fb ? foobar -, lib # will be set by --arg +{ + xyzzy2 ? xyzzy, # mutually recursive args + xyzzy ? "blaat", # will be overridden by --argstr + fb ? 
foobar, + lib, # will be set by --arg }: { - result = lib.concat [xyzzy xyzzy2 fb]; + result = lib.concat [ + xyzzy + xyzzy2 + fb + ]; } diff --git a/tests/functional/lang/eval-okay-builtins-add.nix b/tests/functional/lang/eval-okay-builtins-add.nix index c841816222a..f678f640f12 100644 --- a/tests/functional/lang/eval-okay-builtins-add.nix +++ b/tests/functional/lang/eval-okay-builtins-add.nix @@ -1,8 +1,8 @@ [ -(builtins.add 2 3) -(builtins.add 2 2) -(builtins.typeOf (builtins.add 2 2)) -("t" + "t") -(builtins.typeOf (builtins.add 2.0 2)) -(builtins.add 2.0 2) + (builtins.add 2 3) + (builtins.add 2 2) + (builtins.typeOf (builtins.add 2 2)) + ("t" + "t") + (builtins.typeOf (builtins.add 2.0 2)) + (builtins.add 2.0 2) ] diff --git a/tests/functional/lang/eval-okay-builtins.nix b/tests/functional/lang/eval-okay-builtins.nix index e9d65e88a81..be4114116f3 100644 --- a/tests/functional/lang/eval-okay-builtins.nix +++ b/tests/functional/lang/eval-okay-builtins.nix @@ -8,5 +8,5 @@ let { y = if builtins ? fnord then builtins.fnord "foo" else ""; body = x + y; - + } diff --git a/tests/functional/lang/eval-okay-callable-attrs.nix b/tests/functional/lang/eval-okay-callable-attrs.nix index 310a030df00..a4c1ace362b 100644 --- a/tests/functional/lang/eval-okay-callable-attrs.nix +++ b/tests/functional/lang/eval-okay-callable-attrs.nix @@ -1 +1,10 @@ -({ __functor = self: x: self.foo && x; foo = false; } // { foo = true; }) true +( + { + __functor = self: x: self.foo && x; + foo = false; + } + // { + foo = true; + } +) + true diff --git a/tests/functional/lang/eval-okay-catattrs.nix b/tests/functional/lang/eval-okay-catattrs.nix index 2c3dc10da52..7ec4ba7aeb2 100644 --- a/tests/functional/lang/eval-okay-catattrs.nix +++ b/tests/functional/lang/eval-okay-catattrs.nix @@ -1 +1,5 @@ -builtins.catAttrs "a" [ { a = 1; } { b = 0; } { a = 2; } ] +builtins.catAttrs "a" [ + { a = 1; } + { b = 0; } + { a = 2; } +] diff --git a/tests/functional/lang/eval-okay-closure.nix b/tests/functional/lang/eval-okay-closure.nix index cccd4dc3573..67c53d08947 100644 --- a/tests/functional/lang/eval-okay-closure.nix +++ b/tests/functional/lang/eval-okay-closure.nix @@ -1,13 +1,25 @@ let closure = builtins.genericClosure { - startSet = [{key = 80;}]; - operator = {key, foo ? false}: - if builtins.lessThan key 0 - then [] - else [{key = builtins.sub key 9;} {key = builtins.sub key 13; foo = true;}]; + startSet = [ { key = 80; } ]; + operator = + { + key, + foo ? 
false, + }: + if builtins.lessThan key 0 then + [ ] + else + [ + { key = builtins.sub key 9; } + { + key = builtins.sub key 13; + foo = true; + } + ]; }; sort = (import ./lib.nix).sortBy (a: b: builtins.lessThan a.key b.key); -in sort closure +in +sort closure diff --git a/tests/functional/lang/eval-okay-concat.nix b/tests/functional/lang/eval-okay-concat.nix index d158a9bf05b..ce754ca005f 100644 --- a/tests/functional/lang/eval-okay-concat.nix +++ b/tests/functional/lang/eval-okay-concat.nix @@ -1 +1,15 @@ -[1 2 3] ++ [4 5 6] ++ [7 8 9] +[ + 1 + 2 + 3 +] +++ [ + 4 + 5 + 6 +] +++ [ + 7 + 8 + 9 +] diff --git a/tests/functional/lang/eval-okay-concatmap.nix b/tests/functional/lang/eval-okay-concatmap.nix index 97da5d37a41..14b5461319e 100644 --- a/tests/functional/lang/eval-okay-concatmap.nix +++ b/tests/functional/lang/eval-okay-concatmap.nix @@ -1,5 +1,9 @@ with import ./lib.nix; -[ (builtins.concatMap (x: if x / 2 * 2 == x then [] else [ x ]) (range 0 10)) - (builtins.concatMap (x: [x] ++ ["z"]) ["a" "b"]) +[ + (builtins.concatMap (x: if x / 2 * 2 == x then [ ] else [ x ]) (range 0 10)) + (builtins.concatMap (x: [ x ] ++ [ "z" ]) [ + "a" + "b" + ]) ] diff --git a/tests/functional/lang/eval-okay-concatstringssep.nix b/tests/functional/lang/eval-okay-concatstringssep.nix index adc4c41bd55..2270d11b4c4 100644 --- a/tests/functional/lang/eval-okay-concatstringssep.nix +++ b/tests/functional/lang/eval-okay-concatstringssep.nix @@ -1,8 +1,17 @@ with builtins; -[ (concatStringsSep "" []) - (concatStringsSep "" ["foo" "bar" "xyzzy"]) - (concatStringsSep ", " ["foo" "bar" "xyzzy"]) - (concatStringsSep ", " ["foo"]) - (concatStringsSep ", " []) +[ + (concatStringsSep "" [ ]) + (concatStringsSep "" [ + "foo" + "bar" + "xyzzy" + ]) + (concatStringsSep ", " [ + "foo" + "bar" + "xyzzy" + ]) + (concatStringsSep ", " [ "foo" ]) + (concatStringsSep ", " [ ]) ] diff --git a/tests/functional/lang/eval-okay-context-introspection.nix b/tests/functional/lang/eval-okay-context-introspection.nix index 8886cf32e94..5ed99471901 100644 --- a/tests/functional/lang/eval-okay-context-introspection.nix +++ b/tests/functional/lang/eval-okay-context-introspection.nix @@ -3,7 +3,10 @@ let name = "fail"; builder = "/bin/false"; system = "x86_64-linux"; - outputs = [ "out" "foo" ]; + outputs = [ + "out" + "foo" + ]; }; path = "${./eval-okay-context-introspection.nix}"; @@ -13,7 +16,10 @@ let path = true; }; "${builtins.unsafeDiscardStringContext drv.drvPath}" = { - outputs = [ "foo" "out" ]; + outputs = [ + "foo" + "out" + ]; allOutputs = true; }; }; @@ -21,25 +27,22 @@ let combo-path = "${path}${drv.outPath}${drv.foo.outPath}${drv.drvPath}"; legit-context = builtins.getContext combo-path; - reconstructed-path = builtins.appendContext - (builtins.unsafeDiscardStringContext combo-path) - desired-context; + reconstructed-path = builtins.appendContext (builtins.unsafeDiscardStringContext combo-path) desired-context; # Eta rule for strings with context. - etaRule = str: - str == builtins.appendContext - (builtins.unsafeDiscardStringContext str) - (builtins.getContext str); + etaRule = + str: + str == builtins.appendContext (builtins.unsafeDiscardStringContext str) (builtins.getContext str); # Only holds true if string context contains both a `DrvDeep` and # `Opaque` element. 
- almostEtaRule = str: - str == builtins.addDrvOutputDependencies - (builtins.unsafeDiscardOutputDependency str); + almostEtaRule = + str: str == builtins.addDrvOutputDependencies (builtins.unsafeDiscardOutputDependency str); - addDrvOutputDependencies_idempotent = str: - builtins.addDrvOutputDependencies str == - builtins.addDrvOutputDependencies (builtins.addDrvOutputDependencies str); + addDrvOutputDependencies_idempotent = + str: + builtins.addDrvOutputDependencies str + == builtins.addDrvOutputDependencies (builtins.addDrvOutputDependencies str); rules = str: [ (etaRule str) @@ -47,12 +50,14 @@ let (addDrvOutputDependencies_idempotent str) ]; -in [ +in +[ (legit-context == desired-context) (reconstructed-path == combo-path) (etaRule "foo") (etaRule drv.foo.outPath) -] ++ builtins.concatMap rules [ +] +++ builtins.concatMap rules [ drv.drvPath (builtins.addDrvOutputDependencies drv.drvPath) (builtins.unsafeDiscardOutputDependency drv.drvPath) diff --git a/tests/functional/lang/eval-okay-context.nix b/tests/functional/lang/eval-okay-context.nix index 7b9531cfe9e..102bc22599c 100644 --- a/tests/functional/lang/eval-okay-context.nix +++ b/tests/functional/lang/eval-okay-context.nix @@ -1,6 +1,7 @@ -let s = "foo ${builtins.substring 33 100 (baseNameOf "${./eval-okay-context.nix}")} bar"; +let + s = "foo ${builtins.substring 33 100 (baseNameOf "${./eval-okay-context.nix}")} bar"; in - if s != "foo eval-okay-context.nix bar" - then abort "context not discarded" - else builtins.unsafeDiscardStringContext s - +if s != "foo eval-okay-context.nix bar" then + abort "context not discarded" +else + builtins.unsafeDiscardStringContext s diff --git a/tests/functional/lang/eval-okay-convertHash.nix b/tests/functional/lang/eval-okay-convertHash.nix index a0191ee8df1..6d5074fea23 100644 --- a/tests/functional/lang/eval-okay-convertHash.nix +++ b/tests/functional/lang/eval-okay-convertHash.nix @@ -1,33 +1,131 @@ let - hashAlgos = [ "md5" "md5" "md5" "sha1" "sha1" "sha1" "sha256" "sha256" "sha256" "sha512" "sha512" "sha512" ]; + hashAlgos = [ + "md5" + "md5" + "md5" + "sha1" + "sha1" + "sha1" + "sha256" + "sha256" + "sha256" + "sha512" + "sha512" + "sha512" + ]; hashesBase16 = import ./eval-okay-hashstring.exp; - map2 = f: { fsts, snds }: if fsts == [ ] then [ ] else [ (f (builtins.head fsts) (builtins.head snds)) ] ++ map2 f { fsts = builtins.tail fsts; snds = builtins.tail snds; }; - map2' = f: fsts: snds: map2 f { inherit fsts snds; }; + map2 = + f: + { fsts, snds }: + if fsts == [ ] then + [ ] + else + [ (f (builtins.head fsts) (builtins.head snds)) ] + ++ map2 f { + fsts = builtins.tail fsts; + snds = builtins.tail snds; + }; + map2' = + f: fsts: snds: + map2 f { inherit fsts snds; }; getOutputHashes = hashes: { - hashesBase16 = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base16";}) hashAlgos hashes; - hashesNix32 = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32";}) hashAlgos hashes; - hashesBase32 = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";}) hashAlgos hashes; - hashesBase64 = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base64";}) hashAlgos hashes; - hashesSRI = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "sri" ;}) hashAlgos hashes; + hashesBase16 = map2' ( + hashAlgo: hash: + builtins.convertHash { + inherit hash hashAlgo; + toHashFormat = "base16"; + } + ) hashAlgos hashes; + 
hashesNix32 = map2' ( + hashAlgo: hash: + builtins.convertHash { + inherit hash hashAlgo; + toHashFormat = "nix32"; + } + ) hashAlgos hashes; + hashesBase32 = map2' ( + hashAlgo: hash: + builtins.convertHash { + inherit hash hashAlgo; + toHashFormat = "base32"; + } + ) hashAlgos hashes; + hashesBase64 = map2' ( + hashAlgo: hash: + builtins.convertHash { + inherit hash hashAlgo; + toHashFormat = "base64"; + } + ) hashAlgos hashes; + hashesSRI = map2' ( + hashAlgo: hash: + builtins.convertHash { + inherit hash hashAlgo; + toHashFormat = "sri"; + } + ) hashAlgos hashes; }; getOutputHashesColon = hashes: { - hashesBase16 = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "base16";}) hashAlgos hashes; - hashesNix32 = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "nix32";}) hashAlgos hashes; - hashesBase32 = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "base32";}) hashAlgos hashes; - hashesBase64 = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "base64";}) hashAlgos hashes; - hashesSRI = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "sri" ;}) hashAlgos hashes; + hashesBase16 = map2' ( + hashAlgo: hashBody: + builtins.convertHash { + hash = hashAlgo + ":" + hashBody; + toHashFormat = "base16"; + } + ) hashAlgos hashes; + hashesNix32 = map2' ( + hashAlgo: hashBody: + builtins.convertHash { + hash = hashAlgo + ":" + hashBody; + toHashFormat = "nix32"; + } + ) hashAlgos hashes; + hashesBase32 = map2' ( + hashAlgo: hashBody: + builtins.convertHash { + hash = hashAlgo + ":" + hashBody; + toHashFormat = "base32"; + } + ) hashAlgos hashes; + hashesBase64 = map2' ( + hashAlgo: hashBody: + builtins.convertHash { + hash = hashAlgo + ":" + hashBody; + toHashFormat = "base64"; + } + ) hashAlgos hashes; + hashesSRI = map2' ( + hashAlgo: hashBody: + builtins.convertHash { + hash = hashAlgo + ":" + hashBody; + toHashFormat = "sri"; + } + ) hashAlgos hashes; }; outputHashes = getOutputHashes hashesBase16; in # map2'` -assert map2' (s1: s2: s1 + s2) [ "a" "b" ] [ "c" "d" ] == [ "ac" "bd" ]; +assert + map2' (s1: s2: s1 + s2) [ "a" "b" ] [ "c" "d" ] == [ + "ac" + "bd" + ]; # hashesBase16 assert outputHashes.hashesBase16 == hashesBase16; # standard SRI hashes -assert outputHashes.hashesSRI == (map2' (hashAlgo: hashBody: hashAlgo + "-" + hashBody) hashAlgos outputHashes.hashesBase64); +assert + outputHashes.hashesSRI + == (map2' (hashAlgo: hashBody: hashAlgo + "-" + hashBody) hashAlgos outputHashes.hashesBase64); # without prefix assert builtins.all (x: getOutputHashes x == outputHashes) (builtins.attrValues outputHashes); # colon-separated. # Note that colon prefix must not be applied to the standard SRI. e.g. "sha256:sha256-..." is illegal. 
-assert builtins.all (x: getOutputHashesColon x == outputHashes) (with outputHashes; [ hashesBase16 hashesBase32 hashesBase64 ]); +assert builtins.all (x: getOutputHashesColon x == outputHashes) ( + with outputHashes; + [ + hashesBase16 + hashesBase32 + hashesBase64 + ] +); outputHashes diff --git a/tests/functional/lang/eval-okay-deepseq.nix b/tests/functional/lang/eval-okay-deepseq.nix index 53aa4b1dc25..f9aa5f720f3 100644 --- a/tests/functional/lang/eval-okay-deepseq.nix +++ b/tests/functional/lang/eval-okay-deepseq.nix @@ -1 +1,9 @@ -builtins.deepSeq (let as = { x = 123; y = as; }; in as) 456 +builtins.deepSeq ( + let + as = { + x = 123; + y = as; + }; + in + as +) 456 diff --git a/tests/functional/lang/eval-okay-delayed-with-inherit.nix b/tests/functional/lang/eval-okay-delayed-with-inherit.nix index 84b388c2713..10ce7df13c0 100644 --- a/tests/functional/lang/eval-okay-delayed-with-inherit.nix +++ b/tests/functional/lang/eval-okay-delayed-with-inherit.nix @@ -4,7 +4,10 @@ let name = "a"; system = builtins.currentSystem; builder = "/bin/sh"; - args = [ "-c" "touch $out" ]; + args = [ + "-c" + "touch $out" + ]; inherit b; }; @@ -16,9 +19,13 @@ let name = "b-overridden"; system = builtins.currentSystem; builder = "/bin/sh"; - args = [ "-c" "touch $out" ]; + args = [ + "-c" + "touch $out" + ]; }; }; pkgs = pkgs_ // (packageOverrides pkgs_); -in pkgs.a.b.name +in +pkgs.a.b.name diff --git a/tests/functional/lang/eval-okay-delayed-with.nix b/tests/functional/lang/eval-okay-delayed-with.nix index 3fb023e1cd4..52ec24e12e4 100644 --- a/tests/functional/lang/eval-okay-delayed-with.nix +++ b/tests/functional/lang/eval-okay-delayed-with.nix @@ -5,7 +5,10 @@ let name = "a"; system = builtins.currentSystem; builder = "/bin/sh"; - args = [ "-c" "touch $out" ]; + args = [ + "-c" + "touch $out" + ]; inherit b; }; @@ -13,17 +16,22 @@ let name = "b"; system = builtins.currentSystem; builder = "/bin/sh"; - args = [ "-c" "touch $out" ]; + args = [ + "-c" + "touch $out" + ]; inherit a; }; c = b; }; - packageOverrides = pkgs: with pkgs; { - b = derivation (b.drvAttrs // { name = "${b.name}-overridden"; }); - }; + packageOverrides = + pkgs: with pkgs; { + b = derivation (b.drvAttrs // { name = "${b.name}-overridden"; }); + }; pkgs = pkgs_ // (packageOverrides pkgs_); -in "${pkgs.a.b.name} ${pkgs.c.name} ${pkgs.b.a.name}" +in +"${pkgs.a.b.name} ${pkgs.c.name} ${pkgs.b.a.name}" diff --git a/tests/functional/lang/eval-okay-dynamic-attrs-2.nix b/tests/functional/lang/eval-okay-dynamic-attrs-2.nix index 6d57bf85490..95fe79e2558 100644 --- a/tests/functional/lang/eval-okay-dynamic-attrs-2.nix +++ b/tests/functional/lang/eval-okay-dynamic-attrs-2.nix @@ -1 +1,5 @@ -{ a."${"b"}" = true; a."${"c"}" = false; }.a.b +{ + a."${"b"}" = true; + a."${"c"}" = false; +} +.a.b diff --git a/tests/functional/lang/eval-okay-dynamic-attrs-bare.nix b/tests/functional/lang/eval-okay-dynamic-attrs-bare.nix index 0dbe15e6384..a612bf69dfa 100644 --- a/tests/functional/lang/eval-okay-dynamic-attrs-bare.nix +++ b/tests/functional/lang/eval-okay-dynamic-attrs-bare.nix @@ -2,7 +2,8 @@ let aString = "a"; bString = "b"; -in { +in +{ hasAttrs = { a.b = null; } ? 
${aString}.b; selectAttrs = { a.b = true; }.a.${bString}; @@ -11,7 +12,17 @@ in { binds = { ${aString}."${bString}c" = true; }.a.bc; - recBinds = rec { ${bString} = a; a = true; }.b; + recBinds = + rec { + ${bString} = a; + a = true; + } + .b; - multiAttrs = { ${aString} = true; ${bString} = false; }.a; + multiAttrs = + { + ${aString} = true; + ${bString} = false; + } + .a; } diff --git a/tests/functional/lang/eval-okay-dynamic-attrs.nix b/tests/functional/lang/eval-okay-dynamic-attrs.nix index ee02ac7e657..f46e26b992f 100644 --- a/tests/functional/lang/eval-okay-dynamic-attrs.nix +++ b/tests/functional/lang/eval-okay-dynamic-attrs.nix @@ -2,7 +2,8 @@ let aString = "a"; bString = "b"; -in { +in +{ hasAttrs = { a.b = null; } ? "${aString}".b; selectAttrs = { a.b = true; }.a."${bString}"; @@ -11,7 +12,17 @@ in { binds = { "${aString}"."${bString}c" = true; }.a.bc; - recBinds = rec { "${bString}" = a; a = true; }.b; + recBinds = + rec { + "${bString}" = a; + a = true; + } + .b; - multiAttrs = { "${aString}" = true; "${bString}" = false; }.a; + multiAttrs = + { + "${aString}" = true; + "${bString}" = false; + } + .a; } diff --git a/tests/functional/lang/eval-okay-elem.nix b/tests/functional/lang/eval-okay-elem.nix index 71ea7a4ed03..004111dcc69 100644 --- a/tests/functional/lang/eval-okay-elem.nix +++ b/tests/functional/lang/eval-okay-elem.nix @@ -1,6 +1,11 @@ with import ./lib.nix; -let xs = range 10 40; in - -[ (builtins.elem 23 xs) (builtins.elem 42 xs) (builtins.elemAt xs 20) ] +let + xs = range 10 40; +in +[ + (builtins.elem 23 xs) + (builtins.elem 42 xs) + (builtins.elemAt xs 20) +] diff --git a/tests/functional/lang/eval-okay-empty-args.nix b/tests/functional/lang/eval-okay-empty-args.nix index 78c133afdd9..9466749f6ab 100644 --- a/tests/functional/lang/eval-okay-empty-args.nix +++ b/tests/functional/lang/eval-okay-empty-args.nix @@ -1 +1,4 @@ -({}: {x,y,}: "${x}${y}") {} {x = "a"; y = "b";} +({ }: { x, y }: "${x}${y}") { } { + x = "a"; + y = "b"; +} diff --git a/tests/functional/lang/eval-okay-eq-derivations.nix b/tests/functional/lang/eval-okay-eq-derivations.nix index d526cb4a216..ac802f433c7 100644 --- a/tests/functional/lang/eval-okay-eq-derivations.nix +++ b/tests/functional/lang/eval-okay-eq-derivations.nix @@ -1,10 +1,40 @@ let - drvA1 = derivation { name = "a"; builder = "/foo"; system = "i686-linux"; }; - drvA2 = derivation { name = "a"; builder = "/foo"; system = "i686-linux"; }; - drvA3 = derivation { name = "a"; builder = "/foo"; system = "i686-linux"; } // { dummy = 1; }; - - drvC1 = derivation { name = "c"; builder = "/foo"; system = "i686-linux"; }; - drvC2 = derivation { name = "c"; builder = "/bar"; system = "i686-linux"; }; + drvA1 = derivation { + name = "a"; + builder = "/foo"; + system = "i686-linux"; + }; + drvA2 = derivation { + name = "a"; + builder = "/foo"; + system = "i686-linux"; + }; + drvA3 = + derivation { + name = "a"; + builder = "/foo"; + system = "i686-linux"; + } + // { + dummy = 1; + }; -in [ (drvA1 == drvA1) (drvA1 == drvA2) (drvA1 == drvA3) (drvC1 == drvC2) ] + drvC1 = derivation { + name = "c"; + builder = "/foo"; + system = "i686-linux"; + }; + drvC2 = derivation { + name = "c"; + builder = "/bar"; + system = "i686-linux"; + }; + +in +[ + (drvA1 == drvA1) + (drvA1 == drvA2) + (drvA1 == drvA3) + (drvC1 == drvC2) +] diff --git a/tests/functional/lang/eval-okay-eq.nix b/tests/functional/lang/eval-okay-eq.nix index 73d200b3814..21cb08790ca 100644 --- a/tests/functional/lang/eval-okay-eq.nix +++ b/tests/functional/lang/eval-okay-eq.nix @@ -1,3 +1,13 
@@ -["foobar" (rec {x = 1; y = x;})] -== -[("foo" + "bar") ({x = 1; y = 1;})] +[ + "foobar" + (rec { + x = 1; + y = x; + }) +] == [ + ("foo" + "bar") + ({ + x = 1; + y = 1; + }) +] diff --git a/tests/functional/lang/eval-okay-filter.nix b/tests/functional/lang/eval-okay-filter.nix index 85109b0d0eb..ef4e490c0fd 100644 --- a/tests/functional/lang/eval-okay-filter.nix +++ b/tests/functional/lang/eval-okay-filter.nix @@ -1,5 +1,8 @@ with import ./lib.nix; -builtins.filter - (x: x / 2 * 2 == x) - (builtins.concatLists [ (range 0 10) (range 100 110) ]) +builtins.filter (x: x / 2 * 2 == x) ( + builtins.concatLists [ + (range 0 10) + (range 100 110) + ] +) diff --git a/tests/functional/lang/eval-okay-flake-ref-to-string.nix b/tests/functional/lang/eval-okay-flake-ref-to-string.nix index dbb4e5b2af4..f477ba52caf 100644 --- a/tests/functional/lang/eval-okay-flake-ref-to-string.nix +++ b/tests/functional/lang/eval-okay-flake-ref-to-string.nix @@ -1,7 +1,7 @@ builtins.flakeRefToString { - type = "github"; + type = "github"; owner = "NixOS"; - repo = "nixpkgs"; - ref = "23.05"; - dir = "lib"; + repo = "nixpkgs"; + ref = "23.05"; + dir = "lib"; } diff --git a/tests/functional/lang/eval-okay-flatten.nix b/tests/functional/lang/eval-okay-flatten.nix index fe911e9683e..ade74c8e8fe 100644 --- a/tests/functional/lang/eval-okay-flatten.nix +++ b/tests/functional/lang/eval-okay-flatten.nix @@ -2,7 +2,19 @@ with import ./lib.nix; let { - l = ["1" "2" ["3" ["4"] ["5" "6"]] "7"]; + l = [ + "1" + "2" + [ + "3" + [ "4" ] + [ + "5" + "6" + ] + ] + "7" + ]; body = concat (flatten l); } diff --git a/tests/functional/lang/eval-okay-floor-ceil.nix b/tests/functional/lang/eval-okay-floor-ceil.nix index d76a0d86ea7..06f1a13d252 100644 --- a/tests/functional/lang/eval-okay-floor-ceil.nix +++ b/tests/functional/lang/eval-okay-floor-ceil.nix @@ -6,4 +6,11 @@ let n3 = builtins.floor 23; n4 = builtins.ceil 23; in - builtins.concatStringsSep ";" (map toString [ n1 n2 n3 n4 ]) +builtins.concatStringsSep ";" ( + map toString [ + n1 + n2 + n3 + n4 + ] +) diff --git a/tests/functional/lang/eval-okay-foldlStrict-lazy-elements.nix b/tests/functional/lang/eval-okay-foldlStrict-lazy-elements.nix index c666e07f3ae..49751c759d0 100644 --- a/tests/functional/lang/eval-okay-foldlStrict-lazy-elements.nix +++ b/tests/functional/lang/eval-okay-foldlStrict-lazy-elements.nix @@ -1,9 +1,6 @@ # Tests that the rhs argument of op is not forced unconditionally let - lst = builtins.foldl' - (acc: x: acc ++ [ x ]) - [ ] - [ 42 (throw "this shouldn't be evaluated") ]; + lst = builtins.foldl' (acc: x: acc ++ [ x ]) [ ] [ 42 (throw "this shouldn't be evaluated") ]; in builtins.head lst diff --git a/tests/functional/lang/eval-okay-foldlStrict-lazy-initial-accumulator.nix b/tests/functional/lang/eval-okay-foldlStrict-lazy-initial-accumulator.nix index abcd5366ab8..9cf0ef32c87 100644 --- a/tests/functional/lang/eval-okay-foldlStrict-lazy-initial-accumulator.nix +++ b/tests/functional/lang/eval-okay-foldlStrict-lazy-initial-accumulator.nix @@ -1,6 +1,6 @@ # Checks that the nul value for the accumulator is not forced unconditionally. # Some languages provide a foldl' that is strict in this argument, but Nix does not. 
-builtins.foldl' - (_: x: x) - (throw "This is never forced") - [ "but the results of applying op are" 42 ] +builtins.foldl' (_: x: x) (throw "This is never forced") [ + "but the results of applying op are" + 42 +] diff --git a/tests/functional/lang/eval-okay-fromjson-escapes.nix b/tests/functional/lang/eval-okay-fromjson-escapes.nix index f0071350773..6330e9c8667 100644 --- a/tests/functional/lang/eval-okay-fromjson-escapes.nix +++ b/tests/functional/lang/eval-okay-fromjson-escapes.nix @@ -1,3 +1,4 @@ # This string contains all supported escapes in a JSON string, per json.org # \b and \f are not supported by Nix -builtins.fromJSON ''"quote \" reverse solidus \\ solidus \/ backspace \b formfeed \f newline \n carriage return \r horizontal tab \t 1 char unicode encoded backspace \u0008 1 char unicode encoded e with accent \u00e9 2 char unicode encoded s with caron \u0161 3 char unicode encoded rightwards arrow \u2192"'' +builtins.fromJSON + ''"quote \" reverse solidus \\ solidus \/ backspace \b formfeed \f newline \n carriage return \r horizontal tab \t 1 char unicode encoded backspace \u0008 1 char unicode encoded e with accent \u00e9 2 char unicode encoded s with caron \u0161 3 char unicode encoded rightwards arrow \u2192"'' diff --git a/tests/functional/lang/eval-okay-fromjson.nix b/tests/functional/lang/eval-okay-fromjson.nix index 4c526b9ae5d..0e8a2351fe8 100644 --- a/tests/functional/lang/eval-okay-fromjson.nix +++ b/tests/functional/lang/eval-okay-fromjson.nix @@ -1,41 +1,55 @@ -builtins.fromJSON - '' - { - "Video": { - "Title": "The Penguin Chronicles", - "Width": 1920, - "Height": 1080, - "EmbeddedData": [3.14159, 23493,null, true ,false, -10], - "Thumb": { - "Url": "http://www.example.com/video/5678931", - "Width": 200, - "Height": 250 - }, - "Animated" : false, - "IDs": [116, 943, 234, 38793, true ,false,null, -100], - "Escapes": "\"\\\/\t\n\r\t", - "Subtitle" : false, - "Latitude": 37.7668, - "Longitude": -122.3959 - } - } - '' -== - { Video = - { Title = "The Penguin Chronicles"; - Width = 1920; - Height = 1080; - EmbeddedData = [ 3.14159 23493 null true false (0-10) ]; - Thumb = - { Url = "http://www.example.com/video/5678931"; - Width = 200; - Height = 250; - }; - Animated = false; - IDs = [ 116 943 234 38793 true false null (0-100) ]; - Escapes = "\"\\\/\t\n\r\t"; # supported in JSON but not Nix: \b\f - Subtitle = false; - Latitude = 37.7668; - Longitude = -122.3959; - }; +builtins.fromJSON '' + { + "Video": { + "Title": "The Penguin Chronicles", + "Width": 1920, + "Height": 1080, + "EmbeddedData": [3.14159, 23493,null, true ,false, -10], + "Thumb": { + "Url": "http://www.example.com/video/5678931", + "Width": 200, + "Height": 250 + }, + "Animated" : false, + "IDs": [116, 943, 234, 38793, true ,false,null, -100], + "Escapes": "\"\\\/\t\n\r\t", + "Subtitle" : false, + "Latitude": 37.7668, + "Longitude": -122.3959 + } } +'' == { + Video = { + Title = "The Penguin Chronicles"; + Width = 1920; + Height = 1080; + EmbeddedData = [ + 3.14159 + 23493 + null + true + false + (0 - 10) + ]; + Thumb = { + Url = "http://www.example.com/video/5678931"; + Width = 200; + Height = 250; + }; + Animated = false; + IDs = [ + 116 + 943 + 234 + 38793 + true + false + null + (0 - 100) + ]; + Escapes = "\"\\\/\t\n\r\t"; # supported in JSON but not Nix: \b\f + Subtitle = false; + Latitude = 37.7668; + Longitude = -122.3959; + }; +} diff --git a/tests/functional/lang/eval-okay-functionargs.nix b/tests/functional/lang/eval-okay-functionargs.nix index 68dca62ee18..7c11f19c235 100644 --- 
a/tests/functional/lang/eval-okay-functionargs.nix +++ b/tests/functional/lang/eval-okay-functionargs.nix @@ -1,29 +1,74 @@ let - stdenvFun = { }: { name = "stdenv"; }; - stdenv2Fun = { }: { name = "stdenv2"; }; - fetchurlFun = { stdenv }: assert stdenv.name == "stdenv"; { name = "fetchurl"; }; - atermFun = { stdenv, fetchurl }: { name = "aterm-${stdenv.name}"; }; - aterm2Fun = { stdenv, fetchurl }: { name = "aterm2-${stdenv.name}"; }; - nixFun = { stdenv, fetchurl, aterm }: { name = "nix-${stdenv.name}-${aterm.name}"; }; - + stdenvFun = + { }: + { + name = "stdenv"; + }; + stdenv2Fun = + { }: + { + name = "stdenv2"; + }; + fetchurlFun = + { stdenv }: + assert stdenv.name == "stdenv"; + { + name = "fetchurl"; + }; + atermFun = + { stdenv, fetchurl }: + { + name = "aterm-${stdenv.name}"; + }; + aterm2Fun = + { stdenv, fetchurl }: + { + name = "aterm2-${stdenv.name}"; + }; + nixFun = + { + stdenv, + fetchurl, + aterm, + }: + { + name = "nix-${stdenv.name}-${aterm.name}"; + }; + mplayerFun = - { stdenv, fetchurl, enableX11 ? false, xorg ? null, enableFoo ? true, foo ? null }: + { + stdenv, + fetchurl, + enableX11 ? false, + xorg ? null, + enableFoo ? true, + foo ? null, + }: assert stdenv.name == "stdenv2"; assert enableX11 -> xorg.libXv.name == "libXv"; assert enableFoo -> foo != null; - { name = "mplayer-${stdenv.name}.${xorg.libXv.name}-${xorg.libX11.name}"; }; + { + name = "mplayer-${stdenv.name}.${xorg.libXv.name}-${xorg.libX11.name}"; + }; - makeOverridable = f: origArgs: f origArgs // - { override = newArgs: + makeOverridable = + f: origArgs: + f origArgs + // { + override = + newArgs: makeOverridable f (origArgs // (if builtins.isFunction newArgs then newArgs origArgs else newArgs)); }; - - callPackage_ = pkgs: f: args: + + callPackage_ = + pkgs: f: args: makeOverridable f ((builtins.intersectAttrs (builtins.functionArgs f) pkgs) // args); allPackages = - { overrides ? (pkgs: pkgsPrev: { }) }: + { + overrides ? 
(pkgs: pkgsPrev: { }), + }: let callPackage = callPackage_ pkgs; pkgs = pkgsStd // (overrides pkgs pkgsStd); @@ -34,18 +79,40 @@ let fetchurl = callPackage fetchurlFun { }; aterm = callPackage atermFun { }; xorg = callPackage xorgFun { }; - mplayer = callPackage mplayerFun { stdenv = pkgs.stdenv2; enableFoo = false; }; + mplayer = callPackage mplayerFun { + stdenv = pkgs.stdenv2; + enableFoo = false; + }; nix = callPackage nixFun { }; }; - in pkgs; + in + pkgs; + + libX11Fun = + { stdenv, fetchurl }: + { + name = "libX11"; + }; + libX11_2Fun = + { stdenv, fetchurl }: + { + name = "libX11_2"; + }; + libXvFun = + { + stdenv, + fetchurl, + libX11, + }: + { + name = "libXv"; + }; - libX11Fun = { stdenv, fetchurl }: { name = "libX11"; }; - libX11_2Fun = { stdenv, fetchurl }: { name = "libX11_2"; }; - libXvFun = { stdenv, fetchurl, libX11 }: { name = "libXv"; }; - xorgFun = { pkgs }: - let callPackage = callPackage_ (pkgs // pkgs.xorg); in + let + callPackage = callPackage_ (pkgs // pkgs.xorg); + in { libX11 = callPackage libX11Fun { }; libXv = callPackage libXvFun { }; @@ -56,25 +123,28 @@ in let pkgs = allPackages { }; - + pkgs2 = allPackages { overrides = pkgs: pkgsPrev: { stdenv = pkgs.stdenv2; nix = pkgsPrev.nix.override { aterm = aterm2Fun { inherit (pkgs) stdenv fetchurl; }; }; - xorg = pkgsPrev.xorg // { libX11 = libX11_2Fun { inherit (pkgs) stdenv fetchurl; }; }; + xorg = pkgsPrev.xorg // { + libX11 = libX11_2Fun { inherit (pkgs) stdenv fetchurl; }; + }; }; }; - + in - [ pkgs.stdenv.name - pkgs.fetchurl.name - pkgs.aterm.name - pkgs2.aterm.name - pkgs.xorg.libX11.name - pkgs.xorg.libXv.name - pkgs.mplayer.name - pkgs2.mplayer.name - pkgs.nix.name - pkgs2.nix.name - ] +[ + pkgs.stdenv.name + pkgs.fetchurl.name + pkgs.aterm.name + pkgs2.aterm.name + pkgs.xorg.libX11.name + pkgs.xorg.libXv.name + pkgs.mplayer.name + pkgs2.mplayer.name + pkgs.nix.name + pkgs2.nix.name +] diff --git a/tests/functional/lang/eval-okay-getattrpos-functionargs.nix b/tests/functional/lang/eval-okay-getattrpos-functionargs.nix index 11d6bb0e3ac..9692911cfc9 100644 --- a/tests/functional/lang/eval-okay-getattrpos-functionargs.nix +++ b/tests/functional/lang/eval-okay-getattrpos-functionargs.nix @@ -1,4 +1,8 @@ let - fun = { foo }: {}; + fun = { foo }: { }; pos = builtins.unsafeGetAttrPos "foo" (builtins.functionArgs fun); -in { inherit (pos) column line; file = baseNameOf pos.file; } +in +{ + inherit (pos) column line; + file = baseNameOf pos.file; +} diff --git a/tests/functional/lang/eval-okay-getattrpos.nix b/tests/functional/lang/eval-okay-getattrpos.nix index ca6b0796154..25bc57444fa 100644 --- a/tests/functional/lang/eval-okay-getattrpos.nix +++ b/tests/functional/lang/eval-okay-getattrpos.nix @@ -3,4 +3,8 @@ let foo = "bar"; }; pos = builtins.unsafeGetAttrPos "foo" as; -in { inherit (pos) column line; file = baseNameOf pos.file; } +in +{ + inherit (pos) column line; + file = baseNameOf pos.file; +} diff --git a/tests/functional/lang/eval-okay-groupBy.nix b/tests/functional/lang/eval-okay-groupBy.nix index 862d89dbd67..f4de5444a3c 100644 --- a/tests/functional/lang/eval-okay-groupBy.nix +++ b/tests/functional/lang/eval-okay-groupBy.nix @@ -1,5 +1,5 @@ with import ./lib.nix; -builtins.groupBy (n: - builtins.substring 0 1 (builtins.hashString "sha256" (toString n)) -) (range 0 31) +builtins.groupBy (n: builtins.substring 0 1 (builtins.hashString "sha256" (toString n))) ( + range 0 31 +) diff --git a/tests/functional/lang/eval-okay-hashfile.nix b/tests/functional/lang/eval-okay-hashfile.nix index 
aff5a185681..aeaf09f43f6 100644 --- a/tests/functional/lang/eval-okay-hashfile.nix +++ b/tests/functional/lang/eval-okay-hashfile.nix @@ -1,4 +1,14 @@ let - paths = [ ./data ./binary-data ]; + paths = [ + ./data + ./binary-data + ]; in - builtins.concatLists (map (hash: map (builtins.hashFile hash) paths) ["md5" "sha1" "sha256" "sha512"]) +builtins.concatLists ( + map (hash: map (builtins.hashFile hash) paths) [ + "md5" + "sha1" + "sha256" + "sha512" + ] +) diff --git a/tests/functional/lang/eval-okay-hashstring.nix b/tests/functional/lang/eval-okay-hashstring.nix index b0f62b245ca..c760b00435e 100644 --- a/tests/functional/lang/eval-okay-hashstring.nix +++ b/tests/functional/lang/eval-okay-hashstring.nix @@ -1,4 +1,15 @@ let - strings = [ "" "text 1" "text 2" ]; + strings = [ + "" + "text 1" + "text 2" + ]; in - builtins.concatLists (map (hash: map (builtins.hashString hash) strings) ["md5" "sha1" "sha256" "sha512"]) +builtins.concatLists ( + map (hash: map (builtins.hashString hash) strings) [ + "md5" + "sha1" + "sha256" + "sha512" + ] +) diff --git a/tests/functional/lang/eval-okay-if.nix b/tests/functional/lang/eval-okay-if.nix index 23e4c74d501..66b9d15b8cc 100644 --- a/tests/functional/lang/eval-okay-if.nix +++ b/tests/functional/lang/eval-okay-if.nix @@ -1 +1,6 @@ -if "foo" != "f" + "oo" then 1 else if false then 2 else 3 +if "foo" != "f" + "oo" then + 1 +else if false then + 2 +else + 3 diff --git a/tests/functional/lang/eval-okay-import.nix b/tests/functional/lang/eval-okay-import.nix index 0b18d941312..484dccac0e1 100644 --- a/tests/functional/lang/eval-okay-import.nix +++ b/tests/functional/lang/eval-okay-import.nix @@ -8,4 +8,5 @@ let builtins = builtins // overrides; } // import ./lib.nix; -in scopedImport overrides ./imported.nix +in +scopedImport overrides ./imported.nix diff --git a/tests/functional/lang/eval-okay-inherit-attr-pos.nix b/tests/functional/lang/eval-okay-inherit-attr-pos.nix index 017ab1d364d..c162d119677 100644 --- a/tests/functional/lang/eval-okay-inherit-attr-pos.nix +++ b/tests/functional/lang/eval-okay-inherit-attr-pos.nix @@ -4,9 +4,9 @@ let y = { inherit d x; }; z = { inherit (y) d x; }; in - [ - (builtins.unsafeGetAttrPos "d" y) - (builtins.unsafeGetAttrPos "x" y) - (builtins.unsafeGetAttrPos "d" z) - (builtins.unsafeGetAttrPos "x" z) - ] +[ + (builtins.unsafeGetAttrPos "d" y) + (builtins.unsafeGetAttrPos "x" y) + (builtins.unsafeGetAttrPos "d" z) + (builtins.unsafeGetAttrPos "x" z) +] diff --git a/tests/functional/lang/eval-okay-inherit-from.nix b/tests/functional/lang/eval-okay-inherit-from.nix index b72a1c639fd..1a0980aafb1 100644 --- a/tests/functional/lang/eval-okay-inherit-from.nix +++ b/tests/functional/lang/eval-okay-inherit-from.nix @@ -1,5 +1,12 @@ let - inherit (builtins.trace "used" { a = 1; b = 2; }) a b; + inherit + (builtins.trace "used" { + a = 1; + b = 2; + }) + a + b + ; x.c = 3; y.d = 4; @@ -13,4 +20,14 @@ let }; }; in - [ a b rec { x.c = []; inherit (x) c; inherit (y) d; __overrides.y.d = []; } merged ] +[ + a + b + rec { + x.c = [ ]; + inherit (x) c; + inherit (y) d; + __overrides.y.d = [ ]; + } + merged +] diff --git a/tests/functional/lang/eval-okay-intersectAttrs.nix b/tests/functional/lang/eval-okay-intersectAttrs.nix index 39d49938cc2..bf4d58a9969 100644 --- a/tests/functional/lang/eval-okay-intersectAttrs.nix +++ b/tests/functional/lang/eval-okay-intersectAttrs.nix @@ -1,6 +1,6 @@ let - alphabet = - { a = "a"; + alphabet = { + a = "a"; b = "b"; c = "c"; d = "d"; @@ -28,23 +28,46 @@ let z = "z"; }; foo = { - inherit (alphabet) 
f o b a r z q u x; + inherit (alphabet) + f + o + b + a + r + z + q + u + x + ; aa = throw "aa"; }; alphabetFail = builtins.mapAttrs throw alphabet; in -[ (builtins.intersectAttrs { a = abort "l1"; } { b = abort "r1"; }) +[ + (builtins.intersectAttrs { a = abort "l1"; } { b = abort "r1"; }) (builtins.intersectAttrs { a = abort "l2"; } { a = 1; }) (builtins.intersectAttrs alphabetFail { a = 1; }) - (builtins.intersectAttrs { a = abort "laa"; } alphabet) + (builtins.intersectAttrs { a = abort "laa"; } alphabet) (builtins.intersectAttrs alphabetFail { m = 1; }) - (builtins.intersectAttrs { m = abort "lam"; } alphabet) + (builtins.intersectAttrs { m = abort "lam"; } alphabet) (builtins.intersectAttrs alphabetFail { n = 1; }) - (builtins.intersectAttrs { n = abort "lan"; } alphabet) - (builtins.intersectAttrs alphabetFail { n = 1; p = 2; }) - (builtins.intersectAttrs { n = abort "lan2"; p = abort "lap"; } alphabet) - (builtins.intersectAttrs alphabetFail { n = 1; p = 2; }) - (builtins.intersectAttrs { n = abort "lan2"; p = abort "lap"; } alphabet) + (builtins.intersectAttrs { n = abort "lan"; } alphabet) + (builtins.intersectAttrs alphabetFail { + n = 1; + p = 2; + }) + (builtins.intersectAttrs { + n = abort "lan2"; + p = abort "lap"; + } alphabet) + (builtins.intersectAttrs alphabetFail { + n = 1; + p = 2; + }) + (builtins.intersectAttrs { + n = abort "lan2"; + p = abort "lap"; + } alphabet) (builtins.intersectAttrs alphabetFail alphabet) (builtins.intersectAttrs alphabet foo == builtins.intersectAttrs foo alphabet) ] diff --git a/tests/functional/lang/eval-okay-list.nix b/tests/functional/lang/eval-okay-list.nix index d433bcf908b..b5045a75378 100644 --- a/tests/functional/lang/eval-okay-list.nix +++ b/tests/functional/lang/eval-okay-list.nix @@ -2,6 +2,11 @@ with import ./lib.nix; let { - body = concat ["foo" "bar" "bla" "test"]; - -} \ No newline at end of file + body = concat [ + "foo" + "bar" + "bla" + "test" + ]; + +} diff --git a/tests/functional/lang/eval-okay-listtoattrs.nix b/tests/functional/lang/eval-okay-listtoattrs.nix index 4186e029b53..1de9d6d62f5 100644 --- a/tests/functional/lang/eval-okay-listtoattrs.nix +++ b/tests/functional/lang/eval-okay-listtoattrs.nix @@ -1,11 +1,24 @@ # this test shows how to use listToAttrs and that evaluation is still lazy (throw isn't called) with import ./lib.nix; -let - asi = name: value : { inherit name value; }; - list = [ ( asi "a" "A" ) ( asi "b" "B" ) ]; +let + asi = name: value: { inherit name value; }; + list = [ + (asi "a" "A") + (asi "b" "B") + ]; a = builtins.listToAttrs list; - b = builtins.listToAttrs ( list ++ list ); - r = builtins.listToAttrs [ (asi "result" [ a b ]) ( asi "throw" (throw "this should not be thrown")) ]; - x = builtins.listToAttrs [ (asi "foo" "bar") (asi "foo" "bla") ]; -in concat (map (x: x.a) r.result) + x.foo + b = builtins.listToAttrs (list ++ list); + r = builtins.listToAttrs [ + (asi "result" [ + a + b + ]) + (asi "throw" (throw "this should not be thrown")) + ]; + x = builtins.listToAttrs [ + (asi "foo" "bar") + (asi "foo" "bla") + ]; +in +concat (map (x: x.a) r.result) + x.foo diff --git a/tests/functional/lang/eval-okay-logic.nix b/tests/functional/lang/eval-okay-logic.nix index fbb12794401..55cd2fc00fd 100644 --- a/tests/functional/lang/eval-okay-logic.nix +++ b/tests/functional/lang/eval-okay-logic.nix @@ -1 +1,2 @@ -assert !false && (true || false) -> true; 1 +assert !false && (true || false) -> true; +1 diff --git a/tests/functional/lang/eval-okay-map.nix b/tests/functional/lang/eval-okay-map.nix index 
a76c1d81145..22059f37a57 100644 --- a/tests/functional/lang/eval-okay-map.nix +++ b/tests/functional/lang/eval-okay-map.nix @@ -1,3 +1,9 @@ with import ./lib.nix; -concat (map (x: x + "bar") [ "foo" "bla" "xyzzy" ]) \ No newline at end of file +concat ( + map (x: x + "bar") [ + "foo" + "bla" + "xyzzy" + ] +) diff --git a/tests/functional/lang/eval-okay-mapattrs.nix b/tests/functional/lang/eval-okay-mapattrs.nix index f075b6275e5..c1182d13db5 100644 --- a/tests/functional/lang/eval-okay-mapattrs.nix +++ b/tests/functional/lang/eval-okay-mapattrs.nix @@ -1,3 +1,6 @@ with import ./lib.nix; -builtins.mapAttrs (name: value: name + "-" + value) { x = "foo"; y = "bar"; } +builtins.mapAttrs (name: value: name + "-" + value) { + x = "foo"; + y = "bar"; +} diff --git a/tests/functional/lang/eval-okay-merge-dynamic-attrs.nix b/tests/functional/lang/eval-okay-merge-dynamic-attrs.nix index f459a554f34..8ee8e503a6a 100644 --- a/tests/functional/lang/eval-okay-merge-dynamic-attrs.nix +++ b/tests/functional/lang/eval-okay-merge-dynamic-attrs.nix @@ -1,9 +1,17 @@ { - set1 = { a = 1; }; - set1 = { "${"b" + ""}" = 2; }; + set1 = { + a = 1; + }; + set1 = { + "${"b" + ""}" = 2; + }; - set2 = { "${"b" + ""}" = 2; }; - set2 = { a = 1; }; + set2 = { + "${"b" + ""}" = 2; + }; + set2 = { + a = 1; + }; set3.a = 1; set3."${"b" + ""}" = 2; diff --git a/tests/functional/lang/eval-okay-nested-with.nix b/tests/functional/lang/eval-okay-nested-with.nix index ba9d79aa79b..ee069eaa1c2 100644 --- a/tests/functional/lang/eval-okay-nested-with.nix +++ b/tests/functional/lang/eval-okay-nested-with.nix @@ -1,3 +1 @@ -with { x = 1; }; -with { x = 2; }; -x +with { x = 1; }; with { x = 2; }; x diff --git a/tests/functional/lang/eval-okay-new-let.nix b/tests/functional/lang/eval-okay-new-let.nix index 73812314150..1a938ce718f 100644 --- a/tests/functional/lang/eval-okay-new-let.nix +++ b/tests/functional/lang/eval-okay-new-let.nix @@ -1,14 +1,16 @@ let - f = z: + f = + z: let x = "foo"; y = "bar"; body = 1; # compat test in - z + x + y; + z + x + y; arg = "xyzzy"; -in f arg +in +f arg diff --git a/tests/functional/lang/eval-okay-null-dynamic-attrs.nix b/tests/functional/lang/eval-okay-null-dynamic-attrs.nix index b060c0bc985..76286b6225c 100644 --- a/tests/functional/lang/eval-okay-null-dynamic-attrs.nix +++ b/tests/functional/lang/eval-okay-null-dynamic-attrs.nix @@ -1 +1 @@ -{ ${null} = true; } == {} +{ ${null} = true; } == { } diff --git a/tests/functional/lang/eval-okay-overrides.nix b/tests/functional/lang/eval-okay-overrides.nix index 719bdc9c05e..1c0d5d7c2ea 100644 --- a/tests/functional/lang/eval-okay-overrides.nix +++ b/tests/functional/lang/eval-okay-overrides.nix @@ -1,8 +1,12 @@ let - overrides = { a = 2; b = 3; }; + overrides = { + a = 2; + b = 3; + }; -in (rec { +in +(rec { __overrides = overrides; x = a; a = 1; diff --git a/tests/functional/lang/eval-okay-parse-flake-ref.nix b/tests/functional/lang/eval-okay-parse-flake-ref.nix index db4ed2742cd..404c5df0824 100644 --- a/tests/functional/lang/eval-okay-parse-flake-ref.nix +++ b/tests/functional/lang/eval-okay-parse-flake-ref.nix @@ -1 +1 @@ - builtins.parseFlakeRef "github:NixOS/nixpkgs/23.05?dir=lib" +builtins.parseFlakeRef "github:NixOS/nixpkgs/23.05?dir=lib" diff --git a/tests/functional/lang/eval-okay-partition.nix b/tests/functional/lang/eval-okay-partition.nix index 846d2ce4948..b9566edf979 100644 --- a/tests/functional/lang/eval-okay-partition.nix +++ b/tests/functional/lang/eval-okay-partition.nix @@ -1,5 +1,8 @@ with import ./lib.nix; -builtins.partition - 
(x: x / 2 * 2 == x) - (builtins.concatLists [ (range 0 10) (range 100 110) ]) +builtins.partition (x: x / 2 * 2 == x) ( + builtins.concatLists [ + (range 0 10) + (range 100 110) + ] +) diff --git a/tests/functional/lang/eval-okay-path.nix b/tests/functional/lang/eval-okay-path.nix index 599b3354147..b8b48aae1a6 100644 --- a/tests/functional/lang/eval-okay-path.nix +++ b/tests/functional/lang/eval-okay-path.nix @@ -1,15 +1,15 @@ [ - (builtins.path - { path = ./.; - filter = path: _: baseNameOf path == "data"; - recursive = true; - sha256 = "1yhm3gwvg5a41yylymgblsclk95fs6jy72w0wv925mmidlhcq4sw"; - name = "output"; - }) - (builtins.path - { path = ./data; - recursive = false; - sha256 = "0k4lwj58f2w5yh92ilrwy9917pycipbrdrr13vbb3yd02j09vfxm"; - name = "output"; - }) + (builtins.path { + path = ./.; + filter = path: _: baseNameOf path == "data"; + recursive = true; + sha256 = "1yhm3gwvg5a41yylymgblsclk95fs6jy72w0wv925mmidlhcq4sw"; + name = "output"; + }) + (builtins.path { + path = ./data; + recursive = false; + sha256 = "0k4lwj58f2w5yh92ilrwy9917pycipbrdrr13vbb3yd02j09vfxm"; + name = "output"; + }) ] diff --git a/tests/functional/lang/eval-okay-patterns.nix b/tests/functional/lang/eval-okay-patterns.nix index 96fd25a0151..b92b232d2fa 100644 --- a/tests/functional/lang/eval-okay-patterns.nix +++ b/tests/functional/lang/eval-okay-patterns.nix @@ -1,16 +1,59 @@ let - f = args@{x, y, z}: x + args.y + z; + f = + args@{ + x, + y, + z, + }: + x + args.y + z; - g = {x, y, z}@args: f args; + g = + { + x, + y, + z, + }@args: + f args; - h = {x ? "d", y ? x, z ? args.x}@args: x + y + z; + h = + { + x ? "d", + y ? x, + z ? args.x, + }@args: + x + y + z; - j = {x, y, z, ...}: x + y + z; + j = + { + x, + y, + z, + ... + }: + x + y + z; in - f {x = "a"; y = "b"; z = "c";} + - g {x = "x"; y = "y"; z = "z";} + - h {x = "D";} + - h {x = "D"; y = "E"; z = "F";} + - j {x = "i"; y = "j"; z = "k"; bla = "bla"; foo = "bar";} +f { + x = "a"; + y = "b"; + z = "c"; +} ++ g { + x = "x"; + y = "y"; + z = "z"; +} ++ h { x = "D"; } ++ h { + x = "D"; + y = "E"; + z = "F"; +} ++ j { + x = "i"; + y = "j"; + z = "k"; + bla = "bla"; + foo = "bar"; +} diff --git a/tests/functional/lang/eval-okay-print.nix b/tests/functional/lang/eval-okay-print.nix index d36ba4da31c..1ad46560235 100644 --- a/tests/functional/lang/eval-okay-print.nix +++ b/tests/functional/lang/eval-okay-print.nix @@ -1 +1,15 @@ -with builtins; trace [(1+1)] [ null toString (deepSeq "x") (a: a) (let x=[x]; in x) ] +with builtins; +trace + [ (1 + 1) ] + [ + null + toString + (deepSeq "x") + (a: a) + ( + let + x = [ x ]; + in + x + ) + ] diff --git a/tests/functional/lang/eval-okay-readFileType.nix b/tests/functional/lang/eval-okay-readFileType.nix index 174fb6c3a02..79beb9a6e25 100644 --- a/tests/functional/lang/eval-okay-readFileType.nix +++ b/tests/functional/lang/eval-okay-readFileType.nix @@ -1,6 +1,6 @@ { - bar = builtins.readFileType ./readDir/bar; - foo = builtins.readFileType ./readDir/foo; + bar = builtins.readFileType ./readDir/bar; + foo = builtins.readFileType ./readDir/foo; linked = builtins.readFileType ./readDir/linked; - ldir = builtins.readFileType ./readDir/ldir; + ldir = builtins.readFileType ./readDir/ldir; } diff --git a/tests/functional/lang/eval-okay-redefine-builtin.nix b/tests/functional/lang/eval-okay-redefine-builtin.nix index df9fc3f37d2..ec95ffa932a 100644 --- a/tests/functional/lang/eval-okay-redefine-builtin.nix +++ b/tests/functional/lang/eval-okay-redefine-builtin.nix @@ -1,3 +1,4 @@ let throw = abort "Error!"; -in 
(builtins.tryEval ).success +in +(builtins.tryEval ).success diff --git a/tests/functional/lang/eval-okay-regex-match.nix b/tests/functional/lang/eval-okay-regex-match.nix index 273e2590713..54b995996f1 100644 --- a/tests/functional/lang/eval-okay-regex-match.nix +++ b/tests/functional/lang/eval-okay-regex-match.nix @@ -8,22 +8,34 @@ let in -assert matches "foobar" "foobar"; -assert matches "fo*" "f"; +assert matches "foobar" "foobar"; +assert matches "fo*" "f"; assert !matches "fo+" "f"; -assert matches "fo*" "fo"; -assert matches "fo*" "foo"; -assert matches "fo+" "foo"; -assert matches "fo{1,2}" "foo"; +assert matches "fo*" "fo"; +assert matches "fo*" "foo"; +assert matches "fo+" "foo"; +assert matches "fo{1,2}" "foo"; assert !matches "fo{1,2}" "fooo"; assert !matches "fo*" "foobar"; -assert matches "[[:space:]]+([^[:space:]]+)[[:space:]]+" " foo "; +assert matches "[[:space:]]+([^[:space:]]+)[[:space:]]+" " foo "; assert !matches "[[:space:]]+([[:upper:]]+)[[:space:]]+" " foo "; assert match "(.*)\\.nix" "foobar.nix" == [ "foobar" ]; assert match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " FOO " == [ "FOO" ]; -assert splitFN "/path/to/foobar.nix" == [ "/path/to/" "/path/to" "foobar" "nix" ]; -assert splitFN "foobar.cc" == [ null null "foobar" "cc" ]; +assert + splitFN "/path/to/foobar.nix" == [ + "/path/to/" + "/path/to" + "foobar" + "nix" + ]; +assert + splitFN "foobar.cc" == [ + null + null + "foobar" + "cc" + ]; true diff --git a/tests/functional/lang/eval-okay-regex-split.nix b/tests/functional/lang/eval-okay-regex-split.nix index 0073e057787..8ab3e60cbb2 100644 --- a/tests/functional/lang/eval-okay-regex-split.nix +++ b/tests/functional/lang/eval-okay-regex-split.nix @@ -1,48 +1,197 @@ with builtins; # Non capturing regex returns empty lists -assert split "foobar" "foobar" == ["" [] ""]; -assert split "fo*" "f" == ["" [] ""]; -assert split "fo+" "f" == ["f"]; -assert split "fo*" "fo" == ["" [] ""]; -assert split "fo*" "foo" == ["" [] ""]; -assert split "fo+" "foo" == ["" [] ""]; -assert split "fo{1,2}" "foo" == ["" [] ""]; -assert split "fo{1,2}" "fooo" == ["" [] "o"]; -assert split "fo*" "foobar" == ["" [] "bar"]; +assert + split "foobar" "foobar" == [ + "" + [ ] + "" + ]; +assert + split "fo*" "f" == [ + "" + [ ] + "" + ]; +assert split "fo+" "f" == [ "f" ]; +assert + split "fo*" "fo" == [ + "" + [ ] + "" + ]; +assert + split "fo*" "foo" == [ + "" + [ ] + "" + ]; +assert + split "fo+" "foo" == [ + "" + [ ] + "" + ]; +assert + split "fo{1,2}" "foo" == [ + "" + [ ] + "" + ]; +assert + split "fo{1,2}" "fooo" == [ + "" + [ ] + "o" + ]; +assert + split "fo*" "foobar" == [ + "" + [ ] + "bar" + ]; # Capturing regex returns a list of sub-matches -assert split "(fo*)" "f" == ["" ["f"] ""]; -assert split "(fo+)" "f" == ["f"]; -assert split "(fo*)" "fo" == ["" ["fo"] ""]; -assert split "(f)(o*)" "f" == ["" ["f" ""] ""]; -assert split "(f)(o*)" "foo" == ["" ["f" "oo"] ""]; -assert split "(fo+)" "foo" == ["" ["foo"] ""]; -assert split "(fo{1,2})" "foo" == ["" ["foo"] ""]; -assert split "(fo{1,2})" "fooo" == ["" ["foo"] "o"]; -assert split "(fo*)" "foobar" == ["" ["foo"] "bar"]; +assert + split "(fo*)" "f" == [ + "" + [ "f" ] + "" + ]; +assert split "(fo+)" "f" == [ "f" ]; +assert + split "(fo*)" "fo" == [ + "" + [ "fo" ] + "" + ]; +assert + split "(f)(o*)" "f" == [ + "" + [ + "f" + "" + ] + "" + ]; +assert + split "(f)(o*)" "foo" == [ + "" + [ + "f" + "oo" + ] + "" + ]; +assert + split "(fo+)" "foo" == [ + "" + [ "foo" ] + "" + ]; +assert + split "(fo{1,2})" "foo" == [ + "" + [ "foo" ] + "" + 
]; +assert + split "(fo{1,2})" "fooo" == [ + "" + [ "foo" ] + "o" + ]; +assert + split "(fo*)" "foobar" == [ + "" + [ "foo" ] + "bar" + ]; # Matches are greedy. -assert split "(o+)" "oooofoooo" == ["" ["oooo"] "f" ["oooo"] ""]; +assert + split "(o+)" "oooofoooo" == [ + "" + [ "oooo" ] + "f" + [ "oooo" ] + "" + ]; # Matches multiple times. -assert split "(b)" "foobarbaz" == ["foo" ["b"] "ar" ["b"] "az"]; +assert + split "(b)" "foobarbaz" == [ + "foo" + [ "b" ] + "ar" + [ "b" ] + "az" + ]; # Split large strings containing newlines. null are inserted when a # pattern within the current did not match anything. -assert split "[[:space:]]+|([',.!?])" '' - Nix Rocks! - That's why I use it. -'' == [ - "Nix" [ null ] "Rocks" ["!"] "" [ null ] - "That" ["'"] "s" [ null ] "why" [ null ] "I" [ null ] "use" [ null ] "it" ["."] "" [ null ] - "" -]; +assert + split "[[:space:]]+|([',.!?])" '' + Nix Rocks! + That's why I use it. + '' == [ + "Nix" + [ null ] + "Rocks" + [ "!" ] + "" + [ null ] + "That" + [ "'" ] + "s" + [ null ] + "why" + [ null ] + "I" + [ null ] + "use" + [ null ] + "it" + [ "." ] + "" + [ null ] + "" + ]; # Documentation examples -assert split "(a)b" "abc" == [ "" [ "a" ] "c" ]; -assert split "([ac])" "abc" == [ "" [ "a" ] "b" [ "c" ] "" ]; -assert split "(a)|(c)" "abc" == [ "" [ "a" null ] "b" [ null "c" ] "" ]; -assert split "([[:upper:]]+)" " FOO " == [ " " [ "FOO" ] " " ]; +assert + split "(a)b" "abc" == [ + "" + [ "a" ] + "c" + ]; +assert + split "([ac])" "abc" == [ + "" + [ "a" ] + "b" + [ "c" ] + "" + ]; +assert + split "(a)|(c)" "abc" == [ + "" + [ + "a" + null + ] + "b" + [ + null + "c" + ] + "" + ]; +assert + split "([[:upper:]]+)" " FOO " == [ + " " + [ "FOO" ] + " " + ]; true diff --git a/tests/functional/lang/eval-okay-regression-20220125.nix b/tests/functional/lang/eval-okay-regression-20220125.nix index 48550237394..1c4b8e09f39 100644 --- a/tests/functional/lang/eval-okay-regression-20220125.nix +++ b/tests/functional/lang/eval-okay-regression-20220125.nix @@ -1,2 +1 @@ ((__curPosFoo: __curPosFoo) 1) + ((__curPosBar: __curPosBar) 2) - diff --git a/tests/functional/lang/eval-okay-regrettable-rec-attrset-merge.nix b/tests/functional/lang/eval-okay-regrettable-rec-attrset-merge.nix index 8df6a2ad81d..e92ae8125a6 100644 --- a/tests/functional/lang/eval-okay-regrettable-rec-attrset-merge.nix +++ b/tests/functional/lang/eval-okay-regrettable-rec-attrset-merge.nix @@ -1,3 +1,10 @@ # This is for backwards compatibility, not because we like it. # See https://github.com/NixOS/nix/issues/9020. -{ a = rec { b = c + 1; d = 2; }; a.c = d + 3; }.a.b +{ + a = rec { + b = c + 1; + d = 2; + }; + a.c = d + 3; +} +.a.b diff --git a/tests/functional/lang/eval-okay-remove.nix b/tests/functional/lang/eval-okay-remove.nix index 4ad5ba897fa..a7ee3a07148 100644 --- a/tests/functional/lang/eval-okay-remove.nix +++ b/tests/functional/lang/eval-okay-remove.nix @@ -1,5 +1,8 @@ let { - attrs = {x = 123; y = 456;}; + attrs = { + x = 123; + y = 456; + }; - body = (removeAttrs attrs ["x"]).y; -} \ No newline at end of file + body = (removeAttrs attrs [ "x" ]).y; +} diff --git a/tests/functional/lang/eval-okay-repeated-empty-attrs.nix b/tests/functional/lang/eval-okay-repeated-empty-attrs.nix index 030a3b85c76..0749e21a57c 100644 --- a/tests/functional/lang/eval-okay-repeated-empty-attrs.nix +++ b/tests/functional/lang/eval-okay-repeated-empty-attrs.nix @@ -1,2 +1,5 @@ # Tests that empty attribute sets are not printed as `«repeated»`. 
-[ {} {} ] +[ + { } + { } +] diff --git a/tests/functional/lang/eval-okay-repeated-empty-list.nix b/tests/functional/lang/eval-okay-repeated-empty-list.nix index 376c51be886..7e24fe81b27 100644 --- a/tests/functional/lang/eval-okay-repeated-empty-list.nix +++ b/tests/functional/lang/eval-okay-repeated-empty-list.nix @@ -1 +1,4 @@ -[ [] [] ] +[ + [ ] + [ ] +] diff --git a/tests/functional/lang/eval-okay-replacestrings.nix b/tests/functional/lang/eval-okay-replacestrings.nix index a803e65199a..81a932a1daa 100644 --- a/tests/functional/lang/eval-okay-replacestrings.nix +++ b/tests/functional/lang/eval-okay-replacestrings.nix @@ -1,12 +1,13 @@ with builtins; -[ (replaceStrings ["o"] ["a"] "foobar") - (replaceStrings ["o"] [""] "foobar") - (replaceStrings ["oo"] ["u"] "foobar") - (replaceStrings ["oo" "a"] ["a" "oo"] "foobar") - (replaceStrings ["oo" "oo"] ["u" "i"] "foobar") - (replaceStrings [""] ["X"] "abc") - (replaceStrings [""] ["X"] "") - (replaceStrings ["-"] ["_"] "a-b") - (replaceStrings ["oo" "XX"] ["u" (throw "unreachable")] "foobar") +[ + (replaceStrings [ "o" ] [ "a" ] "foobar") + (replaceStrings [ "o" ] [ "" ] "foobar") + (replaceStrings [ "oo" ] [ "u" ] "foobar") + (replaceStrings [ "oo" "a" ] [ "a" "oo" ] "foobar") + (replaceStrings [ "oo" "oo" ] [ "u" "i" ] "foobar") + (replaceStrings [ "" ] [ "X" ] "abc") + (replaceStrings [ "" ] [ "X" ] "") + (replaceStrings [ "-" ] [ "_" ] "a-b") + (replaceStrings [ "oo" "XX" ] [ "u" (throw "unreachable") ] "foobar") ] diff --git a/tests/functional/lang/eval-okay-scope-1.nix b/tests/functional/lang/eval-okay-scope-1.nix index fa38a7174e0..b7bbcc432d5 100644 --- a/tests/functional/lang/eval-okay-scope-1.nix +++ b/tests/functional/lang/eval-okay-scope-1.nix @@ -1,6 +1,13 @@ -(({x}: x: +( + ( + { x }: + x: - { x = 1; - y = x; - } -) {x = 2;} 3).y + { + x = 1; + y = x; + } + ) + { x = 2; } + 3 +).y diff --git a/tests/functional/lang/eval-okay-scope-2.nix b/tests/functional/lang/eval-okay-scope-2.nix index eb8b02bc499..54f7ec3b230 100644 --- a/tests/functional/lang/eval-okay-scope-2.nix +++ b/tests/functional/lang/eval-okay-scope-2.nix @@ -1,6 +1,12 @@ -((x: {x}: - rec { - x = 1; - y = x; - } -) 2 {x = 3;}).y +( + ( + x: + { x }: + rec { + x = 1; + y = x; + } + ) + 2 + { x = 3; } +).y diff --git a/tests/functional/lang/eval-okay-scope-3.nix b/tests/functional/lang/eval-okay-scope-3.nix index 10d6bc04d83..6a77583b7da 100644 --- a/tests/functional/lang/eval-okay-scope-3.nix +++ b/tests/functional/lang/eval-okay-scope-3.nix @@ -1,6 +1,13 @@ -((x: as: {x}: - rec { - inherit (as) x; - y = x; - } -) 2 {x = 4;} {x = 3;}).y +( + ( + x: as: + { x }: + rec { + inherit (as) x; + y = x; + } + ) + 2 + { x = 4; } + { x = 3; } +).y diff --git a/tests/functional/lang/eval-okay-scope-4.nix b/tests/functional/lang/eval-okay-scope-4.nix index dc8243bc854..ccae8564cda 100644 --- a/tests/functional/lang/eval-okay-scope-4.nix +++ b/tests/functional/lang/eval-okay-scope-4.nix @@ -3,8 +3,13 @@ let { x = "a"; y = "b"; - f = {x ? y, y ? x}: x + y; - - body = f {x = "c";} + f {y = "d";}; + f = + { + x ? y, + y ? x, + }: + x + y; + + body = f { x = "c"; } + f { y = "d"; }; } diff --git a/tests/functional/lang/eval-okay-scope-6.nix b/tests/functional/lang/eval-okay-scope-6.nix index 0995d4e7e7e..be2cc31a1f2 100644 --- a/tests/functional/lang/eval-okay-scope-6.nix +++ b/tests/functional/lang/eval-okay-scope-6.nix @@ -1,7 +1,12 @@ let { - f = {x ? y, y ? x}: x + y; + f = + { + x ? y, + y ? 
x, + }: + x + y; - body = f {x = "c";} + f {y = "d";}; + body = f { x = "c"; } + f { y = "d"; }; } diff --git a/tests/functional/lang/eval-okay-scope-7.nix b/tests/functional/lang/eval-okay-scope-7.nix index 4da02968f6b..91f22f55388 100644 --- a/tests/functional/lang/eval-okay-scope-7.nix +++ b/tests/functional/lang/eval-okay-scope-7.nix @@ -3,4 +3,5 @@ rec { x = { y = 1; }; -}.y +} +.y diff --git a/tests/functional/lang/eval-okay-search-path.nix b/tests/functional/lang/eval-okay-search-path.nix index 6fe33decc01..702e1b64c15 100644 --- a/tests/functional/lang/eval-okay-search-path.nix +++ b/tests/functional/lang/eval-okay-search-path.nix @@ -6,5 +6,16 @@ assert isFunction (import ); assert length __nixPath == 5; assert length (filter (x: baseNameOf x.path == "dir4") __nixPath) == 1; -import + import + import + import - + (let __nixPath = [ { path = ./dir2; } { path = ./dir1; } ]; in import ) +import ++ import ++ import ++ import ++ ( + let + __nixPath = [ + { path = ./dir2; } + { path = ./dir1; } + ]; + in + import +) diff --git a/tests/functional/lang/eval-okay-sort.nix b/tests/functional/lang/eval-okay-sort.nix index 50aa78e4032..412bda4a09f 100644 --- a/tests/functional/lang/eval-okay-sort.nix +++ b/tests/functional/lang/eval-okay-sort.nix @@ -1,20 +1,64 @@ with builtins; -[ (sort lessThan [ 483 249 526 147 42 77 ]) - (sort (x: y: y < x) [ 483 249 526 147 42 77 ]) - (sort lessThan [ "foo" "bar" "xyzzy" "fnord" ]) - (sort (x: y: x.key < y.key) - [ { key = 1; value = "foo"; } { key = 2; value = "bar"; } { key = 1; value = "fnord"; } ]) +[ (sort lessThan [ - [ 1 6 ] + 483 + 249 + 526 + 147 + 42 + 77 + ]) + (sort (x: y: y < x) [ + 483 + 249 + 526 + 147 + 42 + 77 + ]) + (sort lessThan [ + "foo" + "bar" + "xyzzy" + "fnord" + ]) + (sort (x: y: x.key < y.key) [ + { + key = 1; + value = "foo"; + } + { + key = 2; + value = "bar"; + } + { + key = 1; + value = "fnord"; + } + ]) + (sort lessThan [ + [ + 1 + 6 + ] [ ] - [ 2 3 ] + [ + 2 + 3 + ] [ 3 ] - [ 1 5 ] + [ + 1 + 5 + ] [ 2 ] [ 1 ] [ ] - [ 1 4 ] + [ + 1 + 4 + ] [ 3 ] ]) ] diff --git a/tests/functional/lang/eval-okay-string.nix b/tests/functional/lang/eval-okay-string.nix index 47cc989ad46..d3b743fdbed 100644 --- a/tests/functional/lang/eval-okay-string.nix +++ b/tests/functional/lang/eval-okay-string.nix @@ -1,12 +1,13 @@ -"foo" + "bar" - + toString (/a/b + /c/d) - + toString (/foo/bar + "/../xyzzy/." + "/foo.txt") - + ("/../foo" + toString /x/y) - + "escape: \"quote\" \n \\" - + "end +"foo" ++ "bar" ++ toString (/a/b + /c/d) ++ toString (/foo/bar + "/../xyzzy/." + "/foo.txt") ++ ("/../foo" + toString /x/y) ++ "escape: \"quote\" \n \\" ++ "end of line" - + "foo${if true then "b${"a" + "r"}" else "xyzzy"}blaat" - + "foo$bar" - + "$\"$\"" - + "$" ++ "foo${if true then "b${"a" + "r"}" else "xyzzy"}blaat" ++ "foo$bar" ++ "$\"$\"" ++ "$" diff --git a/tests/functional/lang/eval-okay-strings-as-attrs-names.nix b/tests/functional/lang/eval-okay-strings-as-attrs-names.nix index 5e40928dbe3..158dc8e754e 100644 --- a/tests/functional/lang/eval-okay-strings-as-attrs-names.nix +++ b/tests/functional/lang/eval-okay-strings-as-attrs-names.nix @@ -14,7 +14,5 @@ let # variable. 
"foo bar" = 1; -in t1 == "test" - && t2 == "caseok" - && t3 == true - && t4 == ["key 1"] +in +t1 == "test" && t2 == "caseok" && t3 == true && t4 == [ "key 1" ] diff --git a/tests/functional/lang/eval-okay-substring-context.nix b/tests/functional/lang/eval-okay-substring-context.nix index d0ef70d4e67..9e9d3a1aa95 100644 --- a/tests/functional/lang/eval-okay-substring-context.nix +++ b/tests/functional/lang/eval-okay-substring-context.nix @@ -2,10 +2,15 @@ with builtins; let - s = "${builtins.derivation { name = "test"; builder = "/bin/sh"; system = "x86_64-linux"; }}"; + s = "${builtins.derivation { + name = "test"; + builder = "/bin/sh"; + system = "x86_64-linux"; + }}"; in -if getContext s == getContext "${substring 0 0 s + unsafeDiscardStringContext s}" -then "okay" -else throw "empty substring should preserve context" +if getContext s == getContext "${substring 0 0 s + unsafeDiscardStringContext s}" then + "okay" +else + throw "empty substring should preserve context" diff --git a/tests/functional/lang/eval-okay-tail-call-1.nix b/tests/functional/lang/eval-okay-tail-call-1.nix index a3962ce3fdb..d3ec0c9adfd 100644 --- a/tests/functional/lang/eval-okay-tail-call-1.nix +++ b/tests/functional/lang/eval-okay-tail-call-1.nix @@ -1,3 +1,4 @@ let f = n: if n == 100000 then n else f (n + 1); -in f 0 +in +f 0 diff --git a/tests/functional/lang/eval-okay-tojson.nix b/tests/functional/lang/eval-okay-tojson.nix index ce67943bead..863c0766392 100644 --- a/tests/functional/lang/eval-okay-tojson.nix +++ b/tests/functional/lang/eval-okay-tojson.nix @@ -1,13 +1,26 @@ -builtins.toJSON - { a = 123; - b = -456; - c = "foo"; - d = "foo\n\"bar\""; - e = true; - f = false; - g = [ 1 2 3 ]; - h = [ "a" [ "b" { "foo\nbar" = {}; } ] ]; - i = 1 + 2; - j = 1.44; - k = { __toString = self: self.a; a = "foo"; }; - } +builtins.toJSON { + a = 123; + b = -456; + c = "foo"; + d = "foo\n\"bar\""; + e = true; + f = false; + g = [ + 1 + 2 + 3 + ]; + h = [ + "a" + [ + "b" + { "foo\nbar" = { }; } + ] + ]; + i = 1 + 2; + j = 1.44; + k = { + __toString = self: self.a; + a = "foo"; + }; +} diff --git a/tests/functional/lang/eval-okay-toxml2.nix b/tests/functional/lang/eval-okay-toxml2.nix index ff1791b30eb..0d5989a50e7 100644 --- a/tests/functional/lang/eval-okay-toxml2.nix +++ b/tests/functional/lang/eval-okay-toxml2.nix @@ -1 +1,8 @@ -builtins.toXML [("a" + "b") 10 (rec {x = "x"; y = x;})] +builtins.toXML [ + ("a" + "b") + 10 + (rec { + x = "x"; + y = x; + }) +] diff --git a/tests/functional/lang/eval-okay-tryeval.nix b/tests/functional/lang/eval-okay-tryeval.nix index 629bc440a85..22b23d88342 100644 --- a/tests/functional/lang/eval-okay-tryeval.nix +++ b/tests/functional/lang/eval-okay-tryeval.nix @@ -1,5 +1,8 @@ { x = builtins.tryEval "x"; - y = builtins.tryEval (assert false; "y"); + y = builtins.tryEval ( + assert false; + "y" + ); z = builtins.tryEval (throw "bla"); } diff --git a/tests/functional/lang/eval-okay-types.nix b/tests/functional/lang/eval-okay-types.nix index 9b58be5d1dd..0814489edd3 100644 --- a/tests/functional/lang/eval-okay-types.nix +++ b/tests/functional/lang/eval-okay-types.nix @@ -1,6 +1,7 @@ with builtins; -[ (isNull null) +[ + (isNull null) (isNull (x: x)) (isFunction (x: x)) (isFunction "fnord") @@ -29,7 +30,11 @@ with builtins; (typeOf "xyzzy") (typeOf null) (typeOf { x = 456; }) - (typeOf [ 1 2 3 ]) + (typeOf [ + 1 + 2 + 3 + ]) (typeOf (x: x)) (typeOf ((x: y: x) 1)) (typeOf map) diff --git a/tests/functional/lang/eval-okay-versions.nix b/tests/functional/lang/eval-okay-versions.nix index 
e9111f5f433..3456015e538 100644 --- a/tests/functional/lang/eval-okay-versions.nix +++ b/tests/functional/lang/eval-okay-versions.nix @@ -10,10 +10,13 @@ let lt = builtins.sub 0 1; gt = 1; - versionTest = v1: v2: expected: - let d1 = builtins.compareVersions v1 v2; - d2 = builtins.compareVersions v2 v1; - in d1 == builtins.sub 0 d2 && d1 == expected; + versionTest = + v1: v2: expected: + let + d1 = builtins.compareVersions v1 v2; + d2 = builtins.compareVersions v2 v1; + in + d1 == builtins.sub 0 d2 && d1 == expected; tests = [ ((builtins.parseDrvName name1).name == "hello") @@ -40,4 +43,5 @@ let (versionTest "2.3pre1" "2.3q" lt) ]; -in (import ./lib.nix).and tests +in +(import ./lib.nix).and tests diff --git a/tests/functional/lang/eval-okay-xml.nix b/tests/functional/lang/eval-okay-xml.nix index 9ee9f8a0b4f..9785c66ef42 100644 --- a/tests/functional/lang/eval-okay-xml.nix +++ b/tests/functional/lang/eval-okay-xml.nix @@ -10,12 +10,31 @@ rec { c = "foo" + "bar"; - f = {z, x, y}: if y then x else z; + f = + { + z, + x, + y, + }: + if y then x else z; id = x: x; - at = args@{x, y, z}: x; - - ellipsis = {x, y, z, ...}: x; + at = + args@{ + x, + y, + z, + }: + x; + + ellipsis = + { + x, + y, + z, + ... + }: + x; } diff --git a/tests/functional/lang/eval-okay-zipAttrsWith.nix b/tests/functional/lang/eval-okay-zipAttrsWith.nix index 877d4e5fa31..20f6891115e 100644 --- a/tests/functional/lang/eval-okay-zipAttrsWith.nix +++ b/tests/functional/lang/eval-okay-zipAttrsWith.nix @@ -3,7 +3,6 @@ with import ./lib.nix; let str = builtins.hashString "sha256" "test"; in -builtins.zipAttrsWith - (n: v: { inherit n v; }) - (map (n: { ${builtins.substring n 1 str} = n; }) - (range 0 31)) +builtins.zipAttrsWith (n: v: { inherit n v; }) ( + map (n: { ${builtins.substring n 1 str} = n; }) (range 0 31) +) diff --git a/tests/functional/lang/lib.nix b/tests/functional/lang/lib.nix index 028a538314b..126128abe7a 100644 --- a/tests/functional/lang/lib.nix +++ b/tests/functional/lang/lib.nix @@ -2,60 +2,76 @@ with builtins; rec { - fold = op: nul: list: - if list == [] - then nul - else op (head list) (fold op nul (tail list)); + fold = + op: nul: list: + if list == [ ] then nul else op (head list) (fold op nul (tail list)); - concat = - fold (x: y: x + y) ""; + concat = fold (x: y: x + y) ""; and = fold (x: y: x && y) true; - flatten = x: - if isList x - then fold (x: y: (flatten x) ++ y) [] x - else [x]; + flatten = x: if isList x then fold (x: y: (flatten x) ++ y) [ ] x else [ x ]; sum = foldl' (x: y: add x y) 0; - hasSuffix = ext: fileName: - let lenFileName = stringLength fileName; - lenExt = stringLength ext; - in !(lessThan lenFileName lenExt) && - substring (sub lenFileName lenExt) lenFileName fileName == ext; + hasSuffix = + ext: fileName: + let + lenFileName = stringLength fileName; + lenExt = stringLength ext; + in + !(lessThan lenFileName lenExt) && substring (sub lenFileName lenExt) lenFileName fileName == ext; # Split a list at the given position. - splitAt = pos: list: - if pos == 0 then {first = []; second = list;} else - if list == [] then {first = []; second = [];} else - let res = splitAt (sub pos 1) (tail list); - in {first = [(head list)] ++ res.first; second = res.second;}; + splitAt = + pos: list: + if pos == 0 then + { + first = [ ]; + second = list; + } + else if list == [ ] then + { + first = [ ]; + second = [ ]; + } + else + let + res = splitAt (sub pos 1) (tail list); + in + { + first = [ (head list) ] ++ res.first; + second = res.second; + }; # Stable merge sort. 
- sortBy = comp: list: - if lessThan 1 (length list) - then + sortBy = + comp: list: + if lessThan 1 (length list) then let split = splitAt (div (length list) 2) list; first = sortBy comp split.first; second = sortBy comp split.second; - in mergeLists comp first second - else list; + in + mergeLists comp first second + else + list; - mergeLists = comp: list1: list2: - if list1 == [] then list2 else - if list2 == [] then list1 else - if comp (head list2) (head list1) then [(head list2)] ++ mergeLists comp list1 (tail list2) else - [(head list1)] ++ mergeLists comp (tail list1) list2; + mergeLists = + comp: list1: list2: + if list1 == [ ] then + list2 + else if list2 == [ ] then + list1 + else if comp (head list2) (head list1) then + [ (head list2) ] ++ mergeLists comp list1 (tail list2) + else + [ (head list1) ] ++ mergeLists comp (tail list1) list2; id = x: x; const = x: y: x; - range = first: last: - if first > last - then [] - else genList (n: first + n) (last - first + 1); + range = first: last: if first > last then [ ] else genList (n: first + n) (last - first + 1); } diff --git a/tests/functional/linux-sandbox-cert-test.nix b/tests/functional/linux-sandbox-cert-test.nix index 2fc083ea932..82989c64f88 100644 --- a/tests/functional/linux-sandbox-cert-test.nix +++ b/tests/functional/linux-sandbox-cert-test.nix @@ -22,9 +22,12 @@ mkDerivation ( # derivations being cached, and do not want to compute the right hash. false; ''; - } // { - fixed-output = { outputHash = "sha256:0000000000000000000000000000000000000000000000000000000000000000"; }; + } + // { + fixed-output = { + outputHash = "sha256:0000000000000000000000000000000000000000000000000000000000000000"; + }; normal = { }; - }.${mode} + } + .${mode} ) - diff --git a/tests/functional/multiple-outputs.nix b/tests/functional/multiple-outputs.nix index 6ba7c523d8e..2c9243097d5 100644 --- a/tests/functional/multiple-outputs.nix +++ b/tests/functional/multiple-outputs.nix @@ -5,94 +5,111 @@ rec { # Want to ensure that "out" doesn't get a suffix on it's path. 
nameCheck = mkDerivation { name = "multiple-outputs-a"; - outputs = [ "out" "dev" ]; - builder = builtins.toFile "builder.sh" - '' - mkdir $first $second - test -z $all - echo "first" > $first/file - echo "second" > $second/file - ln -s $first $second/link - ''; + outputs = [ + "out" + "dev" + ]; + builder = builtins.toFile "builder.sh" '' + mkdir $first $second + test -z $all + echo "first" > $first/file + echo "second" > $second/file + ln -s $first $second/link + ''; helloString = "Hello, world!"; }; a = mkDerivation { name = "multiple-outputs-a"; - outputs = [ "first" "second" ]; - builder = builtins.toFile "builder.sh" - '' - mkdir $first $second - test -z $all - echo "first" > $first/file - echo "second" > $second/file - ln -s $first $second/link - ''; + outputs = [ + "first" + "second" + ]; + builder = builtins.toFile "builder.sh" '' + mkdir $first $second + test -z $all + echo "first" > $first/file + echo "second" > $second/file + ln -s $first $second/link + ''; helloString = "Hello, world!"; }; use-a = mkDerivation { name = "use-a"; inherit (a) first second; - builder = builtins.toFile "builder.sh" - '' - cat $first/file $second/file >$out - ''; + builder = builtins.toFile "builder.sh" '' + cat $first/file $second/file >$out + ''; }; b = mkDerivation { - defaultOutput = assert a.second.helloString == "Hello, world!"; a; - firstOutput = assert a.outputName == "first"; a.first.first; - secondOutput = assert a.second.outputName == "second"; a.second.first.first.second.second.first.second; + defaultOutput = + assert a.second.helloString == "Hello, world!"; + a; + firstOutput = + assert a.outputName == "first"; + a.first.first; + secondOutput = + assert a.second.outputName == "second"; + a.second.first.first.second.second.first.second; allOutputs = a.all; name = "multiple-outputs-b"; - builder = builtins.toFile "builder.sh" - '' - mkdir $out - test "$firstOutput $secondOutput" = "$allOutputs" - test "$defaultOutput" = "$firstOutput" - test "$(cat $firstOutput/file)" = "first" - test "$(cat $secondOutput/file)" = "second" - echo "success" > $out/file - ''; + builder = builtins.toFile "builder.sh" '' + mkdir $out + test "$firstOutput $secondOutput" = "$allOutputs" + test "$defaultOutput" = "$firstOutput" + test "$(cat $firstOutput/file)" = "first" + test "$(cat $secondOutput/file)" = "second" + echo "success" > $out/file + ''; }; c = mkDerivation { name = "multiple-outputs-c"; drv = b.drvPath; - builder = builtins.toFile "builder.sh" - '' - mkdir $out - ln -s $drv $out/drv - ''; + builder = builtins.toFile "builder.sh" '' + mkdir $out + ln -s $drv $out/drv + ''; }; d = mkDerivation { name = "multiple-outputs-d"; drv = builtins.unsafeDiscardOutputDependency b.drvPath; - builder = builtins.toFile "builder.sh" - '' - mkdir $out - echo $drv > $out/drv - ''; + builder = builtins.toFile "builder.sh" '' + mkdir $out + echo $drv > $out/drv + ''; }; - cyclic = (mkDerivation { - name = "cyclic-outputs"; - outputs = [ "a" "b" "c" ]; - builder = builtins.toFile "builder.sh" - '' + cyclic = + (mkDerivation { + name = "cyclic-outputs"; + outputs = [ + "a" + "b" + "c" + ]; + builder = builtins.toFile "builder.sh" '' mkdir $a $b $c echo $a > $b/foo echo $b > $c/bar echo $c > $a/baz ''; - }).a; + }).a; e = mkDerivation { name = "multiple-outputs-e"; - outputs = [ "a_a" "b" "c" ]; - meta.outputsToInstall = [ "a_a" "b" ]; + outputs = [ + "a_a" + "b" + "c" + ]; + meta.outputsToInstall = [ + "a_a" + "b" + ]; buildCommand = "mkdir $a_a $b $c"; }; @@ -104,33 +121,37 @@ rec { independent = mkDerivation { name = 
"multiple-outputs-independent"; - outputs = [ "first" "second" ]; - builder = builtins.toFile "builder.sh" - '' - mkdir $first $second - test -z $all - echo "first" > $first/file - echo "second" > $second/file - ''; + outputs = [ + "first" + "second" + ]; + builder = builtins.toFile "builder.sh" '' + mkdir $first $second + test -z $all + echo "first" > $first/file + echo "second" > $second/file + ''; }; use-independent = mkDerivation { name = "use-independent"; inherit (a) first second; - builder = builtins.toFile "builder.sh" - '' - cat $first/file $second/file >$out - ''; + builder = builtins.toFile "builder.sh" '' + cat $first/file $second/file >$out + ''; }; invalid-output-name-1 = mkDerivation { name = "invalid-output-name-1"; - outputs = [ "out/"]; + outputs = [ "out/" ]; }; invalid-output-name-2 = mkDerivation { name = "invalid-output-name-2"; - outputs = [ "x" "foo$"]; + outputs = [ + "x" + "foo$" + ]; }; } diff --git a/tests/functional/nar-access.nix b/tests/functional/nar-access.nix index 9948abe59ff..b1e88189a39 100644 --- a/tests/functional/nar-access.nix +++ b/tests/functional/nar-access.nix @@ -1,23 +1,22 @@ with import ./config.nix; rec { - a = mkDerivation { - name = "nar-index-a"; - builder = builtins.toFile "builder.sh" - '' - mkdir $out - mkdir $out/foo - touch $out/foo-x - touch $out/foo/bar - touch $out/foo/baz - touch $out/qux - mkdir $out/zyx + a = mkDerivation { + name = "nar-index-a"; + builder = builtins.toFile "builder.sh" '' + mkdir $out + mkdir $out/foo + touch $out/foo-x + touch $out/foo/bar + touch $out/foo/baz + touch $out/qux + mkdir $out/zyx - cat >$out/foo/data <$out/foo/data < $out - '' else '' - cp -r ${../common} ./common - cp ${../common.sh} ./common.sh - cp ${../config.nix} ./config.nix - cp -r ${./.} ./nested-sandboxing + buildCommand = + '' + set -x + set -eu -o pipefail + '' + + ( + if altitude == 0 then + '' + echo Deep enough! > $out + '' + else + '' + cp -r ${../common} ./common + cp ${../common.sh} ./common.sh + cp ${../config.nix} ./config.nix + cp -r ${./.} ./nested-sandboxing - export PATH=${builtins.getEnv "NIX_BIN_DIR"}:$PATH + export PATH=${builtins.getEnv "NIX_BIN_DIR"}:$PATH - export _NIX_TEST_SOURCE_DIR=$PWD - export _NIX_TEST_BUILD_DIR=$PWD + export _NIX_TEST_SOURCE_DIR=$PWD + export _NIX_TEST_BUILD_DIR=$PWD - source common.sh - source ./nested-sandboxing/command.sh + source common.sh + source ./nested-sandboxing/command.sh - runNixBuild ${storeFun} ${toString altitude} >> $out - ''); + runNixBuild ${storeFun} ${toString altitude} >> $out + '' + ); } diff --git a/tests/functional/package.nix b/tests/functional/package.nix index d1582b05d14..74c034196fd 100644 --- a/tests/functional/package.nix +++ b/tests/functional/package.nix @@ -1,103 +1,110 @@ -{ lib -, stdenv -, mkMesonDerivation +{ + lib, + stdenv, + mkMesonDerivation, -, meson -, ninja -, pkg-config + meson, + ninja, + pkg-config, -, jq -, git -, mercurial -, util-linux + jq, + git, + mercurial, + util-linux, -, nix-store -, nix-expr -, nix-cli + nix-store, + nix-expr, + nix-cli, -, busybox-sandbox-shell ? null + busybox-sandbox-shell ? null, -# Configuration Options + # Configuration Options -, pname ? "nix-functional-tests" -, version + pname ? "nix-functional-tests", + version, -# For running the functional tests against a different pre-built Nix. -, test-daemon ? null + # For running the functional tests against a different pre-built Nix. + test-daemon ? 
null, }: let inherit (lib) fileset; in -mkMesonDerivation (finalAttrs: { - inherit pname version; - - workDir = ./.; - fileset = fileset.unions [ - ../../scripts/nix-profile.sh.in - ../../.version - ../../tests/functional - ./. - ]; - - # Hack for sake of the dev shell - passthru.externalNativeBuildInputs = [ - meson - ninja - pkg-config - - jq - git - mercurial - ] ++ lib.optionals stdenv.hostPlatform.isLinux [ - # For various sandboxing tests that needs a statically-linked shell, - # etc. - busybox-sandbox-shell - # For Overlay FS tests need `mount`, `umount`, and `unshare`. - # For `script` command (ensuring a TTY) - # TODO use `unixtools` to be precise over which executables instead? - util-linux - ]; - - nativeBuildInputs = finalAttrs.passthru.externalNativeBuildInputs ++ [ - nix-cli - ]; - - buildInputs = [ - nix-store - nix-expr - ]; - - preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. - '' - chmod u+w ./.version - echo ${version} > ../../../.version - '' - # TEMP hack for Meson before make is gone, where - # `src/nix-functional-tests` is during the transition a symlink and - # not the actual directory directory. - + '' - cd $(readlink -e $PWD) - echo $PWD | grep tests/functional +mkMesonDerivation ( + finalAttrs: + { + inherit pname version; + + workDir = ./.; + fileset = fileset.unions [ + ../../scripts/nix-profile.sh.in + ../../.version + ../../tests/functional + ./. + ]; + + # Hack for sake of the dev shell + passthru.externalNativeBuildInputs = + [ + meson + ninja + pkg-config + + jq + git + mercurial + ] + ++ lib.optionals stdenv.hostPlatform.isLinux [ + # For various sandboxing tests that needs a statically-linked shell, + # etc. + busybox-sandbox-shell + # For Overlay FS tests need `mount`, `umount`, and `unshare`. + # For `script` command (ensuring a TTY) + # TODO use `unixtools` to be precise over which executables instead? + util-linux + ]; + + nativeBuildInputs = finalAttrs.passthru.externalNativeBuildInputs ++ [ + nix-cli + ]; + + buildInputs = [ + nix-store + nix-expr + ]; + + preConfigure = + # "Inline" .version so it's not a symlink, and includes the suffix. + # Do the meson utils, without modification. + '' + chmod u+w ./.version + echo ${version} > ../../../.version + '' + # TEMP hack for Meson before make is gone, where + # `src/nix-functional-tests` is during the transition a symlink and + # not the actual directory directory. + + '' + cd $(readlink -e $PWD) + echo $PWD | grep tests/functional + ''; + + mesonCheckFlags = [ + "--print-errorlogs" + ]; + + doCheck = true; + + installPhase = '' + mkdir $out ''; - mesonCheckFlags = [ - "--print-errorlogs" - ]; + meta = { + platforms = lib.platforms.unix; + }; - doCheck = true; - - installPhase = '' - mkdir $out - ''; - - meta = { - platforms = lib.platforms.unix; - }; - -} // lib.optionalAttrs (test-daemon != null) { - NIX_DAEMON_PACKAGE = test-daemon; -}) + } + // lib.optionalAttrs (test-daemon != null) { + NIX_DAEMON_PACKAGE = test-daemon; + } +) diff --git a/tests/functional/parallel.nix b/tests/functional/parallel.nix index 23f142059f5..0adfe7d8e53 100644 --- a/tests/functional/parallel.nix +++ b/tests/functional/parallel.nix @@ -1,19 +1,33 @@ -{sleepTime ? 3}: +{ + sleepTime ? 
3, +}: with import ./config.nix; let - mkDrv = text: inputs: mkDerivation { - name = "parallel"; - builder = ./parallel.builder.sh; - inherit text inputs shared sleepTime; - }; + mkDrv = + text: inputs: + mkDerivation { + name = "parallel"; + builder = ./parallel.builder.sh; + inherit + text + inputs + shared + sleepTime + ; + }; - a = mkDrv "a" []; - b = mkDrv "b" [a]; - c = mkDrv "c" [a]; - d = mkDrv "d" [a]; - e = mkDrv "e" [b c d]; + a = mkDrv "a" [ ]; + b = mkDrv "b" [ a ]; + c = mkDrv "c" [ a ]; + d = mkDrv "d" [ a ]; + e = mkDrv "e" [ + b + c + d + ]; -in e +in +e diff --git a/tests/functional/path.nix b/tests/functional/path.nix index 883c3c41bb1..b554765e85e 100644 --- a/tests/functional/path.nix +++ b/tests/functional/path.nix @@ -3,12 +3,12 @@ with import ./config.nix; mkDerivation { name = "filter"; builder = builtins.toFile "builder" "ln -s $input $out"; - input = - builtins.path { - path = ((builtins.getEnv "TEST_ROOT") + "/filterin"); - filter = path: type: - type != "symlink" - && baseNameOf path != "foo" - && !((import ./lang/lib.nix).hasSuffix ".bak" (baseNameOf path)); - }; + input = builtins.path { + path = ((builtins.getEnv "TEST_ROOT") + "/filterin"); + filter = + path: type: + type != "symlink" + && baseNameOf path != "foo" + && !((import ./lang/lib.nix).hasSuffix ".bak" (baseNameOf path)); + }; } diff --git a/tests/functional/readfile-context.nix b/tests/functional/readfile-context.nix index 54cd1afd9d3..d9880ca3201 100644 --- a/tests/functional/readfile-context.nix +++ b/tests/functional/readfile-context.nix @@ -25,4 +25,5 @@ let input = builtins.readFile (dependent + "/file1"); }; -in readDependent +in +readDependent diff --git a/tests/functional/recursive.nix b/tests/functional/recursive.nix index fe438f0ba5c..be9e55da37e 100644 --- a/tests/functional/recursive.nix +++ b/tests/functional/recursive.nix @@ -1,4 +1,6 @@ -let config_nix = /. + "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; in +let + config_nix = /. + "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; +in with import config_nix; mkDerivation rec { @@ -15,7 +17,9 @@ mkDerivation rec { buildCommand = '' mkdir $out - opts="--experimental-features nix-command ${if (NIX_TESTS_CA_BY_DEFAULT == "1") then "--extra-experimental-features ca-derivations" else ""}" + opts="--experimental-features nix-command ${ + if (NIX_TESTS_CA_BY_DEFAULT == "1") then "--extra-experimental-features ca-derivations" else "" + }" PATH=${builtins.getEnv "NIX_BIN_DIR"}:$PATH diff --git a/tests/functional/repl/doc-comment-function.nix b/tests/functional/repl/doc-comment-function.nix index cdd2413476f..a85d4a99fdb 100644 --- a/tests/functional/repl/doc-comment-function.nix +++ b/tests/functional/repl/doc-comment-function.nix @@ -1,3 +1,4 @@ -/** A doc comment for a file that only contains a function */ -{ ... }: -{ } +/** + A doc comment for a file that only contains a function +*/ +{ ... }: { } diff --git a/tests/functional/repl/doc-comments.nix b/tests/functional/repl/doc-comments.nix index e91ee0b513d..a7a285d48b9 100644 --- a/tests/functional/repl/doc-comments.nix +++ b/tests/functional/repl/doc-comments.nix @@ -6,55 +6,106 @@ multiply 2 3 => 6 ``` - */ + */ multiply = x: y: x * y; - /**👈 precisely this wide 👉*/ + /** + 👈 precisely this wide 👉 + */ measurement = x: x; - floatedIn = /** This also works. */ + floatedIn = + /** + This also works. 
+ */ x: y: x; - compact=/**boom*/x: x; + compact = + /** + boom + */ + x: x; # https://github.com/NixOS/rfcs/blob/master/rfcs/0145-doc-strings.md#ambiguous-placement - /** Ignore!!! */ - unambiguous = - /** Very close */ + /** + Ignore!!! + */ + unambiguous = + /** + Very close + */ x: x; - /** Firmly rigid. */ + /** + Firmly rigid. + */ constant = true; - /** Immovably fixed. */ + /** + Immovably fixed. + */ lib.version = "9000"; - /** Unchangeably constant. */ + /** + Unchangeably constant. + */ lib.attr.empty = { }; lib.attr.undocumented = { }; - nonStrict = /** My syntax is not strict, but I'm strict anyway. */ x: x; - strict = /** I don't have to be strict, but I am anyway. */ { ... }: null; + nonStrict = + /** + My syntax is not strict, but I'm strict anyway. + */ + x: x; + strict = + /** + I don't have to be strict, but I am anyway. + */ + { ... }: null; # Note that pre and post are the same here. I just had to name them somehow. - strictPre = /** Here's one way to do this */ a@{ ... }: a; - strictPost = /** Here's another way to do this */ { ... }@a: a; + strictPre = + /** + Here's one way to do this + */ + a@{ ... }: a; + strictPost = + /** + Here's another way to do this + */ + { ... }@a: a; # TODO - /** You won't see this. */ + /** + You won't see this. + */ curriedArgs = - /** A documented function. */ + /** + A documented function. + */ x: - /** The function returned by applying once */ + /** + The function returned by applying once + */ y: - /** A function body performing summation of two items */ + /** + A function body performing summation of two items + */ x + y; - /** Documented formals (but you won't see this comment) */ + /** + Documented formals (but you won't see this comment) + */ documentedFormals = - /** Finds x */ - { /** The x attribute */ - x - }: x; + /** + Finds x + */ + { + /** + The x attribute + */ + x, + }: + x; } diff --git a/tests/functional/repl/doc-functor.nix b/tests/functional/repl/doc-functor.nix index f526f453f19..8a663886cf2 100644 --- a/tests/functional/repl/doc-functor.nix +++ b/tests/functional/repl/doc-functor.nix @@ -25,14 +25,14 @@ rec { makeOverridable = f: { /** This is a function that can be overridden. - */ + */ __functor = self: f; override = throw "not implemented"; }; /** Compute x^2 - */ + */ square = x: x * x; helper = makeOverridable square; @@ -41,8 +41,14 @@ rec { makeVeryOverridable = f: { /** This is a function that can be overridden. - */ - __functor = self: arg: f arg // { override = throw "not implemented"; overrideAttrs = throw "not implemented"; }; + */ + __functor = + self: arg: + f arg + // { + override = throw "not implemented"; + overrideAttrs = throw "not implemented"; + }; override = throw "not implemented"; }; @@ -64,7 +70,6 @@ rec { */ helper3 = makeVeryOverridable (x: x * x * x); - # ------ # getDoc traverses a potentially infinite structure in case of __functor, so @@ -73,7 +78,7 @@ rec { recursive = { /** This looks bad, but the docs are ok because of the eta expansion. - */ + */ __functor = self: x: self x; }; @@ -81,21 +86,23 @@ rec { /** Docs probably won't work in this case, because the "partial" application of self results in an infinite recursion. - */ + */ __functor = self: self.__functor self; }; - diverging = let - /** - Docs probably won't work in this case, because the "partial" application - of self results in an diverging computation that causes a stack overflow. - It's not an infinite recursion because each call is different. 
- This must be handled by the documentation retrieval logic, as it - reimplements the __functor invocation to be partial. - */ - f = x: { - __functor = self: (f (x + 1)); - }; - in f null; + diverging = + let + /** + Docs probably won't work in this case, because the "partial" application + of self results in an diverging computation that causes a stack overflow. + It's not an infinite recursion because each call is different. + This must be handled by the documentation retrieval logic, as it + reimplements the __functor invocation to be partial. + */ + f = x: { + __functor = self: (f (x + 1)); + }; + in + f null; } diff --git a/tests/functional/secure-drv-outputs.nix b/tests/functional/secure-drv-outputs.nix index b4ac8ff531f..169c3c5875b 100644 --- a/tests/functional/secure-drv-outputs.nix +++ b/tests/functional/secure-drv-outputs.nix @@ -4,20 +4,18 @@ with import ./config.nix; good = mkDerivation { name = "good"; - builder = builtins.toFile "builder" - '' - mkdir $out - echo > $out/good - ''; + builder = builtins.toFile "builder" '' + mkdir $out + echo > $out/good + ''; }; bad = mkDerivation { name = "good"; - builder = builtins.toFile "builder" - '' - mkdir $out - echo > $out/bad - ''; + builder = builtins.toFile "builder" '' + mkdir $out + echo > $out/bad + ''; }; } diff --git a/tests/functional/shell-hello.nix b/tests/functional/shell-hello.nix index c920d7cb459..470798dd9e1 100644 --- a/tests/functional/shell-hello.nix +++ b/tests/functional/shell-hello.nix @@ -3,57 +3,56 @@ with import ./config.nix; rec { hello = mkDerivation { name = "hello"; - outputs = [ "out" "dev" ]; + outputs = [ + "out" + "dev" + ]; meta.outputsToInstall = [ "out" ]; - buildCommand = - '' - mkdir -p $out/bin $dev/bin + buildCommand = '' + mkdir -p $out/bin $dev/bin - cat > $out/bin/hello < $out/bin/hello < $dev/bin/hello2 < $dev/bin/hello2 < $out/bin/hello < $out/bin/hello < $out/bin/env <&2 - exit 1 - fi - exec env - EOF - chmod +x $out/bin/env - ''; + cat > $out/bin/env <&2 + exit 1 + fi + exec env + EOF + chmod +x $out/bin/env + ''; }; } diff --git a/tests/functional/shell.nix b/tests/functional/shell.nix index 4b1a0623a81..5e9f4881819 100644 --- a/tests/functional/shell.nix +++ b/tests/functional/shell.nix @@ -1,102 +1,130 @@ -{ inNixShell ? false, contentAddressed ? false, fooContents ? "foo" }: +{ + inNixShell ? false, + contentAddressed ? false, + fooContents ? 
"foo", +}: -let cfg = import ./config.nix; in +let + cfg = import ./config.nix; +in with cfg; let mkDerivation = if contentAddressed then - args: cfg.mkDerivation ({ - __contentAddressed = true; - outputHashMode = "recursive"; - outputHashAlgo = "sha256"; - } // args) - else cfg.mkDerivation; + args: + cfg.mkDerivation ( + { + __contentAddressed = true; + outputHashMode = "recursive"; + outputHashAlgo = "sha256"; + } + // args + ) + else + cfg.mkDerivation; in -let pkgs = rec { - setupSh = builtins.toFile "setup" '' - export VAR_FROM_STDENV_SETUP=foo - for pkg in $buildInputs; do - export PATH=$PATH:$pkg/bin - done - - declare -a arr1=(1 2 "3 4" 5) - declare -a arr2=(x $'\n' $'x\ny') - fun() { - echo blabla - } - runHook() { - eval "''${!1}" - } - ''; +let + pkgs = rec { + setupSh = builtins.toFile "setup" '' + export VAR_FROM_STDENV_SETUP=foo + for pkg in $buildInputs; do + export PATH=$PATH:$pkg/bin + done - stdenv = mkDerivation { - name = "stdenv"; - buildCommand = '' - mkdir -p $out - ln -s ${setupSh} $out/setup + declare -a arr1=(1 2 "3 4" 5) + declare -a arr2=(x $'\n' $'x\ny') + fun() { + echo blabla + } + runHook() { + eval "''${!1}" + } ''; - } // { inherit mkDerivation; }; - shellDrv = mkDerivation { - name = "shellDrv"; - builder = "/does/not/exist"; - VAR_FROM_NIX = "bar"; - ASCII_PERCENT = "%"; - ASCII_AT = "@"; - TEST_inNixShell = if inNixShell then "true" else "false"; - FOO = fooContents; - inherit stdenv; - outputs = ["dev" "out"]; - } // { - shellHook = abort "Ignore non-drv shellHook attr"; - }; + stdenv = + mkDerivation { + name = "stdenv"; + buildCommand = '' + mkdir -p $out + ln -s ${setupSh} $out/setup + ''; + } + // { + inherit mkDerivation; + }; - # https://github.com/NixOS/nix/issues/5431 - # See nix-shell.sh - polo = mkDerivation { - name = "polo"; - inherit stdenv; - shellHook = '' - echo Polo - ''; - }; + shellDrv = + mkDerivation { + name = "shellDrv"; + builder = "/does/not/exist"; + VAR_FROM_NIX = "bar"; + ASCII_PERCENT = "%"; + ASCII_AT = "@"; + TEST_inNixShell = if inNixShell then "true" else "false"; + FOO = fooContents; + inherit stdenv; + outputs = [ + "dev" + "out" + ]; + } + // { + shellHook = abort "Ignore non-drv shellHook attr"; + }; - # Used by nix-shell -p - runCommand = name: args: buildCommand: mkDerivation (args // { - inherit name buildCommand stdenv; - }); + # https://github.com/NixOS/nix/issues/5431 + # See nix-shell.sh + polo = mkDerivation { + name = "polo"; + inherit stdenv; + shellHook = '' + echo Polo + ''; + }; - foo = runCommand "foo" {} '' - mkdir -p $out/bin - echo 'echo ${fooContents}' > $out/bin/foo - chmod a+rx $out/bin/foo - ln -s ${shell} $out/bin/bash - ''; + # Used by nix-shell -p + runCommand = + name: args: buildCommand: + mkDerivation ( + args + // { + inherit name buildCommand stdenv; + } + ); - bar = runCommand "bar" {} '' - mkdir -p $out/bin - echo 'echo bar' > $out/bin/bar - chmod a+rx $out/bin/bar - ''; + foo = runCommand "foo" { } '' + mkdir -p $out/bin + echo 'echo ${fooContents}' > $out/bin/foo + chmod a+rx $out/bin/foo + ln -s ${shell} $out/bin/bash + ''; - bash = shell; - bashInteractive = runCommand "bash" {} '' - mkdir -p $out/bin - ln -s ${shell} $out/bin/bash - ''; + bar = runCommand "bar" { } '' + mkdir -p $out/bin + echo 'echo bar' > $out/bin/bar + chmod a+rx $out/bin/bar + ''; - # ruby "interpreter" that outputs "$@" - ruby = runCommand "ruby" {} '' - mkdir -p $out/bin - echo 'printf %s "$*"' > $out/bin/ruby - chmod a+rx $out/bin/ruby - ''; + bash = shell; + bashInteractive = runCommand "bash" { } '' + 
mkdir -p $out/bin + ln -s ${shell} $out/bin/bash + ''; - inherit (cfg) shell; + # ruby "interpreter" that outputs "$@" + ruby = runCommand "ruby" { } '' + mkdir -p $out/bin + echo 'printf %s "$*"' > $out/bin/ruby + chmod a+rx $out/bin/ruby + ''; - callPackage = f: args: f (pkgs // args); + inherit (cfg) shell; - inherit pkgs; -}; in pkgs + callPackage = f: args: f (pkgs // args); + + inherit pkgs; + }; +in +pkgs diff --git a/tests/functional/simple-failing.nix b/tests/functional/simple-failing.nix index d176c9c51e6..6cf29ae3842 100644 --- a/tests/functional/simple-failing.nix +++ b/tests/functional/simple-failing.nix @@ -2,11 +2,10 @@ with import ./config.nix; mkDerivation { name = "simple-failing"; - builder = builtins.toFile "builder.sh" - '' - echo "This should fail" - exit 1 - ''; + builder = builtins.toFile "builder.sh" '' + echo "This should fail" + exit 1 + ''; PATH = ""; goodPath = path; } diff --git a/tests/functional/structured-attrs-shell.nix b/tests/functional/structured-attrs-shell.nix index 57c1e6bd2da..a819e39cdae 100644 --- a/tests/functional/structured-attrs-shell.nix +++ b/tests/functional/structured-attrs-shell.nix @@ -12,8 +12,15 @@ mkDerivation { name = "structured2"; __structuredAttrs = true; inherit stdenv; - outputs = [ "out" "dev" ]; - my.list = [ "a" "b" "c" ]; + outputs = [ + "out" + "dev" + ]; + my.list = [ + "a" + "b" + "c" + ]; exportReferencesGraph.refs = [ dep ]; buildCommand = '' touch ''${outputs[out]}; touch ''${outputs[dev]} diff --git a/tests/functional/structured-attrs.nix b/tests/functional/structured-attrs.nix index e93139a4457..4e19845176e 100644 --- a/tests/functional/structured-attrs.nix +++ b/tests/functional/structured-attrs.nix @@ -16,7 +16,10 @@ mkDerivation { __structuredAttrs = true; - outputs = [ "out" "dev" ]; + outputs = [ + "out" + "dev" + ]; buildCommand = '' set -x @@ -43,12 +46,24 @@ mkDerivation { [[ $json =~ '"references":[]' ]] ''; - buildInputs = [ "a" "b" "c" 123 "'" "\"" null ]; + buildInputs = [ + "a" + "b" + "c" + 123 + "'" + "\"" + null + ]; hardening.format = true; hardening.fortify = false; - outer.inner = [ 1 2 3 ]; + outer.inner = [ + 1 + 2 + 3 + ]; int = 123456789; diff --git a/tests/functional/undefined-variable.nix b/tests/functional/undefined-variable.nix index 579985497e9..8e88dd8fe02 100644 --- a/tests/functional/undefined-variable.nix +++ b/tests/functional/undefined-variable.nix @@ -1 +1,4 @@ -let f = builtins.toFile "test-file.nix" "asd"; in import f +let + f = builtins.toFile "test-file.nix" "asd"; +in +import f diff --git a/tests/functional/user-envs.nix b/tests/functional/user-envs.nix index 46f8b51dda1..cc63812c4a7 100644 --- a/tests/functional/user-envs.nix +++ b/tests/functional/user-envs.nix @@ -1,5 +1,6 @@ # Some dummy arguments... -{ foo ? "foo" +{ + foo ? 
"foo", }: with import ./config.nix; @@ -8,27 +9,41 @@ assert foo == "foo"; let - platforms = let x = "foobar"; in [ x x ]; + platforms = + let + x = "foobar"; + in + [ + x + x + ]; - makeDrv = name: progName: (mkDerivation { - name = assert progName != "fail"; name; - inherit progName system; - builder = ./user-envs.builder.sh; - } // { - meta = { - description = "A silly test package with some \${escaped anti-quotation} in it"; - inherit platforms; - }; - }); + makeDrv = + name: progName: + ( + mkDerivation { + name = + assert progName != "fail"; + name; + inherit progName system; + builder = ./user-envs.builder.sh; + } + // { + meta = { + description = "A silly test package with some \${escaped anti-quotation} in it"; + inherit platforms; + }; + } + ); in - [ - (makeDrv "foo-1.0" "foo") - (makeDrv "foo-2.0pre1" "foo") - (makeDrv "bar-0.1" "bar") - (makeDrv "foo-2.0" "foo") - (makeDrv "bar-0.1.1" "bar") - (makeDrv "foo-0.1" "foo" // { meta.priority = 10; }) - (makeDrv "fail-0.1" "fail") - ] +[ + (makeDrv "foo-1.0" "foo") + (makeDrv "foo-2.0pre1" "foo") + (makeDrv "bar-0.1" "bar") + (makeDrv "foo-2.0" "foo") + (makeDrv "bar-0.1.1" "bar") + (makeDrv "foo-0.1" "foo" // { meta.priority = 10; }) + (makeDrv "fail-0.1" "fail") +] diff --git a/tests/installer/default.nix b/tests/installer/default.nix index 4aed6eae489..d48537dd0d0 100644 --- a/tests/installer/default.nix +++ b/tests/installer/default.nix @@ -1,5 +1,6 @@ -{ binaryTarballs -, nixpkgsFor +{ + binaryTarballs, + nixpkgsFor, }: let @@ -41,8 +42,9 @@ let }; }; - mockChannel = pkgs: - pkgs.runCommandNoCC "mock-channel" {} '' + mockChannel = + pkgs: + pkgs.runCommandNoCC "mock-channel" { } '' mkdir nixexprs mkdir -p $out/channel echo -n 'someContent' > nixexprs/someFile @@ -54,14 +56,14 @@ let images = { /* - "ubuntu-14-04" = { - image = import { - url = "https://app.vagrantup.com/ubuntu/boxes/trusty64/versions/20190514.0.0/providers/virtualbox.box"; - hash = "sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="; + "ubuntu-14-04" = { + image = import { + url = "https://app.vagrantup.com/ubuntu/boxes/trusty64/versions/20190514.0.0/providers/virtualbox.box"; + hash = "sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="; + }; + rootDisk = "box-disk1.vmdk"; + system = "x86_64-linux"; }; - rootDisk = "box-disk1.vmdk"; - system = "x86_64-linux"; - }; */ "ubuntu-16-04" = { @@ -95,14 +97,14 @@ let # Currently fails with 'error while loading shared libraries: # libsodium.so.23: cannot stat shared object: Invalid argument'. 
/* - "rhel-6" = { - image = import { - url = "https://app.vagrantup.com/generic/boxes/rhel6/versions/4.1.12/providers/libvirt.box"; - hash = "sha256-QwzbvRoRRGqUCQptM7X/InRWFSP2sqwRt2HaaO6zBGM="; + "rhel-6" = { + image = import { + url = "https://app.vagrantup.com/generic/boxes/rhel6/versions/4.1.12/providers/libvirt.box"; + hash = "sha256-QwzbvRoRRGqUCQptM7X/InRWFSP2sqwRt2HaaO6zBGM="; + }; + rootDisk = "box.img"; + system = "x86_64-linux"; }; - rootDisk = "box.img"; - system = "x86_64-linux"; - }; */ "rhel-7" = { @@ -137,12 +139,18 @@ let }; - makeTest = imageName: testName: - let image = images.${imageName}; in + makeTest = + imageName: testName: + let + image = images.${imageName}; + in with nixpkgsFor.${image.system}.native; - runCommand - "installer-test-${imageName}-${testName}" - { buildInputs = [ qemu_kvm openssh ]; + runCommand "installer-test-${imageName}-${testName}" + { + buildInputs = [ + qemu_kvm + openssh + ]; image = image.image; postBoot = image.postBoot or ""; installScript = installScripts.${testName}.script; @@ -247,9 +255,6 @@ let in -builtins.mapAttrs (imageName: image: - { ${image.system} = builtins.mapAttrs (testName: test: - makeTest imageName testName - ) installScripts; - } -) images +builtins.mapAttrs (imageName: image: { + ${image.system} = builtins.mapAttrs (testName: test: makeTest imageName testName) installScripts; +}) images diff --git a/tests/nixos/authorization.nix b/tests/nixos/authorization.nix index fdeae06ed34..6540e9fa337 100644 --- a/tests/nixos/authorization.nix +++ b/tests/nixos/authorization.nix @@ -4,8 +4,11 @@ nodes.machine = { virtualisation.writableStore = true; # TODO add a test without allowed-users setting. allowed-users is uncommon among NixOS users. - nix.settings.allowed-users = ["alice" "bob"]; - nix.settings.trusted-users = ["alice"]; + nix.settings.allowed-users = [ + "alice" + "bob" + ]; + nix.settings.trusted-users = [ "alice" ]; users.users.alice.isNormalUser = true; users.users.bob.isNormalUser = true; @@ -15,80 +18,80 @@ }; testScript = - let - pathFour = "/nix/store/20xfy868aiic0r0flgzq4n5dq1yvmxkn-four"; - in - '' - machine.wait_for_unit("multi-user.target") - machine.succeed(""" - exec 1>&2 - echo kSELDhobKaF8/VdxIxdP7EQe+Q > one - diff $(nix store add-file one) one - """) - machine.succeed(""" - su --login alice -c ' - set -x - cd ~ - echo ehHtmfuULXYyBV6NBk6QUi8iE0 > two - ls - diff $(echo $(nix store add-file two)) two' 1>&2 - """) - machine.succeed(""" - su --login bob -c ' - set -x - cd ~ - echo 0Jw8RNp7cK0W2AdNbcquofcOVk > three - diff $(nix store add-file three) three - ' 1>&2 - """) + let + pathFour = "/nix/store/20xfy868aiic0r0flgzq4n5dq1yvmxkn-four"; + in + '' + machine.wait_for_unit("multi-user.target") + machine.succeed(""" + exec 1>&2 + echo kSELDhobKaF8/VdxIxdP7EQe+Q > one + diff $(nix store add-file one) one + """) + machine.succeed(""" + su --login alice -c ' + set -x + cd ~ + echo ehHtmfuULXYyBV6NBk6QUi8iE0 > two + ls + diff $(echo $(nix store add-file two)) two' 1>&2 + """) + machine.succeed(""" + su --login bob -c ' + set -x + cd ~ + echo 0Jw8RNp7cK0W2AdNbcquofcOVk > three + diff $(nix store add-file three) three + ' 1>&2 + """) - # We're going to check that a path is not created - machine.succeed(""" - ! [[ -e ${pathFour} ]] - """) - machine.succeed(""" - su --login mallory -c ' - set -x - cd ~ - echo 5mgtDj0ohrWkT50TLR0f4tIIxY > four; - (! nix store add-file four 2>&1) | grep -F "cannot open connection to remote store" - (! 
nix store add-file four 2>&1) | grep -F "Connection reset by peer" + # We're going to check that a path is not created + machine.succeed(""" ! [[ -e ${pathFour} ]] - ' 1>&2 - """) - - # Check that the file _can_ be added, and matches the expected path we were checking - machine.succeed(""" - exec 1>&2 - echo 5mgtDj0ohrWkT50TLR0f4tIIxY > four - four="$(nix store add-file four)" - diff $four four - diff <(echo $four) <(echo ${pathFour}) - """) + """) + machine.succeed(""" + su --login mallory -c ' + set -x + cd ~ + echo 5mgtDj0ohrWkT50TLR0f4tIIxY > four; + (! nix store add-file four 2>&1) | grep -F "cannot open connection to remote store" + (! nix store add-file four 2>&1) | grep -F "Connection reset by peer" + ! [[ -e ${pathFour} ]] + ' 1>&2 + """) - machine.succeed(""" - su --login alice -c 'nix-store --verify --repair' - """) + # Check that the file _can_ be added, and matches the expected path we were checking + machine.succeed(""" + exec 1>&2 + echo 5mgtDj0ohrWkT50TLR0f4tIIxY > four + four="$(nix store add-file four)" + diff $four four + diff <(echo $four) <(echo ${pathFour}) + """) - machine.succeed(""" - set -x - su --login bob -c '(! nix-store --verify --repair 2>&1)' | tee diag 1>&2 - grep -F "you are not privileged to repair paths" diag - """) + machine.succeed(""" + su --login alice -c 'nix-store --verify --repair' + """) - machine.succeed(""" + machine.succeed(""" set -x - su --login mallory -c ' - nix-store --generate-binary-cache-key cache1.example.org sk1 pk1 - (! nix store sign --key-file sk1 ${pathFour} 2>&1)' | tee diag 1>&2 - grep -F "cannot open connection to remote store 'daemon'" diag - """) + su --login bob -c '(! nix-store --verify --repair 2>&1)' | tee diag 1>&2 + grep -F "you are not privileged to repair paths" diag + """) - machine.succeed(""" - su --login bob -c ' - nix-store --generate-binary-cache-key cache1.example.org sk1 pk1 - nix store sign --key-file sk1 ${pathFour} - ' - """) - ''; + machine.succeed(""" + set -x + su --login mallory -c ' + nix-store --generate-binary-cache-key cache1.example.org sk1 pk1 + (! nix store sign --key-file sk1 ${pathFour} 2>&1)' | tee diag 1>&2 + grep -F "cannot open connection to remote store 'daemon'" diag + """) + + machine.succeed(""" + su --login bob -c ' + nix-store --generate-binary-cache-key cache1.example.org sk1 pk1 + nix store sign --key-file sk1 ${pathFour} + ' + """) + ''; } diff --git a/tests/nixos/ca-fd-leak/default.nix b/tests/nixos/ca-fd-leak/default.nix index a6ae72adc93..902aacdc650 100644 --- a/tests/nixos/ca-fd-leak/default.nix +++ b/tests/nixos/ca-fd-leak/default.nix @@ -27,12 +27,15 @@ let # domain socket. # Compiled statically so that we can easily send it to the VM and use it # inside the build sandbox. - sender = pkgs.runCommandWith { - name = "sender"; - stdenv = pkgs.pkgsStatic.stdenv; - } '' - $CC -static -o $out ${./sender.c} - ''; + sender = + pkgs.runCommandWith + { + name = "sender"; + stdenv = pkgs.pkgsStatic.stdenv; + } + '' + $CC -static -o $out ${./sender.c} + ''; # Okay, so we have a file descriptor shipped out of the FOD now. But the # Nix store is read-only, right? .. Well, yeah. But this file descriptor @@ -47,44 +50,57 @@ in name = "ca-fd-leak"; nodes.machine = - { config, lib, pkgs, ... }: - { virtualisation.writableStore = true; + { + config, + lib, + pkgs, + ... 
+ }: + { + virtualisation.writableStore = true; nix.settings.substituters = lib.mkForce [ ]; - virtualisation.additionalPaths = [ pkgs.busybox-sandbox-shell sender smuggler pkgs.socat ]; + virtualisation.additionalPaths = [ + pkgs.busybox-sandbox-shell + sender + smuggler + pkgs.socat + ]; }; - testScript = { nodes }: '' - start_all() + testScript = + { nodes }: + '' + start_all() - machine.succeed("echo hello") - # Start the smuggler server - machine.succeed("${smuggler}/bin/smuggler ${socketName} >&2 &") + machine.succeed("echo hello") + # Start the smuggler server + machine.succeed("${smuggler}/bin/smuggler ${socketName} >&2 &") - # Build the smuggled derivation. - # This will connect to the smuggler server and send it the file descriptor - machine.succeed(r""" - nix-build -E ' - builtins.derivation { - name = "smuggled"; - system = builtins.currentSystem; - # look ma, no tricks! - outputHashMode = "flat"; - outputHashAlgo = "sha256"; - outputHash = builtins.hashString "sha256" "hello, world\n"; - builder = "${pkgs.busybox-sandbox-shell}/bin/sh"; - args = [ "-c" "echo \"hello, world\" > $out; ''${${sender}} ${socketName}" ]; - }' - """.strip()) + # Build the smuggled derivation. + # This will connect to the smuggler server and send it the file descriptor + machine.succeed(r""" + nix-build -E ' + builtins.derivation { + name = "smuggled"; + system = builtins.currentSystem; + # look ma, no tricks! + outputHashMode = "flat"; + outputHashAlgo = "sha256"; + outputHash = builtins.hashString "sha256" "hello, world\n"; + builder = "${pkgs.busybox-sandbox-shell}/bin/sh"; + args = [ "-c" "echo \"hello, world\" > $out; ''${${sender}} ${socketName}" ]; + }' + """.strip()) - # Tell the smuggler server that we're done - machine.execute("echo done | ${pkgs.socat}/bin/socat - ABSTRACT-CONNECT:${socketName}") + # Tell the smuggler server that we're done + machine.execute("echo done | ${pkgs.socat}/bin/socat - ABSTRACT-CONNECT:${socketName}") - # Check that the file was not modified - machine.succeed(r""" - cat ./result - test "$(cat ./result)" = "hello, world" - """.strip()) - ''; + # Check that the file was not modified + machine.succeed(r""" + cat ./result + test "$(cat ./result)" = "hello, world" + """.strip()) + ''; } diff --git a/tests/nixos/cgroups/default.nix b/tests/nixos/cgroups/default.nix index b8febbf4bda..a6b4bca8c76 100644 --- a/tests/nixos/cgroups/default.nix +++ b/tests/nixos/cgroups/default.nix @@ -3,38 +3,39 @@ { name = "cgroups"; - nodes = - { - host = - { config, pkgs, ... 
}: - { virtualisation.additionalPaths = [ pkgs.stdenvNoCC ]; - nix.extraOptions = - '' - extra-experimental-features = nix-command auto-allocate-uids cgroups - extra-system-features = uid-range - ''; - nix.settings.use-cgroups = true; - nix.nixPath = [ "nixpkgs=${nixpkgs}" ]; - }; - }; - - testScript = { nodes }: '' - start_all() - - host.wait_for_unit("multi-user.target") - - # Start build in background - host.execute("NIX_REMOTE=daemon nix build --auto-allocate-uids --file ${./hang.nix} >&2 &") - service = "/sys/fs/cgroup/system.slice/nix-daemon.service" - - # Wait for cgroups to be created - host.succeed(f"until [ -e {service}/nix-daemon ]; do sleep 1; done", timeout=30) - host.succeed(f"until [ -e {service}/nix-build-uid-* ]; do sleep 1; done", timeout=30) - - # Check that there aren't processes where there shouldn't be, and that there are where there should be - host.succeed(f'[ -z "$(cat {service}/cgroup.procs)" ]') - host.succeed(f'[ -n "$(cat {service}/nix-daemon/cgroup.procs)" ]') - host.succeed(f'[ -n "$(cat {service}/nix-build-uid-*/cgroup.procs)" ]') - ''; + nodes = { + host = + { config, pkgs, ... }: + { + virtualisation.additionalPaths = [ pkgs.stdenvNoCC ]; + nix.extraOptions = '' + extra-experimental-features = nix-command auto-allocate-uids cgroups + extra-system-features = uid-range + ''; + nix.settings.use-cgroups = true; + nix.nixPath = [ "nixpkgs=${nixpkgs}" ]; + }; + }; + + testScript = + { nodes }: + '' + start_all() + + host.wait_for_unit("multi-user.target") + + # Start build in background + host.execute("NIX_REMOTE=daemon nix build --auto-allocate-uids --file ${./hang.nix} >&2 &") + service = "/sys/fs/cgroup/system.slice/nix-daemon.service" + + # Wait for cgroups to be created + host.succeed(f"until [ -e {service}/nix-daemon ]; do sleep 1; done", timeout=30) + host.succeed(f"until [ -e {service}/nix-build-uid-* ]; do sleep 1; done", timeout=30) + + # Check that there aren't processes where there shouldn't be, and that there are where there should be + host.succeed(f'[ -z "$(cat {service}/cgroup.procs)" ]') + host.succeed(f'[ -n "$(cat {service}/nix-daemon/cgroup.procs)" ]') + host.succeed(f'[ -n "$(cat {service}/nix-build-uid-*/cgroup.procs)" ]') + ''; } diff --git a/tests/nixos/cgroups/hang.nix b/tests/nixos/cgroups/hang.nix index cefe2d031c0..d7b337b0c05 100644 --- a/tests/nixos/cgroups/hang.nix +++ b/tests/nixos/cgroups/hang.nix @@ -1,9 +1,10 @@ { }: -with import {}; +with import { }; runCommand "hang" - { requiredSystemFeatures = "uid-range"; + { + requiredSystemFeatures = "uid-range"; } '' sleep infinity diff --git a/tests/nixos/chroot-store.nix b/tests/nixos/chroot-store.nix index 4b167fc3839..f89a20bc4d5 100644 --- a/tests/nixos/chroot-store.nix +++ b/tests/nixos/chroot-store.nix @@ -1,31 +1,45 @@ -{ lib, config, nixpkgs, ... }: +{ + lib, + config, + nixpkgs, + ... +}: let pkgs = config.nodes.machine.nixpkgs.pkgs; pkgA = pkgs.hello; pkgB = pkgs.cowsay; -in { +in +{ name = "chroot-store"; - nodes = - { machine = - { config, lib, pkgs, ... }: - { virtualisation.writableStore = true; - virtualisation.additionalPaths = [ pkgA ]; - environment.systemPackages = [ pkgB ]; - nix.extraOptions = "experimental-features = nix-command"; - }; - }; + nodes = { + machine = + { + config, + lib, + pkgs, + ... 
+ }: + { + virtualisation.writableStore = true; + virtualisation.additionalPaths = [ pkgA ]; + environment.systemPackages = [ pkgB ]; + nix.extraOptions = "experimental-features = nix-command"; + }; + }; - testScript = { nodes }: '' - # fmt: off - start_all() + testScript = + { nodes }: + '' + # fmt: off + start_all() - machine.succeed("nix copy --no-check-sigs --to /tmp/nix ${pkgA}") + machine.succeed("nix copy --no-check-sigs --to /tmp/nix ${pkgA}") - machine.succeed("nix shell --store /tmp/nix ${pkgA} --command hello >&2") + machine.succeed("nix shell --store /tmp/nix ${pkgA} --command hello >&2") - # Test that /nix/store is available via an overlayfs mount. - machine.succeed("nix shell --store /tmp/nix ${pkgA} --command cowsay foo >&2") - ''; + # Test that /nix/store is available via an overlayfs mount. + machine.succeed("nix shell --store /tmp/nix ${pkgA} --command cowsay foo >&2") + ''; } diff --git a/tests/nixos/containers/containers.nix b/tests/nixos/containers/containers.nix index 6773f5628a3..b590dc8498f 100644 --- a/tests/nixos/containers/containers.nix +++ b/tests/nixos/containers/containers.nix @@ -4,60 +4,67 @@ { name = "containers"; - nodes = - { - host = - { config, lib, pkgs, nodes, ... }: - { virtualisation.writableStore = true; - virtualisation.diskSize = 2048; - virtualisation.additionalPaths = - [ pkgs.stdenvNoCC - (import ./systemd-nspawn.nix { inherit nixpkgs; }).toplevel - ]; - virtualisation.memorySize = 4096; - nix.settings.substituters = lib.mkForce [ ]; - nix.extraOptions = - '' - extra-experimental-features = nix-command auto-allocate-uids cgroups - extra-system-features = uid-range - ''; - nix.nixPath = [ "nixpkgs=${nixpkgs}" ]; - }; - }; - - testScript = { nodes }: '' - start_all() - - host.succeed("nix --version >&2") - - # Test that 'id' gives the expected result in various configurations. - - # Existing UIDs, sandbox. - host.succeed("nix build --no-auto-allocate-uids --sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-1") - host.succeed("[[ $(cat ./result) = 'uid=1000(nixbld) gid=100(nixbld) groups=100(nixbld)' ]]") - - # Existing UIDs, no sandbox. - host.succeed("nix build --no-auto-allocate-uids --no-sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-2") - host.succeed("[[ $(cat ./result) = 'uid=30001(nixbld1) gid=30000(nixbld) groups=30000(nixbld)' ]]") - - # Auto-allocated UIDs, sandbox. - host.succeed("nix build --auto-allocate-uids --sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-3") - host.succeed("[[ $(cat ./result) = 'uid=1000(nixbld) gid=100(nixbld) groups=100(nixbld)' ]]") - - # Auto-allocated UIDs, no sandbox. - host.succeed("nix build --auto-allocate-uids --no-sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-4") - host.succeed("[[ $(cat ./result) = 'uid=872415232 gid=30000(nixbld) groups=30000(nixbld)' ]]") - - # Auto-allocated UIDs, UID range, sandbox. - host.succeed("nix build --auto-allocate-uids --sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-5 --arg uidRange true") - host.succeed("[[ $(cat ./result) = 'uid=0(root) gid=0(root) groups=0(root)' ]]") - - # Auto-allocated UIDs, UID range, no sandbox. - host.fail("nix build --auto-allocate-uids --no-sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-6 --arg uidRange true") - - # Run systemd-nspawn in a Nix build. 
- host.succeed("nix build --auto-allocate-uids --sandbox -L --offline --impure --file ${./systemd-nspawn.nix} --argstr nixpkgs ${nixpkgs}") - host.succeed("[[ $(cat ./result/msg) = 'Hello World' ]]") - ''; + nodes = { + host = + { + config, + lib, + pkgs, + nodes, + ... + }: + { + virtualisation.writableStore = true; + virtualisation.diskSize = 2048; + virtualisation.additionalPaths = [ + pkgs.stdenvNoCC + (import ./systemd-nspawn.nix { inherit nixpkgs; }).toplevel + ]; + virtualisation.memorySize = 4096; + nix.settings.substituters = lib.mkForce [ ]; + nix.extraOptions = '' + extra-experimental-features = nix-command auto-allocate-uids cgroups + extra-system-features = uid-range + ''; + nix.nixPath = [ "nixpkgs=${nixpkgs}" ]; + }; + }; + + testScript = + { nodes }: + '' + start_all() + + host.succeed("nix --version >&2") + + # Test that 'id' gives the expected result in various configurations. + + # Existing UIDs, sandbox. + host.succeed("nix build --no-auto-allocate-uids --sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-1") + host.succeed("[[ $(cat ./result) = 'uid=1000(nixbld) gid=100(nixbld) groups=100(nixbld)' ]]") + + # Existing UIDs, no sandbox. + host.succeed("nix build --no-auto-allocate-uids --no-sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-2") + host.succeed("[[ $(cat ./result) = 'uid=30001(nixbld1) gid=30000(nixbld) groups=30000(nixbld)' ]]") + + # Auto-allocated UIDs, sandbox. + host.succeed("nix build --auto-allocate-uids --sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-3") + host.succeed("[[ $(cat ./result) = 'uid=1000(nixbld) gid=100(nixbld) groups=100(nixbld)' ]]") + + # Auto-allocated UIDs, no sandbox. + host.succeed("nix build --auto-allocate-uids --no-sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-4") + host.succeed("[[ $(cat ./result) = 'uid=872415232 gid=30000(nixbld) groups=30000(nixbld)' ]]") + + # Auto-allocated UIDs, UID range, sandbox. + host.succeed("nix build --auto-allocate-uids --sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-5 --arg uidRange true") + host.succeed("[[ $(cat ./result) = 'uid=0(root) gid=0(root) groups=0(root)' ]]") + + # Auto-allocated UIDs, UID range, no sandbox. + host.fail("nix build --auto-allocate-uids --no-sandbox -L --offline --impure --file ${./id-test.nix} --argstr name id-test-6 --arg uidRange true") + + # Run systemd-nspawn in a Nix build. + host.succeed("nix build --auto-allocate-uids --sandbox -L --offline --impure --file ${./systemd-nspawn.nix} --argstr nixpkgs ${nixpkgs}") + host.succeed("[[ $(cat ./result/msg) = 'Hello World' ]]") + ''; } diff --git a/tests/nixos/containers/id-test.nix b/tests/nixos/containers/id-test.nix index 8eb9d38f9a2..2139327ad88 100644 --- a/tests/nixos/containers/id-test.nix +++ b/tests/nixos/containers/id-test.nix @@ -1,8 +1,10 @@ -{ name, uidRange ? false }: +{ + name, + uidRange ? false, +}: -with import {}; +with import { }; -runCommand name - { requiredSystemFeatures = if uidRange then ["uid-range"] else []; - } - "id; id > $out" +runCommand name { + requiredSystemFeatures = if uidRange then [ "uid-range" ] else [ ]; +} "id; id > $out" diff --git a/tests/nixos/containers/systemd-nspawn.nix b/tests/nixos/containers/systemd-nspawn.nix index 1dad4ebd754..4516f4e1394 100644 --- a/tests/nixos/containers/systemd-nspawn.nix +++ b/tests/nixos/containers/systemd-nspawn.nix @@ -2,7 +2,8 @@ let - machine = { config, pkgs, ... }: + machine = + { config, pkgs, ... 
}: { system.stateVersion = "22.05"; boot.isContainer = true; @@ -31,10 +32,12 @@ let }; }; - cfg = (import (nixpkgs + "/nixos/lib/eval-config.nix") { - modules = [ machine ]; - system = "x86_64-linux"; - }); + cfg = ( + import (nixpkgs + "/nixos/lib/eval-config.nix") { + modules = [ machine ]; + system = "x86_64-linux"; + } + ); config = cfg.config; @@ -43,7 +46,8 @@ in with cfg._module.args.pkgs; runCommand "test" - { buildInputs = [ config.system.path ]; + { + buildInputs = [ config.system.path ]; requiredSystemFeatures = [ "uid-range" ]; toplevel = config.system.build.toplevel; } diff --git a/tests/nixos/default.nix b/tests/nixos/default.nix index 8e0cb1b225b..ca72034ec4f 100644 --- a/tests/nixos/default.nix +++ b/tests/nixos/default.nix @@ -1,17 +1,26 @@ -{ lib, nixpkgs, nixpkgsFor, self }: +{ + lib, + nixpkgs, + nixpkgsFor, + self, +}: let nixos-lib = import (nixpkgs + "/nixos/lib") { }; - noTests = pkg: pkg.overrideAttrs ( - finalAttrs: prevAttrs: { - doCheck = false; - doInstallCheck = false; - }); + noTests = + pkg: + pkg.overrideAttrs ( + finalAttrs: prevAttrs: { + doCheck = false; + doInstallCheck = false; + } + ); # https://nixos.org/manual/nixos/unstable/index.html#sec-calling-nixos-tests - runNixOSTestFor = system: test: + runNixOSTestFor = + system: test: (nixos-lib.runTest { imports = [ test @@ -36,44 +45,61 @@ let # allow running tests against older nix versions via `nix eval --apply` # Example: # nix build "$(nix eval --raw --impure .#hydraJobs.tests.fetch-git --apply 't: (t.forNix "2.19.2").drvPath')^*" - forNix = nixVersion: runNixOSTestFor system { - imports = [test]; - defaults.nixpkgs.overlays = [(curr: prev: { - nix = let - packages = (builtins.getFlake "nix/${nixVersion}").packages.${system}; - in packages.nix-cli or packages.nix; - })]; - }; + forNix = + nixVersion: + runNixOSTestFor system { + imports = [ test ]; + defaults.nixpkgs.overlays = [ + (curr: prev: { + nix = + let + packages = (builtins.getFlake "nix/${nixVersion}").packages.${system}; + in + packages.nix-cli or packages.nix; + }) + ]; + }; }; # Checks that a NixOS configuration does not contain any references to our # locally defined Nix version. - checkOverrideNixVersion = { pkgs, lib, ... }: { - # pkgs.nix: The new Nix in this repo - # We disallow it, to make sure we don't accidentally use it. - system.forbiddenDependenciesRegexes = [ - (lib.strings.escapeRegex "nix-${pkgs.nix.version}") - ]; - }; - - otherNixes.nix_2_3.setNixPackage = { lib, pkgs, ... }: { - imports = [ checkOverrideNixVersion ]; - nix.package = lib.mkForce pkgs.nixVersions.nix_2_3; - }; - - otherNixes.nix_2_13.setNixPackage = { lib, pkgs, ... }: { - imports = [ checkOverrideNixVersion ]; - nix.package = lib.mkForce ( - self.inputs.nixpkgs-23-11.legacyPackages.${pkgs.stdenv.hostPlatform.system}.nixVersions.nix_2_13.overrideAttrs (o: { - meta = o.meta // { knownVulnerabilities = []; }; - }) - ); - }; + checkOverrideNixVersion = + { pkgs, lib, ... }: + { + # pkgs.nix: The new Nix in this repo + # We disallow it, to make sure we don't accidentally use it. + system.forbiddenDependenciesRegexes = [ + (lib.strings.escapeRegex "nix-${pkgs.nix.version}") + ]; + }; + + otherNixes.nix_2_3.setNixPackage = + { lib, pkgs, ... }: + { + imports = [ checkOverrideNixVersion ]; + nix.package = lib.mkForce pkgs.nixVersions.nix_2_3; + }; + + otherNixes.nix_2_13.setNixPackage = + { lib, pkgs, ... 
}: + { + imports = [ checkOverrideNixVersion ]; + nix.package = lib.mkForce ( + self.inputs.nixpkgs-23-11.legacyPackages.${pkgs.stdenv.hostPlatform.system}.nixVersions.nix_2_13.overrideAttrs + (o: { + meta = o.meta // { + knownVulnerabilities = [ ]; + }; + }) + ); + }; - otherNixes.nix_2_18.setNixPackage = { lib, pkgs, ... }: { - imports = [ checkOverrideNixVersion ]; - nix.package = lib.mkForce pkgs.nixVersions.nix_2_18; - }; + otherNixes.nix_2_18.setNixPackage = + { lib, pkgs, ... }: + { + imports = [ checkOverrideNixVersion ]; + nix.package = lib.mkForce pkgs.nixVersions.nix_2_18; + }; in @@ -86,30 +112,37 @@ in } // lib.concatMapAttrs ( - nixVersion: { setNixPackage, ... }: + nixVersion: + { setNixPackage, ... }: { "remoteBuilds_remote_${nixVersion}" = runNixOSTestFor "x86_64-linux" { name = "remoteBuilds_remote_${nixVersion}"; imports = [ ./remote-builds.nix ]; - builders.config = { lib, pkgs, ... }: { - imports = [ setNixPackage ]; - }; + builders.config = + { lib, pkgs, ... }: + { + imports = [ setNixPackage ]; + }; }; "remoteBuilds_local_${nixVersion}" = runNixOSTestFor "x86_64-linux" { name = "remoteBuilds_local_${nixVersion}"; imports = [ ./remote-builds.nix ]; - nodes.client = { lib, pkgs, ... }: { - imports = [ setNixPackage ]; - }; + nodes.client = + { lib, pkgs, ... }: + { + imports = [ setNixPackage ]; + }; }; "remoteBuildsSshNg_remote_${nixVersion}" = runNixOSTestFor "x86_64-linux" { name = "remoteBuildsSshNg_remote_${nixVersion}"; imports = [ ./remote-builds-ssh-ng.nix ]; - builders.config = { lib, pkgs, ... }: { - imports = [ setNixPackage ]; - }; + builders.config = + { lib, pkgs, ... }: + { + imports = [ setNixPackage ]; + }; }; # FIXME: these tests don't work yet @@ -143,9 +176,7 @@ in containers = runNixOSTestFor "x86_64-linux" ./containers/containers.nix; - setuid = lib.genAttrs - ["x86_64-linux"] - (system: runNixOSTestFor system ./setuid.nix); + setuid = lib.genAttrs [ "x86_64-linux" ] (system: runNixOSTestFor system ./setuid.nix); fetch-git = runNixOSTestFor "x86_64-linux" ./fetch-git; diff --git a/tests/nixos/fetch-git/default.nix b/tests/nixos/fetch-git/default.nix index 1d6bcb63783..329fb463e8e 100644 --- a/tests/nixos/fetch-git/default.nix +++ b/tests/nixos/fetch-git/default.nix @@ -7,26 +7,27 @@ ]; /* - Test cases + Test cases - Test cases are automatically imported from ./test-cases/{name} + Test cases are automatically imported from ./test-cases/{name} - The following is set up automatically for each test case: - - a repo with the {name} is created on the gitea server - - a repo with the {name} is created on the client - - the client repo is configured to push to the server repo + The following is set up automatically for each test case: + - a repo with the {name} is created on the gitea server + - a repo with the {name} is created on the client + - the client repo is configured to push to the server repo - Python variables: - - repo.path: the path to the directory of the client repo - - repo.git: the git command with the client repo as the working directory - - repo.remote: the url to the server repo + Python variables: + - repo.path: the path to the directory of the client repo + - repo.git: the git command with the client repo as the working directory + - repo.remote: the url to the server repo */ - testCases = - map - (testCaseName: {...}: { + testCases = map ( + testCaseName: + { ... 
}: + { imports = [ (./test-cases + "/${testCaseName}") ]; # ensures tests are named like their directories they are defined in name = testCaseName; - }) - (lib.attrNames (builtins.readDir ./test-cases)); + } + ) (lib.attrNames (builtins.readDir ./test-cases)); } diff --git a/tests/nixos/fetch-git/test-cases/http-auth/default.nix b/tests/nixos/fetch-git/test-cases/http-auth/default.nix index d483d54fb24..7ad9a8914e2 100644 --- a/tests/nixos/fetch-git/test-cases/http-auth/default.nix +++ b/tests/nixos/fetch-git/test-cases/http-auth/default.nix @@ -5,7 +5,8 @@ script = '' # add a file to the repo client.succeed(f""" - echo ${config.name /* to make the git tree and store path unique */} > {repo.path}/test-case \ + echo ${config.name # to make the git tree and store path unique + } > {repo.path}/test-case \ && echo lutyabrook > {repo.path}/new-york-state \ && {repo.git} add test-case new-york-state \ && {repo.git} commit -m 'commit1' diff --git a/tests/nixos/fetch-git/test-cases/http-simple/default.nix b/tests/nixos/fetch-git/test-cases/http-simple/default.nix index dcab8067e59..51b3882b5a6 100644 --- a/tests/nixos/fetch-git/test-cases/http-simple/default.nix +++ b/tests/nixos/fetch-git/test-cases/http-simple/default.nix @@ -4,7 +4,8 @@ script = '' # add a file to the repo client.succeed(f""" - echo ${config.name /* to make the git tree and store path unique */} > {repo.path}/test-case \ + echo ${config.name # to make the git tree and store path unique + } > {repo.path}/test-case \ && echo chiang-mai > {repo.path}/thailand \ && {repo.git} add test-case thailand \ && {repo.git} commit -m 'commit1' diff --git a/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix b/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix index f5fba169846..89285d00ed4 100644 --- a/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix +++ b/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix @@ -4,7 +4,8 @@ script = '' # add a file to the repo client.succeed(f""" - echo ${config.name /* to make the git tree and store path unique */} > {repo.path}/test-case \ + echo ${config.name # to make the git tree and store path unique + } > {repo.path}/test-case \ && echo chiang-mai > {repo.path}/thailand \ && {repo.git} add test-case thailand \ && {repo.git} commit -m 'commit1' diff --git a/tests/nixos/fetch-git/testsupport/gitea-repo.nix b/tests/nixos/fetch-git/testsupport/gitea-repo.nix index e9f4adcc1d3..c8244207fbb 100644 --- a/tests/nixos/fetch-git/testsupport/gitea-repo.nix +++ b/tests/nixos/fetch-git/testsupport/gitea-repo.nix @@ -8,25 +8,27 @@ let boolPyLiteral = b: if b then "True" else "False"; - testCaseExtension = { config, ... }: { - options = { - repo.enable = mkOption { - type = types.bool; - default = true; - description = "Whether to provide a repo variable - automatic repo creation."; + testCaseExtension = + { config, ... 
}: + { + options = { + repo.enable = mkOption { + type = types.bool; + default = true; + description = "Whether to provide a repo variable - automatic repo creation."; + }; + repo.private = mkOption { + type = types.bool; + default = false; + description = "Whether the repo should be private."; + }; }; - repo.private = mkOption { - type = types.bool; - default = false; - description = "Whether the repo should be private."; + config = mkIf config.repo.enable { + setupScript = '' + repo = Repo("${config.name}", private=${boolPyLiteral config.repo.private}) + ''; }; }; - config = mkIf config.repo.enable { - setupScript = '' - repo = Repo("${config.name}", private=${boolPyLiteral config.repo.private}) - ''; - }; - }; in { options = { diff --git a/tests/nixos/fetch-git/testsupport/gitea.nix b/tests/nixos/fetch-git/testsupport/gitea.nix index cf87bb4662d..9409acff7cb 100644 --- a/tests/nixos/fetch-git/testsupport/gitea.nix +++ b/tests/nixos/fetch-git/testsupport/gitea.nix @@ -1,4 +1,11 @@ -{ lib, nixpkgs, system, pkgs, ... }: let +{ + lib, + nixpkgs, + system, + pkgs, + ... +}: +let clientPrivateKey = pkgs.writeText "id_ed25519" '' -----BEGIN OPENSSH PRIVATE KEY----- b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW @@ -9,41 +16,52 @@ -----END OPENSSH PRIVATE KEY----- ''; - clientPublicKey = - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFt5a8eH8BYZYjoQhzXGVKKHJe1pw1D0p7O2Vb9VTLzB"; + clientPublicKey = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFt5a8eH8BYZYjoQhzXGVKKHJe1pw1D0p7O2Vb9VTLzB"; -in { +in +{ imports = [ ../testsupport/setup.nix ../testsupport/gitea-repo.nix ]; nodes = { - gitea = { pkgs, ... }: { - services.gitea.enable = true; - services.gitea.settings.service.DISABLE_REGISTRATION = true; - services.gitea.settings.log.LEVEL = "Info"; - services.gitea.settings.database.LOG_SQL = false; - services.openssh.enable = true; - networking.firewall.allowedTCPPorts = [ 3000 ]; - environment.systemPackages = [ pkgs.git pkgs.gitea ]; + gitea = + { pkgs, ... }: + { + services.gitea.enable = true; + services.gitea.settings.service.DISABLE_REGISTRATION = true; + services.gitea.settings.log.LEVEL = "Info"; + services.gitea.settings.database.LOG_SQL = false; + services.openssh.enable = true; + networking.firewall.allowedTCPPorts = [ 3000 ]; + environment.systemPackages = [ + pkgs.git + pkgs.gitea + ]; - users.users.root.openssh.authorizedKeys.keys = [clientPublicKey]; + users.users.root.openssh.authorizedKeys.keys = [ clientPublicKey ]; - # TODO: remove this after updating to nixos-23.11 - nixpkgs.pkgs = lib.mkForce (import nixpkgs { - inherit system; - config.permittedInsecurePackages = [ - "gitea-1.19.4" - ]; - }); - }; - client = { pkgs, ... }: { - environment.systemPackages = [ pkgs.git ]; - }; - }; - defaults = { pkgs, ... }: { - environment.systemPackages = [ pkgs.jq ]; + # TODO: remove this after updating to nixos-23.11 + nixpkgs.pkgs = lib.mkForce ( + import nixpkgs { + inherit system; + config.permittedInsecurePackages = [ + "gitea-1.19.4" + ]; + } + ); + }; + client = + { pkgs, ... }: + { + environment.systemPackages = [ pkgs.git ]; + }; }; + defaults = + { pkgs, ... }: + { + environment.systemPackages = [ pkgs.jq ]; + }; setupScript = '' import shlex diff --git a/tests/nixos/fetch-git/testsupport/setup.nix b/tests/nixos/fetch-git/testsupport/setup.nix index a81d5614b44..c13386c7223 100644 --- a/tests/nixos/fetch-git/testsupport/setup.nix +++ b/tests/nixos/fetch-git/testsupport/setup.nix @@ -1,11 +1,16 @@ -{ lib, config, extendModules, ... 
}: +{ + lib, + config, + extendModules, + ... +}: let inherit (lib) mkOption types ; - indent = lib.replaceStrings ["\n"] ["\n "]; + indent = lib.replaceStrings [ "\n" ] [ "\n " ]; execTestCase = testCase: '' @@ -35,37 +40,39 @@ in description = '' The test cases. See `testScript`. ''; - type = types.listOf (types.submodule { - options.name = mkOption { - type = types.str; - description = '' - The name of the test case. + type = types.listOf ( + types.submodule { + options.name = mkOption { + type = types.str; + description = '' + The name of the test case. - A repository with that name will be set up on the gitea server and locally. - ''; - }; - options.description = mkOption { - type = types.str; - description = '' - A description of the test case. - ''; - }; - options.setupScript = mkOption { - type = types.lines; - description = '' - Python code that runs before the test case. - ''; - default = ""; - }; - options.script = mkOption { - type = types.lines; - description = '' - Python code that runs the test. + A repository with that name will be set up on the gitea server and locally. + ''; + }; + options.description = mkOption { + type = types.str; + description = '' + A description of the test case. + ''; + }; + options.setupScript = mkOption { + type = types.lines; + description = '' + Python code that runs before the test case. + ''; + default = ""; + }; + options.script = mkOption { + type = types.lines; + description = '' + Python code that runs the test. - Variables defined by the global `setupScript`, as well as `testCases.*.setupScript` will be available here. - ''; - }; - }); + Variables defined by the global `setupScript`, as well as `testCases.*.setupScript` will be available here. + ''; + }; + } + ); }; }; @@ -74,10 +81,12 @@ in environment.variables = { _NIX_FORCE_HTTP = "1"; }; - nix.settings.experimental-features = ["nix-command" "flakes"]; + nix.settings.experimental-features = [ + "nix-command" + "flakes" + ]; }; - setupScript = '' - ''; + setupScript = ''''; testScript = '' start_all(); diff --git a/tests/nixos/fetchurl.nix b/tests/nixos/fetchurl.nix index bfae8deecac..e8663debbcd 100644 --- a/tests/nixos/fetchurl.nix +++ b/tests/nixos/fetchurl.nix @@ -5,16 +5,20 @@ let - makeTlsCert = name: pkgs.runCommand name { - nativeBuildInputs = with pkgs; [ openssl ]; - } '' - mkdir -p $out - openssl req -x509 \ - -subj '/CN=${name}/' -days 49710 \ - -addext 'subjectAltName = DNS:${name}' \ - -keyout "$out/key.pem" -newkey ed25519 \ - -out "$out/cert.pem" -noenc - ''; + makeTlsCert = + name: + pkgs.runCommand name + { + nativeBuildInputs = with pkgs; [ openssl ]; + } + '' + mkdir -p $out + openssl req -x509 \ + -subj '/CN=${name}/' -days 49710 \ + -addext 'subjectAltName = DNS:${name}' \ + -keyout "$out/key.pem" -newkey ed25519 \ + -out "$out/cert.pem" -noenc + ''; goodCert = makeTlsCert "good"; badCert = makeTlsCert "bad"; @@ -25,39 +29,44 @@ in name = "fetchurl"; nodes = { - machine = { pkgs, ... }: { - services.nginx = { - enable = true; - - virtualHosts."good" = { - addSSL = true; - sslCertificate = "${goodCert}/cert.pem"; - sslCertificateKey = "${goodCert}/key.pem"; - root = pkgs.runCommand "nginx-root" {} '' - mkdir "$out" - echo 'hello world' > "$out/index.html" - ''; + machine = + { pkgs, ... 
}: + { + services.nginx = { + enable = true; + + virtualHosts."good" = { + addSSL = true; + sslCertificate = "${goodCert}/cert.pem"; + sslCertificateKey = "${goodCert}/key.pem"; + root = pkgs.runCommand "nginx-root" { } '' + mkdir "$out" + echo 'hello world' > "$out/index.html" + ''; + }; + + virtualHosts."bad" = { + addSSL = true; + sslCertificate = "${badCert}/cert.pem"; + sslCertificateKey = "${badCert}/key.pem"; + root = pkgs.runCommand "nginx-root" { } '' + mkdir "$out" + echo 'foobar' > "$out/index.html" + ''; + }; }; - virtualHosts."bad" = { - addSSL = true; - sslCertificate = "${badCert}/cert.pem"; - sslCertificateKey = "${badCert}/key.pem"; - root = pkgs.runCommand "nginx-root" {} '' - mkdir "$out" - echo 'foobar' > "$out/index.html" - ''; - }; - }; + security.pki.certificateFiles = [ "${goodCert}/cert.pem" ]; - security.pki.certificateFiles = [ "${goodCert}/cert.pem" ]; + networking.hosts."127.0.0.1" = [ + "good" + "bad" + ]; - networking.hosts."127.0.0.1" = [ "good" "bad" ]; + virtualisation.writableStore = true; - virtualisation.writableStore = true; - - nix.settings.experimental-features = "nix-command"; - }; + nix.settings.experimental-features = "nix-command"; + }; }; testScript = '' diff --git a/tests/nixos/fsync.nix b/tests/nixos/fsync.nix index 99ac2b25d50..e215e5b3c25 100644 --- a/tests/nixos/fsync.nix +++ b/tests/nixos/fsync.nix @@ -1,4 +1,10 @@ -{ lib, config, nixpkgs, pkgs, ... }: +{ + lib, + config, + nixpkgs, + pkgs, + ... +}: let pkg1 = pkgs.go; @@ -8,32 +14,44 @@ in name = "fsync"; nodes.machine = - { config, lib, pkgs, ... }: - { virtualisation.emptyDiskImages = [ 1024 ]; + { + config, + lib, + pkgs, + ... + }: + { + virtualisation.emptyDiskImages = [ 1024 ]; environment.systemPackages = [ pkg1 ]; nix.settings.experimental-features = [ "nix-command" ]; nix.settings.fsync-store-paths = true; nix.settings.require-sigs = false; - boot.supportedFilesystems = [ "ext4" "btrfs" "xfs" ]; + boot.supportedFilesystems = [ + "ext4" + "btrfs" + "xfs" + ]; }; - testScript = { nodes }: '' - # fmt: off - for fs in ("ext4", "btrfs", "xfs"): - machine.succeed("mkfs.{} {} /dev/vdb".format(fs, "-F" if fs == "ext4" else "-f")) - machine.succeed("mkdir -p /mnt") - machine.succeed("mount /dev/vdb /mnt") - machine.succeed("sync") - machine.succeed("nix copy --offline ${pkg1} --to /mnt") - machine.crash() + testScript = + { nodes }: + '' + # fmt: off + for fs in ("ext4", "btrfs", "xfs"): + machine.succeed("mkfs.{} {} /dev/vdb".format(fs, "-F" if fs == "ext4" else "-f")) + machine.succeed("mkdir -p /mnt") + machine.succeed("mount /dev/vdb /mnt") + machine.succeed("sync") + machine.succeed("nix copy --offline ${pkg1} --to /mnt") + machine.crash() - machine.start() - machine.wait_for_unit("multi-user.target") - machine.succeed("mkdir -p /mnt") - machine.succeed("mount /dev/vdb /mnt") - machine.succeed("nix path-info --offline --store /mnt ${pkg1}") - machine.succeed("nix store verify --all --store /mnt --no-trust") + machine.start() + machine.wait_for_unit("multi-user.target") + machine.succeed("mkdir -p /mnt") + machine.succeed("mount /dev/vdb /mnt") + machine.succeed("nix path-info --offline --store /mnt ${pkg1}") + machine.succeed("nix store verify --all --store /mnt --no-trust") - machine.succeed("umount /dev/vdb") - ''; + machine.succeed("umount /dev/vdb") + ''; } diff --git a/tests/nixos/functional/as-trusted-user.nix b/tests/nixos/functional/as-trusted-user.nix index d6f825697e9..25c1b399c1c 100644 --- a/tests/nixos/functional/as-trusted-user.nix +++ 
b/tests/nixos/functional/as-trusted-user.nix @@ -4,7 +4,9 @@ imports = [ ./common.nix ]; nodes.machine = { - users.users.alice = { isNormalUser = true; }; + users.users.alice = { + isNormalUser = true; + }; nix.settings.trusted-users = [ "alice" ]; }; @@ -15,4 +17,4 @@ su --login --command "run-test-suite" alice >&2 """) ''; -} \ No newline at end of file +} diff --git a/tests/nixos/functional/as-user.nix b/tests/nixos/functional/as-user.nix index 1443f6e6ccd..b93c8d798a3 100644 --- a/tests/nixos/functional/as-user.nix +++ b/tests/nixos/functional/as-user.nix @@ -4,7 +4,9 @@ imports = [ ./common.nix ]; nodes.machine = { - users.users.alice = { isNormalUser = true; }; + users.users.alice = { + isNormalUser = true; + }; }; testScript = '' diff --git a/tests/nixos/functional/common.nix b/tests/nixos/functional/common.nix index 561271ba0ec..f3cab47259b 100644 --- a/tests/nixos/functional/common.nix +++ b/tests/nixos/functional/common.nix @@ -2,9 +2,11 @@ let # FIXME (roberth) reference issue - inputDerivation = pkg: (pkg.overrideAttrs (o: { - disallowedReferences = [ ]; - })).inputDerivation; + inputDerivation = + pkg: + (pkg.overrideAttrs (o: { + disallowedReferences = [ ]; + })).inputDerivation; in { @@ -12,59 +14,63 @@ in # we skip it to save time. skipTypeCheck = true; - nodes.machine = { config, pkgs, ... }: { + nodes.machine = + { config, pkgs, ... }: + { - virtualisation.writableStore = true; - system.extraDependencies = [ - (inputDerivation config.nix.package) - ]; + virtualisation.writableStore = true; + system.extraDependencies = [ + (inputDerivation config.nix.package) + ]; - nix.settings.substituters = lib.mkForce []; + nix.settings.substituters = lib.mkForce [ ]; - environment.systemPackages = let - run-test-suite = pkgs.writeShellApplication { - name = "run-test-suite"; - runtimeInputs = [ - pkgs.meson - pkgs.ninja - pkgs.jq - pkgs.git + environment.systemPackages = + let + run-test-suite = pkgs.writeShellApplication { + name = "run-test-suite"; + runtimeInputs = [ + pkgs.meson + pkgs.ninja + pkgs.jq + pkgs.git - # Want to avoid `/run/current-system/sw/bin/bash` because we - # want a store path. Likewise for coreutils. - pkgs.bash - pkgs.coreutils - ]; - text = '' - set -x + # Want to avoid `/run/current-system/sw/bin/bash` because we + # want a store path. Likewise for coreutils. 
+ pkgs.bash + pkgs.coreutils + ]; + text = '' + set -x - cat /proc/sys/fs/file-max - ulimit -Hn - ulimit -Sn + cat /proc/sys/fs/file-max + ulimit -Hn + ulimit -Sn - cd ~ + cd ~ - cp -r ${pkgs.nixComponents.nix-functional-tests.src} nix - chmod -R +w nix + cp -r ${pkgs.nixComponents.nix-functional-tests.src} nix + chmod -R +w nix - chmod u+w nix/.version - echo ${pkgs.nixComponents.version} > nix/.version + chmod u+w nix/.version + echo ${pkgs.nixComponents.version} > nix/.version - export isTestOnNixOS=1 + export isTestOnNixOS=1 - export NIX_REMOTE_=daemon - export NIX_REMOTE=daemon + export NIX_REMOTE_=daemon + export NIX_REMOTE=daemon - export NIX_STORE=${builtins.storeDir} + export NIX_STORE=${builtins.storeDir} - meson setup nix/tests/functional build - cd build - meson test -j1 --print-errorlogs - ''; - }; - in [ - run-test-suite - pkgs.git - ]; - }; + meson setup nix/tests/functional build + cd build + meson test -j1 --print-errorlogs + ''; + }; + in + [ + run-test-suite + pkgs.git + ]; + }; } diff --git a/tests/nixos/functional/symlinked-home.nix b/tests/nixos/functional/symlinked-home.nix index 57c45d5d592..900543d0cfe 100644 --- a/tests/nixos/functional/symlinked-home.nix +++ b/tests/nixos/functional/symlinked-home.nix @@ -16,7 +16,9 @@ imports = [ ./common.nix ]; nodes.machine = { - users.users.alice = { isNormalUser = true; }; + users.users.alice = { + isNormalUser = true; + }; }; testScript = '' diff --git a/tests/nixos/git-submodules.nix b/tests/nixos/git-submodules.nix index a82ddf418eb..5b1d9ed5f5f 100644 --- a/tests/nixos/git-submodules.nix +++ b/tests/nixos/git-submodules.nix @@ -6,68 +6,74 @@ config = { name = lib.mkDefault "git-submodules"; - nodes = - { - remote = - { config, pkgs, ... }: - { - services.openssh.enable = true; - environment.systemPackages = [ pkgs.git ]; - }; + nodes = { + remote = + { config, pkgs, ... }: + { + services.openssh.enable = true; + environment.systemPackages = [ pkgs.git ]; + }; - client = - { config, lib, pkgs, ... }: - { - programs.ssh.extraConfig = "ConnectTimeout 30"; - environment.systemPackages = [ pkgs.git ]; - nix.extraOptions = "experimental-features = nix-command flakes"; - }; - }; + client = + { + config, + lib, + pkgs, + ... + }: + { + programs.ssh.extraConfig = "ConnectTimeout 30"; + environment.systemPackages = [ pkgs.git ]; + nix.extraOptions = "experimental-features = nix-command flakes"; + }; + }; - testScript = { nodes }: '' - # fmt: off - import subprocess + testScript = + { nodes }: + '' + # fmt: off + import subprocess - start_all() + start_all() - # Create an SSH key on the client. - subprocess.run([ - "${hostPkgs.openssh}/bin/ssh-keygen", "-t", "ed25519", "-f", "key", "-N", "" - ], capture_output=True, check=True) - client.succeed("mkdir -p -m 700 /root/.ssh") - client.copy_from_host("key", "/root/.ssh/id_ed25519") - client.succeed("chmod 600 /root/.ssh/id_ed25519") + # Create an SSH key on the client. + subprocess.run([ + "${hostPkgs.openssh}/bin/ssh-keygen", "-t", "ed25519", "-f", "key", "-N", "" + ], capture_output=True, check=True) + client.succeed("mkdir -p -m 700 /root/.ssh") + client.copy_from_host("key", "/root/.ssh/id_ed25519") + client.succeed("chmod 600 /root/.ssh/id_ed25519") - # Install the SSH key on the builders. - client.wait_for_unit("network-online.target") + # Install the SSH key on the builders. 
+ client.wait_for_unit("network-online.target") - remote.succeed("mkdir -p -m 700 /root/.ssh") - remote.copy_from_host("key.pub", "/root/.ssh/authorized_keys") - remote.wait_for_unit("sshd") - remote.wait_for_unit("multi-user.target") - remote.wait_for_unit("network-online.target") - client.wait_for_unit("network-online.target") - client.succeed(f"ssh -o StrictHostKeyChecking=no {remote.name} 'echo hello world'") + remote.succeed("mkdir -p -m 700 /root/.ssh") + remote.copy_from_host("key.pub", "/root/.ssh/authorized_keys") + remote.wait_for_unit("sshd") + remote.wait_for_unit("multi-user.target") + remote.wait_for_unit("network-online.target") + client.wait_for_unit("network-online.target") + client.succeed(f"ssh -o StrictHostKeyChecking=no {remote.name} 'echo hello world'") - remote.succeed(""" - git init bar - git -C bar config user.email foobar@example.com - git -C bar config user.name Foobar - echo test >> bar/content - git -C bar add content - git -C bar commit -m 'Initial commit' - """) + remote.succeed(""" + git init bar + git -C bar config user.email foobar@example.com + git -C bar config user.name Foobar + echo test >> bar/content + git -C bar add content + git -C bar commit -m 'Initial commit' + """) - client.succeed(f""" - git init foo - git -C foo config user.email foobar@example.com - git -C foo config user.name Foobar - git -C foo submodule add root@{remote.name}:/tmp/bar sub - git -C foo add sub - git -C foo commit -m 'Add submodule' - """) + client.succeed(f""" + git init foo + git -C foo config user.email foobar@example.com + git -C foo config user.name Foobar + git -C foo submodule add root@{remote.name}:/tmp/bar sub + git -C foo add sub + git -C foo commit -m 'Add submodule' + """) - client.succeed("nix --flake-registry \"\" flake prefetch 'git+file:///tmp/foo?submodules=1&ref=master'") - ''; + client.succeed("nix --flake-registry \"\" flake prefetch 'git+file:///tmp/foo?submodules=1&ref=master'") + ''; }; } diff --git a/tests/nixos/github-flakes.nix b/tests/nixos/github-flakes.nix index 69d1df410d3..dcba464a34d 100644 --- a/tests/nixos/github-flakes.nix +++ b/tests/nixos/github-flakes.nix @@ -1,21 +1,25 @@ -{ lib, config, nixpkgs, ... }: +{ + lib, + config, + nixpkgs, + ... +}: let pkgs = config.nodes.client.nixpkgs.pkgs; # Generate a fake root CA and a fake api.github.com / github.com / channels.nixos.org certificate. 
- cert = pkgs.runCommand "cert" { nativeBuildInputs = [ pkgs.openssl ]; } - '' - mkdir -p $out + cert = pkgs.runCommand "cert" { nativeBuildInputs = [ pkgs.openssl ]; } '' + mkdir -p $out - openssl genrsa -out ca.key 2048 - openssl req -new -x509 -days 36500 -key ca.key \ - -subj "/C=NL/ST=Denial/L=Springfield/O=Dis/CN=Root CA" -out $out/ca.crt + openssl genrsa -out ca.key 2048 + openssl req -new -x509 -days 36500 -key ca.key \ + -subj "/C=NL/ST=Denial/L=Springfield/O=Dis/CN=Root CA" -out $out/ca.crt - openssl req -newkey rsa:2048 -nodes -keyout $out/server.key \ - -subj "/C=CN/ST=Denial/L=Springfield/O=Dis/CN=github.com" -out server.csr - openssl x509 -req -extfile <(printf "subjectAltName=DNS:api.github.com,DNS:github.com,DNS:channels.nixos.org") \ - -days 36500 -in server.csr -CA $out/ca.crt -CAkey ca.key -CAcreateserial -out $out/server.crt - ''; + openssl req -newkey rsa:2048 -nodes -keyout $out/server.key \ + -subj "/C=CN/ST=Denial/L=Springfield/O=Dis/CN=github.com" -out server.csr + openssl x509 -req -extfile <(printf "subjectAltName=DNS:api.github.com,DNS:github.com,DNS:channels.nixos.org") \ + -days 36500 -in server.csr -CA $out/ca.crt -CAkey ca.key -CAcreateserial -out $out/server.crt + ''; registry = pkgs.writeTextFile { name = "registry"; @@ -53,168 +57,190 @@ let private-flake-rev = "9f1dd0df5b54a7dc75b618034482ed42ce34383d"; - private-flake-api = pkgs.runCommand "private-flake" {} - '' - mkdir -p $out/{commits,tarball} + private-flake-api = pkgs.runCommand "private-flake" { } '' + mkdir -p $out/{commits,tarball} - # Setup https://docs.github.com/en/rest/commits/commits#get-a-commit - echo '{"sha": "${private-flake-rev}", "commit": {"tree": {"sha": "ffffffffffffffffffffffffffffffffffffffff"}}}' > $out/commits/HEAD + # Setup https://docs.github.com/en/rest/commits/commits#get-a-commit + echo '{"sha": "${private-flake-rev}", "commit": {"tree": {"sha": "ffffffffffffffffffffffffffffffffffffffff"}}}' > $out/commits/HEAD - # Setup tarball download via API - dir=private-flake - mkdir $dir - echo '{ outputs = {...}: {}; }' > $dir/flake.nix - tar cfz $out/tarball/${private-flake-rev} $dir --hard-dereference - ''; + # Setup tarball download via API + dir=private-flake + mkdir $dir + echo '{ outputs = {...}: {}; }' > $dir/flake.nix + tar cfz $out/tarball/${private-flake-rev} $dir --hard-dereference + ''; - nixpkgs-api = pkgs.runCommand "nixpkgs-flake" {} - '' - mkdir -p $out/commits + nixpkgs-api = pkgs.runCommand "nixpkgs-flake" { } '' + mkdir -p $out/commits - # Setup https://docs.github.com/en/rest/commits/commits#get-a-commit - echo '{"sha": "${nixpkgs.rev}", "commit": {"tree": {"sha": "ffffffffffffffffffffffffffffffffffffffff"}}}' > $out/commits/HEAD - ''; + # Setup https://docs.github.com/en/rest/commits/commits#get-a-commit + echo '{"sha": "${nixpkgs.rev}", "commit": {"tree": {"sha": "ffffffffffffffffffffffffffffffffffffffff"}}}' > $out/commits/HEAD + ''; - archive = pkgs.runCommand "nixpkgs-flake" {} - '' - mkdir -p $out/archive + archive = pkgs.runCommand "nixpkgs-flake" { } '' + mkdir -p $out/archive - dir=NixOS-nixpkgs-${nixpkgs.shortRev} - cp -prd ${nixpkgs} $dir - # Set the correct timestamp in the tarball. - find $dir -print0 | xargs -0 touch -h -t ${builtins.substring 0 12 nixpkgs.lastModifiedDate}.${builtins.substring 12 2 nixpkgs.lastModifiedDate} -- - tar cfz $out/archive/${nixpkgs.rev}.tar.gz $dir --hard-dereference - ''; + dir=NixOS-nixpkgs-${nixpkgs.shortRev} + cp -prd ${nixpkgs} $dir + # Set the correct timestamp in the tarball. 
+ find $dir -print0 | xargs -0 touch -h -t ${builtins.substring 0 12 nixpkgs.lastModifiedDate}.${ + builtins.substring 12 2 nixpkgs.lastModifiedDate + } -- + tar cfz $out/archive/${nixpkgs.rev}.tar.gz $dir --hard-dereference + ''; in { name = "github-flakes"; - nodes = - { - github = - { config, pkgs, ... }: - { networking.firewall.allowedTCPPorts = [ 80 443 ]; - - services.httpd.enable = true; - services.httpd.adminAddr = "foo@example.org"; - services.httpd.extraConfig = '' - ErrorLog syslog:local6 - ''; - services.httpd.virtualHosts."channels.nixos.org" = - { forceSSL = true; - sslServerKey = "${cert}/server.key"; - sslServerCert = "${cert}/server.crt"; - servedDirs = - [ { urlPath = "/"; - dir = registry; - } - ]; - }; - services.httpd.virtualHosts."api.github.com" = - { forceSSL = true; - sslServerKey = "${cert}/server.key"; - sslServerCert = "${cert}/server.crt"; - servedDirs = - [ { urlPath = "/repos/NixOS/nixpkgs"; - dir = nixpkgs-api; - } - { urlPath = "/repos/fancy-enterprise/private-flake"; - dir = private-flake-api; - } - ]; - }; - services.httpd.virtualHosts."github.com" = - { forceSSL = true; - sslServerKey = "${cert}/server.key"; - sslServerCert = "${cert}/server.crt"; - servedDirs = - [ { urlPath = "/NixOS/nixpkgs"; - dir = archive; - } - ]; - }; + nodes = { + github = + { config, pkgs, ... }: + { + networking.firewall.allowedTCPPorts = [ + 80 + 443 + ]; + + services.httpd.enable = true; + services.httpd.adminAddr = "foo@example.org"; + services.httpd.extraConfig = '' + ErrorLog syslog:local6 + ''; + services.httpd.virtualHosts."channels.nixos.org" = { + forceSSL = true; + sslServerKey = "${cert}/server.key"; + sslServerCert = "${cert}/server.crt"; + servedDirs = [ + { + urlPath = "/"; + dir = registry; + } + ]; }; - - client = - { config, lib, pkgs, nodes, ... }: - { virtualisation.writableStore = true; - virtualisation.diskSize = 2048; - virtualisation.additionalPaths = [ pkgs.hello pkgs.fuse ]; - virtualisation.memorySize = 4096; - nix.settings.substituters = lib.mkForce [ ]; - nix.extraOptions = "experimental-features = nix-command flakes"; - networking.hosts.${(builtins.head nodes.github.networking.interfaces.eth1.ipv4.addresses).address} = - [ "channels.nixos.org" "api.github.com" "github.com" ]; - security.pki.certificateFiles = [ "${cert}/ca.crt" ]; + services.httpd.virtualHosts."api.github.com" = { + forceSSL = true; + sslServerKey = "${cert}/server.key"; + sslServerCert = "${cert}/server.crt"; + servedDirs = [ + { + urlPath = "/repos/NixOS/nixpkgs"; + dir = nixpkgs-api; + } + { + urlPath = "/repos/fancy-enterprise/private-flake"; + dir = private-flake-api; + } + ]; }; - }; - - testScript = { nodes }: '' - # fmt: off - import json - import time - - start_all() - - def cat_log(): - github.succeed("cat /var/log/httpd/*.log >&2") - - github.wait_for_unit("httpd.service") - github.wait_for_unit("network-online.target") - - client.wait_for_unit("network-online.target") - client.succeed("curl -v https://github.com/ >&2") - out = client.succeed("nix registry list") - print(out) - assert "github:NixOS/nixpkgs" in out, "nixpkgs flake not found" - assert "github:fancy-enterprise/private-flake" in out, "private flake not found" - cat_log() - - # If no github access token is provided, nix should use the public archive url... - out = client.succeed("nix flake metadata nixpkgs --json") - print(out) - info = json.loads(out) - assert info["revision"] == "${nixpkgs.rev}", f"revision mismatch: {info['revision']} != ${nixpkgs.rev}" - cat_log() - - # ... 
otherwise it should use the API - out = client.succeed("nix flake metadata private-flake --json --access-tokens github.com=ghp_000000000000000000000000000000000000 --tarball-ttl 0") - print(out) - info = json.loads(out) - assert info["revision"] == "${private-flake-rev}", f"revision mismatch: {info['revision']} != ${private-flake-rev}" - assert info["fingerprint"] - cat_log() - - # Fetching with the resolved URL should produce the same result. - info2 = json.loads(client.succeed(f"nix flake metadata {info['url']} --json --access-tokens github.com=ghp_000000000000000000000000000000000000 --tarball-ttl 0")) - print(info["fingerprint"], info2["fingerprint"]) - assert info["fingerprint"] == info2["fingerprint"], "fingerprint mismatch" - - client.succeed("nix registry pin nixpkgs") - client.succeed("nix flake metadata nixpkgs --tarball-ttl 0 >&2") - - # Test fetchTree on a github URL. - hash = client.succeed(f"nix eval --no-trust-tarballs-from-git-forges --raw --expr '(fetchTree {info['url']}).narHash'") - assert hash == info['locked']['narHash'] - - # Fetching without a narHash should succeed if trust-github is set and fail otherwise. - client.succeed(f"nix eval --raw --expr 'builtins.fetchTree github:github:fancy-enterprise/private-flake/{info['revision']}'") - out = client.fail(f"nix eval --no-trust-tarballs-from-git-forges --raw --expr 'builtins.fetchTree github:github:fancy-enterprise/private-flake/{info['revision']}' 2>&1") - assert "will not fetch unlocked input" in out, "--no-trust-tarballs-from-git-forges did not fail with the expected error" - - # Shut down the web server. The flake should be cached on the client. - github.succeed("systemctl stop httpd.service") - - info = json.loads(client.succeed("nix flake metadata nixpkgs --json")) - date = time.strftime("%Y%m%d%H%M%S", time.gmtime(info['lastModified'])) - assert date == "${nixpkgs.lastModifiedDate}", "time mismatch" - - client.succeed("nix build nixpkgs#hello") - - # The build shouldn't fail even with --tarball-ttl 0 (the server - # being down should not be a fatal error). - client.succeed("nix build nixpkgs#fuse --tarball-ttl 0") - ''; + services.httpd.virtualHosts."github.com" = { + forceSSL = true; + sslServerKey = "${cert}/server.key"; + sslServerCert = "${cert}/server.crt"; + servedDirs = [ + { + urlPath = "/NixOS/nixpkgs"; + dir = archive; + } + ]; + }; + }; + + client = + { + config, + lib, + pkgs, + nodes, + ... 
+ }: + { + virtualisation.writableStore = true; + virtualisation.diskSize = 2048; + virtualisation.additionalPaths = [ + pkgs.hello + pkgs.fuse + ]; + virtualisation.memorySize = 4096; + nix.settings.substituters = lib.mkForce [ ]; + nix.extraOptions = "experimental-features = nix-command flakes"; + networking.hosts.${(builtins.head nodes.github.networking.interfaces.eth1.ipv4.addresses).address} = + [ + "channels.nixos.org" + "api.github.com" + "github.com" + ]; + security.pki.certificateFiles = [ "${cert}/ca.crt" ]; + }; + }; + + testScript = + { nodes }: + '' + # fmt: off + import json + import time + + start_all() + + def cat_log(): + github.succeed("cat /var/log/httpd/*.log >&2") + + github.wait_for_unit("httpd.service") + github.wait_for_unit("network-online.target") + + client.wait_for_unit("network-online.target") + client.succeed("curl -v https://github.com/ >&2") + out = client.succeed("nix registry list") + print(out) + assert "github:NixOS/nixpkgs" in out, "nixpkgs flake not found" + assert "github:fancy-enterprise/private-flake" in out, "private flake not found" + cat_log() + + # If no github access token is provided, nix should use the public archive url... + out = client.succeed("nix flake metadata nixpkgs --json") + print(out) + info = json.loads(out) + assert info["revision"] == "${nixpkgs.rev}", f"revision mismatch: {info['revision']} != ${nixpkgs.rev}" + cat_log() + + # ... otherwise it should use the API + out = client.succeed("nix flake metadata private-flake --json --access-tokens github.com=ghp_000000000000000000000000000000000000 --tarball-ttl 0") + print(out) + info = json.loads(out) + assert info["revision"] == "${private-flake-rev}", f"revision mismatch: {info['revision']} != ${private-flake-rev}" + assert info["fingerprint"] + cat_log() + + # Fetching with the resolved URL should produce the same result. + info2 = json.loads(client.succeed(f"nix flake metadata {info['url']} --json --access-tokens github.com=ghp_000000000000000000000000000000000000 --tarball-ttl 0")) + print(info["fingerprint"], info2["fingerprint"]) + assert info["fingerprint"] == info2["fingerprint"], "fingerprint mismatch" + + client.succeed("nix registry pin nixpkgs") + client.succeed("nix flake metadata nixpkgs --tarball-ttl 0 >&2") + + # Test fetchTree on a github URL. + hash = client.succeed(f"nix eval --no-trust-tarballs-from-git-forges --raw --expr '(fetchTree {info['url']}).narHash'") + assert hash == info['locked']['narHash'] + + # Fetching without a narHash should succeed if trust-github is set and fail otherwise. + client.succeed(f"nix eval --raw --expr 'builtins.fetchTree github:github:fancy-enterprise/private-flake/{info['revision']}'") + out = client.fail(f"nix eval --no-trust-tarballs-from-git-forges --raw --expr 'builtins.fetchTree github:github:fancy-enterprise/private-flake/{info['revision']}' 2>&1") + assert "will not fetch unlocked input" in out, "--no-trust-tarballs-from-git-forges did not fail with the expected error" + + # Shut down the web server. The flake should be cached on the client. + github.succeed("systemctl stop httpd.service") + + info = json.loads(client.succeed("nix flake metadata nixpkgs --json")) + date = time.strftime("%Y%m%d%H%M%S", time.gmtime(info['lastModified'])) + assert date == "${nixpkgs.lastModifiedDate}", "time mismatch" + + client.succeed("nix build nixpkgs#hello") + + # The build shouldn't fail even with --tarball-ttl 0 (the server + # being down should not be a fatal error). 
+ client.succeed("nix build nixpkgs#fuse --tarball-ttl 0") + ''; } diff --git a/tests/nixos/gzip-content-encoding.nix b/tests/nixos/gzip-content-encoding.nix index a5a0033fd19..22d196c6186 100644 --- a/tests/nixos/gzip-content-encoding.nix +++ b/tests/nixos/gzip-content-encoding.nix @@ -30,42 +30,45 @@ in { name = "gzip-content-encoding"; - nodes = - { machine = + nodes = { + machine = { config, pkgs, ... }: - { networking.firewall.allowedTCPPorts = [ 80 ]; + { + networking.firewall.allowedTCPPorts = [ 80 ]; services.nginx.enable = true; - services.nginx.virtualHosts."localhost" = - { root = "${ztdCompressedFile}/share/"; - # Make sure that nginx really tries to compress the - # file on the fly with no regard to size/mime. - # http://nginx.org/en/docs/http/ngx_http_gzip_module.html - extraConfig = '' - gzip on; - gzip_types *; - gzip_proxied any; - gzip_min_length 0; - ''; - }; + services.nginx.virtualHosts."localhost" = { + root = "${ztdCompressedFile}/share/"; + # Make sure that nginx really tries to compress the + # file on the fly with no regard to size/mime. + # http://nginx.org/en/docs/http/ngx_http_gzip_module.html + extraConfig = '' + gzip on; + gzip_types *; + gzip_proxied any; + gzip_min_length 0; + ''; + }; virtualisation.writableStore = true; virtualisation.additionalPaths = with pkgs; [ file ]; nix.settings.substituters = lib.mkForce [ ]; }; - }; + }; # Check that when nix-prefetch-url is used with a zst tarball it does not get decompressed. - testScript = { nodes }: '' - # fmt: off - start_all() + testScript = + { nodes }: + '' + # fmt: off + start_all() - machine.wait_for_unit("nginx.service") - machine.succeed(""" - # Make sure that the file is properly compressed as the test would be meaningless otherwise - curl --compressed -v http://localhost/archive |& tr -s ' ' |& grep --ignore-case 'content-encoding: gzip' - archive_path=$(nix-prefetch-url http://localhost/archive --print-path | tail -n1) - [[ $(${fileCmd} --brief --mime-type $archive_path) == "application/zstd" ]] - tar --zstd -xf $archive_path - """) - ''; + machine.wait_for_unit("nginx.service") + machine.succeed(""" + # Make sure that the file is properly compressed as the test would be meaningless otherwise + curl --compressed -v http://localhost/archive |& tr -s ' ' |& grep --ignore-case 'content-encoding: gzip' + archive_path=$(nix-prefetch-url http://localhost/archive --print-path | tail -n1) + [[ $(${fileCmd} --brief --mime-type $archive_path) == "application/zstd" ]] + tar --zstd -xf $archive_path + """) + ''; } diff --git a/tests/nixos/nix-copy-closure.nix b/tests/nixos/nix-copy-closure.nix index 44324e989b3..b6ec856e0e4 100644 --- a/tests/nixos/nix-copy-closure.nix +++ b/tests/nixos/nix-copy-closure.nix @@ -1,6 +1,11 @@ # Test ‘nix-copy-closure’. -{ lib, config, nixpkgs, ... }: +{ + lib, + config, + nixpkgs, + ... +}: let pkgs = config.nodes.client.nixpkgs.pkgs; @@ -10,74 +15,90 @@ let pkgC = pkgs.hello; pkgD = pkgs.tmux; -in { +in +{ name = "nix-copy-closure"; - nodes = - { client = - { config, lib, pkgs, ... }: - { virtualisation.writableStore = true; - virtualisation.additionalPaths = [ pkgA pkgD.drvPath ]; - nix.settings.substituters = lib.mkForce [ ]; - }; - - server = - { config, pkgs, ... }: - { services.openssh.enable = true; - virtualisation.writableStore = true; - virtualisation.additionalPaths = [ pkgB pkgC ]; - }; - }; - - testScript = { nodes }: '' - # fmt: off - import subprocess - - start_all() - - # Create an SSH key on the client. 
- subprocess.run([ - "${pkgs.openssh}/bin/ssh-keygen", "-t", "ed25519", "-f", "key", "-N", "" - ], capture_output=True, check=True) - - client.succeed("mkdir -m 700 /root/.ssh") - client.copy_from_host("key", "/root/.ssh/id_ed25519") - client.succeed("chmod 600 /root/.ssh/id_ed25519") - - # Install the SSH key on the server. - server.succeed("mkdir -m 700 /root/.ssh") - server.copy_from_host("key.pub", "/root/.ssh/authorized_keys") - server.wait_for_unit("sshd") - server.wait_for_unit("multi-user.target") - server.wait_for_unit("network-online.target") - - client.wait_for_unit("network-online.target") - client.succeed(f"ssh -o StrictHostKeyChecking=no {server.name} 'echo hello world'") - - # Copy the closure of package A from the client to the server. - server.fail("nix-store --check-validity ${pkgA}") - client.succeed("nix-copy-closure --to server --gzip ${pkgA} >&2") - server.succeed("nix-store --check-validity ${pkgA}") - - # Copy the closure of package B from the server to the client. - client.fail("nix-store --check-validity ${pkgB}") - client.succeed("nix-copy-closure --from server --gzip ${pkgB} >&2") - client.succeed("nix-store --check-validity ${pkgB}") - - # Copy the closure of package C via the SSH substituter. - client.fail("nix-store -r ${pkgC}") - - # Copy the derivation of package D's derivation from the client to the server. - server.fail("nix-store --check-validity ${pkgD.drvPath}") - client.succeed("nix-copy-closure --to server --gzip ${pkgD.drvPath} >&2") - server.succeed("nix-store --check-validity ${pkgD.drvPath}") - - # FIXME - # client.succeed( - # "nix-store --option use-ssh-substituter true" - # " --option ssh-substituter-hosts root\@server" - # " -r ${pkgC} >&2" - # ) - # client.succeed("nix-store --check-validity ${pkgC}") - ''; + nodes = { + client = + { + config, + lib, + pkgs, + ... + }: + { + virtualisation.writableStore = true; + virtualisation.additionalPaths = [ + pkgA + pkgD.drvPath + ]; + nix.settings.substituters = lib.mkForce [ ]; + }; + + server = + { config, pkgs, ... }: + { + services.openssh.enable = true; + virtualisation.writableStore = true; + virtualisation.additionalPaths = [ + pkgB + pkgC + ]; + }; + }; + + testScript = + { nodes }: + '' + # fmt: off + import subprocess + + start_all() + + # Create an SSH key on the client. + subprocess.run([ + "${pkgs.openssh}/bin/ssh-keygen", "-t", "ed25519", "-f", "key", "-N", "" + ], capture_output=True, check=True) + + client.succeed("mkdir -m 700 /root/.ssh") + client.copy_from_host("key", "/root/.ssh/id_ed25519") + client.succeed("chmod 600 /root/.ssh/id_ed25519") + + # Install the SSH key on the server. + server.succeed("mkdir -m 700 /root/.ssh") + server.copy_from_host("key.pub", "/root/.ssh/authorized_keys") + server.wait_for_unit("sshd") + server.wait_for_unit("multi-user.target") + server.wait_for_unit("network-online.target") + + client.wait_for_unit("network-online.target") + client.succeed(f"ssh -o StrictHostKeyChecking=no {server.name} 'echo hello world'") + + # Copy the closure of package A from the client to the server. + server.fail("nix-store --check-validity ${pkgA}") + client.succeed("nix-copy-closure --to server --gzip ${pkgA} >&2") + server.succeed("nix-store --check-validity ${pkgA}") + + # Copy the closure of package B from the server to the client. + client.fail("nix-store --check-validity ${pkgB}") + client.succeed("nix-copy-closure --from server --gzip ${pkgB} >&2") + client.succeed("nix-store --check-validity ${pkgB}") + + # Copy the closure of package C via the SSH substituter. 
+ client.fail("nix-store -r ${pkgC}") + + # Copy the derivation of package D's derivation from the client to the server. + server.fail("nix-store --check-validity ${pkgD.drvPath}") + client.succeed("nix-copy-closure --to server --gzip ${pkgD.drvPath} >&2") + server.succeed("nix-store --check-validity ${pkgD.drvPath}") + + # FIXME + # client.succeed( + # "nix-store --option use-ssh-substituter true" + # " --option ssh-substituter-hosts root\@server" + # " -r ${pkgC} >&2" + # ) + # client.succeed("nix-store --check-validity ${pkgC}") + ''; } diff --git a/tests/nixos/nix-copy.nix b/tests/nixos/nix-copy.nix index a6a04b52ca6..3565e83e71a 100644 --- a/tests/nixos/nix-copy.nix +++ b/tests/nixos/nix-copy.nix @@ -2,7 +2,13 @@ # Run interactively with: # rm key key.pub; nix run .#hydraJobs.tests.nix-copy.driverInteractive -{ lib, config, nixpkgs, hostPkgs, ... }: +{ + lib, + config, + nixpkgs, + hostPkgs, + ... +}: let pkgs = config.nodes.client.nixpkgs.pkgs; @@ -12,101 +18,117 @@ let pkgC = pkgs.hello; pkgD = pkgs.tmux; -in { +in +{ name = "nix-copy"; enableOCR = true; - nodes = - { client = - { config, lib, pkgs, ... }: - { virtualisation.writableStore = true; - virtualisation.additionalPaths = [ pkgA pkgD.drvPath ]; - nix.settings.substituters = lib.mkForce [ ]; - nix.settings.experimental-features = [ "nix-command" ]; - services.getty.autologinUser = "root"; - programs.ssh.extraConfig = '' - Host * - ControlMaster auto - ControlPath ~/.ssh/master-%h:%r@%n:%p - ControlPersist 15m - ''; - }; - - server = - { config, pkgs, ... }: - { services.openssh.enable = true; - services.openssh.settings.PermitRootLogin = "yes"; - users.users.root.hashedPasswordFile = null; - users.users.root.password = "foobar"; - virtualisation.writableStore = true; - virtualisation.additionalPaths = [ pkgB pkgC ]; - }; - }; - - testScript = { nodes }: '' - # fmt: off - import subprocess - - # Create an SSH key on the client. - subprocess.run([ - "${pkgs.openssh}/bin/ssh-keygen", "-t", "ed25519", "-f", "key", "-N", "" - ], capture_output=True, check=True) - - start_all() - - server.wait_for_unit("sshd") - server.wait_for_unit("multi-user.target") - server.wait_for_unit("network-online.target") - - client.wait_for_unit("network-online.target") - client.wait_for_unit("getty@tty1.service") - # Either the prompt: ]# - # or an OCR misreading of it: 1# - client.wait_for_text("[]1]#") - - # Copy the closure of package A from the client to the server using password authentication, - # and check that all prompts are visible - server.fail("nix-store --check-validity ${pkgA}") - client.send_chars("nix copy --to ssh://server ${pkgA} >&2; echo -n do; echo ne\n") - client.wait_for_text("continue connecting") - client.send_chars("yes\n") - client.wait_for_text("Password:") - client.send_chars("foobar\n") - client.wait_for_text("done") - server.succeed("nix-store --check-validity ${pkgA}") - - # Check that ControlMaster is working - client.send_chars("nix copy --to ssh://server ${pkgA} >&2; echo done\n") - client.wait_for_text("done") - - client.copy_from_host("key", "/root/.ssh/id_ed25519") - client.succeed("chmod 600 /root/.ssh/id_ed25519") - - # Install the SSH key on the server. 
- server.copy_from_host("key.pub", "/root/.ssh/authorized_keys") - server.succeed("systemctl restart sshd") - client.succeed(f"ssh -o StrictHostKeyChecking=no {server.name} 'echo hello world'") - client.succeed(f"ssh -O check {server.name}") - client.succeed(f"ssh -O exit {server.name}") - client.fail(f"ssh -O check {server.name}") - - # Check that an explicit master will work - client.succeed(f"ssh -MNfS /tmp/master {server.name}") - client.succeed(f"ssh -S /tmp/master -O check {server.name}") - client.succeed("NIX_SSHOPTS='-oControlPath=/tmp/master' nix copy --to ssh://server ${pkgA} >&2") - client.succeed(f"ssh -S /tmp/master -O exit {server.name}") - - # Copy the closure of package B from the server to the client, using ssh-ng. - client.fail("nix-store --check-validity ${pkgB}") - # Shouldn't download untrusted paths by default - client.fail("nix copy --from ssh-ng://server ${pkgB} >&2") - client.succeed("nix copy --no-check-sigs --from ssh-ng://server ${pkgB} >&2") - client.succeed("nix-store --check-validity ${pkgB}") - - # Copy the derivation of package D's derivation from the client to the server. - server.fail("nix-store --check-validity ${pkgD.drvPath}") - client.succeed("nix copy --derivation --to ssh://server ${pkgD.drvPath} >&2") - server.succeed("nix-store --check-validity ${pkgD.drvPath}") - ''; + nodes = { + client = + { + config, + lib, + pkgs, + ... + }: + { + virtualisation.writableStore = true; + virtualisation.additionalPaths = [ + pkgA + pkgD.drvPath + ]; + nix.settings.substituters = lib.mkForce [ ]; + nix.settings.experimental-features = [ "nix-command" ]; + services.getty.autologinUser = "root"; + programs.ssh.extraConfig = '' + Host * + ControlMaster auto + ControlPath ~/.ssh/master-%h:%r@%n:%p + ControlPersist 15m + ''; + }; + + server = + { config, pkgs, ... }: + { + services.openssh.enable = true; + services.openssh.settings.PermitRootLogin = "yes"; + users.users.root.hashedPasswordFile = null; + users.users.root.password = "foobar"; + virtualisation.writableStore = true; + virtualisation.additionalPaths = [ + pkgB + pkgC + ]; + }; + }; + + testScript = + { nodes }: + '' + # fmt: off + import subprocess + + # Create an SSH key on the client. + subprocess.run([ + "${pkgs.openssh}/bin/ssh-keygen", "-t", "ed25519", "-f", "key", "-N", "" + ], capture_output=True, check=True) + + start_all() + + server.wait_for_unit("sshd") + server.wait_for_unit("multi-user.target") + server.wait_for_unit("network-online.target") + + client.wait_for_unit("network-online.target") + client.wait_for_unit("getty@tty1.service") + # Either the prompt: ]# + # or an OCR misreading of it: 1# + client.wait_for_text("[]1]#") + + # Copy the closure of package A from the client to the server using password authentication, + # and check that all prompts are visible + server.fail("nix-store --check-validity ${pkgA}") + client.send_chars("nix copy --to ssh://server ${pkgA} >&2; echo -n do; echo ne\n") + client.wait_for_text("continue connecting") + client.send_chars("yes\n") + client.wait_for_text("Password:") + client.send_chars("foobar\n") + client.wait_for_text("done") + server.succeed("nix-store --check-validity ${pkgA}") + + # Check that ControlMaster is working + client.send_chars("nix copy --to ssh://server ${pkgA} >&2; echo done\n") + client.wait_for_text("done") + + client.copy_from_host("key", "/root/.ssh/id_ed25519") + client.succeed("chmod 600 /root/.ssh/id_ed25519") + + # Install the SSH key on the server. 
+ server.copy_from_host("key.pub", "/root/.ssh/authorized_keys") + server.succeed("systemctl restart sshd") + client.succeed(f"ssh -o StrictHostKeyChecking=no {server.name} 'echo hello world'") + client.succeed(f"ssh -O check {server.name}") + client.succeed(f"ssh -O exit {server.name}") + client.fail(f"ssh -O check {server.name}") + + # Check that an explicit master will work + client.succeed(f"ssh -MNfS /tmp/master {server.name}") + client.succeed(f"ssh -S /tmp/master -O check {server.name}") + client.succeed("NIX_SSHOPTS='-oControlPath=/tmp/master' nix copy --to ssh://server ${pkgA} >&2") + client.succeed(f"ssh -S /tmp/master -O exit {server.name}") + + # Copy the closure of package B from the server to the client, using ssh-ng. + client.fail("nix-store --check-validity ${pkgB}") + # Shouldn't download untrusted paths by default + client.fail("nix copy --from ssh-ng://server ${pkgB} >&2") + client.succeed("nix copy --no-check-sigs --from ssh-ng://server ${pkgB} >&2") + client.succeed("nix-store --check-validity ${pkgB}") + + # Copy the derivation of package D's derivation from the client to the server. + server.fail("nix-store --check-validity ${pkgD.drvPath}") + client.succeed("nix copy --derivation --to ssh://server ${pkgD.drvPath} >&2") + server.succeed("nix-store --check-validity ${pkgD.drvPath}") + ''; } diff --git a/tests/nixos/nix-docker.nix b/tests/nixos/nix-docker.nix index 00b04482c15..bd77b25c8b2 100644 --- a/tests/nixos/nix-docker.nix +++ b/tests/nixos/nix-docker.nix @@ -1,6 +1,12 @@ # Test the container built by ../../docker.nix. -{ lib, config, nixpkgs, hostPkgs, ... }: +{ + lib, + config, + nixpkgs, + hostPkgs, + ... +}: let pkgs = config.nodes.machine.nixpkgs.pkgs; @@ -19,36 +25,54 @@ let containerTestScript = ./nix-docker-test.sh; -in { +in +{ name = "nix-docker"; - nodes = - { machine = - { config, lib, pkgs, ... }: - { virtualisation.diskSize = 4096; - }; - cache = - { config, lib, pkgs, ... }: - { virtualisation.additionalPaths = [ pkgs.stdenv pkgs.hello ]; - services.harmonia.enable = true; - networking.firewall.allowedTCPPorts = [ 5000 ]; - }; - }; - - testScript = { nodes }: '' - cache.wait_for_unit("harmonia.service") - cache.wait_for_unit("network-online.target") - - machine.succeed("mkdir -p /etc/containers") - machine.succeed("""echo '{"default":[{"type":"insecureAcceptAnything"}]}' > /etc/containers/policy.json""") - - machine.succeed("${pkgs.podman}/bin/podman load -i ${nixImage}") - machine.succeed("${pkgs.podman}/bin/podman run --rm nix nix --version") - machine.succeed("${pkgs.podman}/bin/podman run --rm -i nix < ${containerTestScript}") - - machine.succeed("${pkgs.podman}/bin/podman load -i ${nixUserImage}") - machine.succeed("${pkgs.podman}/bin/podman run --rm nix-user nix --version") - machine.succeed("${pkgs.podman}/bin/podman run --rm -i nix-user < ${containerTestScript}") - machine.succeed("[[ $(${pkgs.podman}/bin/podman run --rm nix-user stat -c %u /nix/store) = 1000 ]]") - ''; + nodes = { + machine = + { + config, + lib, + pkgs, + ... + }: + { + virtualisation.diskSize = 4096; + }; + cache = + { + config, + lib, + pkgs, + ... 
+ }: + { + virtualisation.additionalPaths = [ + pkgs.stdenv + pkgs.hello + ]; + services.harmonia.enable = true; + networking.firewall.allowedTCPPorts = [ 5000 ]; + }; + }; + + testScript = + { nodes }: + '' + cache.wait_for_unit("harmonia.service") + cache.wait_for_unit("network-online.target") + + machine.succeed("mkdir -p /etc/containers") + machine.succeed("""echo '{"default":[{"type":"insecureAcceptAnything"}]}' > /etc/containers/policy.json""") + + machine.succeed("${pkgs.podman}/bin/podman load -i ${nixImage}") + machine.succeed("${pkgs.podman}/bin/podman run --rm nix nix --version") + machine.succeed("${pkgs.podman}/bin/podman run --rm -i nix < ${containerTestScript}") + + machine.succeed("${pkgs.podman}/bin/podman load -i ${nixUserImage}") + machine.succeed("${pkgs.podman}/bin/podman run --rm nix-user nix --version") + machine.succeed("${pkgs.podman}/bin/podman run --rm -i nix-user < ${containerTestScript}") + machine.succeed("[[ $(${pkgs.podman}/bin/podman run --rm nix-user stat -c %u /nix/store) = 1000 ]]") + ''; } diff --git a/tests/nixos/nss-preload.nix b/tests/nixos/nss-preload.nix index b7e704f395d..29cd5e6a296 100644 --- a/tests/nixos/nss-preload.nix +++ b/tests/nixos/nss-preload.nix @@ -1,4 +1,9 @@ -{ lib, config, nixpkgs, ... }: +{ + lib, + config, + nixpkgs, + ... +}: let @@ -44,81 +49,119 @@ in name = "nss-preload"; nodes = { - http_dns = { lib, pkgs, config, ... }: { - networking.firewall.enable = false; - networking.interfaces.eth1.ipv6.addresses = lib.mkForce [ - { address = "fd21::1"; prefixLength = 64; } - ]; - networking.interfaces.eth1.ipv4.addresses = lib.mkForce [ - { address = "192.168.0.1"; prefixLength = 24; } - ]; - - services.unbound = { - enable = true; - enableRootTrustAnchor = false; - settings = { - server = { - interface = [ "192.168.0.1" "fd21::1" "::1" "127.0.0.1" ]; - access-control = [ "192.168.0.0/24 allow" "fd21::/64 allow" "::1 allow" "127.0.0.0/8 allow" ]; - local-data = [ - ''"example.com. IN A 192.168.0.1"'' - ''"example.com. IN AAAA fd21::1"'' - ''"tarballs.nixos.org. IN A 192.168.0.1"'' - ''"tarballs.nixos.org. IN AAAA fd21::1"'' - ]; + http_dns = + { + lib, + pkgs, + config, + ... + }: + { + networking.firewall.enable = false; + networking.interfaces.eth1.ipv6.addresses = lib.mkForce [ + { + address = "fd21::1"; + prefixLength = 64; + } + ]; + networking.interfaces.eth1.ipv4.addresses = lib.mkForce [ + { + address = "192.168.0.1"; + prefixLength = 24; + } + ]; + + services.unbound = { + enable = true; + enableRootTrustAnchor = false; + settings = { + server = { + interface = [ + "192.168.0.1" + "fd21::1" + "::1" + "127.0.0.1" + ]; + access-control = [ + "192.168.0.0/24 allow" + "fd21::/64 allow" + "::1 allow" + "127.0.0.0/8 allow" + ]; + local-data = [ + ''"example.com. IN A 192.168.0.1"'' + ''"example.com. IN AAAA fd21::1"'' + ''"tarballs.nixos.org. IN A 192.168.0.1"'' + ''"tarballs.nixos.org. IN AAAA fd21::1"'' + ]; + }; }; }; - }; - services.nginx = { - enable = true; - virtualHosts."example.com" = { - root = pkgs.runCommand "testdir" {} '' - mkdir "$out" - echo hello world > "$out/index.html" - ''; + services.nginx = { + enable = true; + virtualHosts."example.com" = { + root = pkgs.runCommand "testdir" { } '' + mkdir "$out" + echo hello world > "$out/index.html" + ''; + }; }; }; - }; # client consumes a remote resolver - client = { lib, nodes, pkgs, ... 
}: { - networking.useDHCP = false; - networking.nameservers = [ - (lib.head nodes.http_dns.networking.interfaces.eth1.ipv6.addresses).address - (lib.head nodes.http_dns.networking.interfaces.eth1.ipv4.addresses).address - ]; - networking.interfaces.eth1.ipv6.addresses = [ - { address = "fd21::10"; prefixLength = 64; } - ]; - networking.interfaces.eth1.ipv4.addresses = [ - { address = "192.168.0.10"; prefixLength = 24; } - ]; - - nix.settings.extra-sandbox-paths = lib.mkForce []; - nix.settings.substituters = lib.mkForce []; - nix.settings.sandbox = lib.mkForce true; - }; + client = + { + lib, + nodes, + pkgs, + ... + }: + { + networking.useDHCP = false; + networking.nameservers = [ + (lib.head nodes.http_dns.networking.interfaces.eth1.ipv6.addresses).address + (lib.head nodes.http_dns.networking.interfaces.eth1.ipv4.addresses).address + ]; + networking.interfaces.eth1.ipv6.addresses = [ + { + address = "fd21::10"; + prefixLength = 64; + } + ]; + networking.interfaces.eth1.ipv4.addresses = [ + { + address = "192.168.0.10"; + prefixLength = 24; + } + ]; + + nix.settings.extra-sandbox-paths = lib.mkForce [ ]; + nix.settings.substituters = lib.mkForce [ ]; + nix.settings.sandbox = lib.mkForce true; + }; }; - testScript = { nodes, ... }: '' - http_dns.wait_for_unit("network-online.target") - http_dns.wait_for_unit("nginx") - http_dns.wait_for_open_port(80) - http_dns.wait_for_unit("unbound") - http_dns.wait_for_open_port(53) - - client.start() - client.wait_for_unit('multi-user.target') - client.wait_for_unit('network-online.target') - - with subtest("can fetch data from a remote server outside sandbox"): - client.succeed("nix --version >&2") - client.succeed("curl -vvv http://example.com/index.html >&2") - - with subtest("nix-build can lookup dns and fetch data"): - client.succeed(""" - nix-build ${nix-fetch} >&2 - """) - ''; + testScript = + { nodes, ... }: + '' + http_dns.wait_for_unit("network-online.target") + http_dns.wait_for_unit("nginx") + http_dns.wait_for_open_port(80) + http_dns.wait_for_unit("unbound") + http_dns.wait_for_open_port(53) + + client.start() + client.wait_for_unit('multi-user.target') + client.wait_for_unit('network-online.target') + + with subtest("can fetch data from a remote server outside sandbox"): + client.succeed("nix --version >&2") + client.succeed("curl -vvv http://example.com/index.html >&2") + + with subtest("nix-build can lookup dns and fetch data"): + client.succeed(""" + nix-build ${nix-fetch} >&2 + """) + ''; } diff --git a/tests/nixos/remote-builds-ssh-ng.nix b/tests/nixos/remote-builds-ssh-ng.nix index 3562d2d2f6b..72652202932 100644 --- a/tests/nixos/remote-builds-ssh-ng.nix +++ b/tests/nixos/remote-builds-ssh-ng.nix @@ -1,11 +1,17 @@ -test@{ config, lib, hostPkgs, ... }: +test@{ + config, + lib, + hostPkgs, + ... +}: let pkgs = config.nodes.client.nixpkgs.pkgs; # Trivial Nix expression to build remotely. - expr = config: nr: pkgs.writeText "expr.nix" - '' + expr = + config: nr: + pkgs.writeText "expr.nix" '' let utils = builtins.storePath ${config.system.build.extraUtils}; in derivation { name = "hello-${toString nr}"; @@ -41,87 +47,94 @@ in config = { name = lib.mkDefault "remote-builds-ssh-ng"; - nodes = - { - builder = - { config, pkgs, ... }: - { - imports = [ test.config.builders.config ]; - services.openssh.enable = true; - virtualisation.writableStore = true; - nix.settings.sandbox = true; - nix.settings.substituters = lib.mkForce [ ]; - }; - - client = - { config, lib, pkgs, ... 
}: - { - nix.settings.max-jobs = 0; # force remote building - nix.distributedBuilds = true; - nix.buildMachines = - [{ - hostName = "builder"; - sshUser = "root"; - sshKey = "/root/.ssh/id_ed25519"; - system = "i686-linux"; - maxJobs = 1; - protocol = "ssh-ng"; - }]; - virtualisation.writableStore = true; - virtualisation.additionalPaths = [ config.system.build.extraUtils ]; - nix.settings.substituters = lib.mkForce [ ]; - programs.ssh.extraConfig = "ConnectTimeout 30"; - }; - }; - - testScript = { nodes }: '' - # fmt: off - import subprocess - - start_all() - - # Create an SSH key on the client. - subprocess.run([ - "${hostPkgs.openssh}/bin/ssh-keygen", "-t", "ed25519", "-f", "key", "-N", "" - ], capture_output=True, check=True) - client.succeed("mkdir -p -m 700 /root/.ssh") - client.copy_from_host("key", "/root/.ssh/id_ed25519") - client.succeed("chmod 600 /root/.ssh/id_ed25519") - - # Install the SSH key on the builder. - client.wait_for_unit("network-online.target") - builder.succeed("mkdir -p -m 700 /root/.ssh") - builder.copy_from_host("key.pub", "/root/.ssh/authorized_keys") - builder.wait_for_unit("sshd") - builder.wait_for_unit("multi-user.target") - builder.wait_for_unit("network-online.target") - - client.succeed(f"ssh -o StrictHostKeyChecking=no {builder.name} 'echo hello world'") - - # Perform a build - out = client.succeed("nix-build ${expr nodes.client 1} 2> build-output") - - # Verify that the build was done on the builder - builder.succeed(f"test -e {out.strip()}") - - # Print the build log, prefix the log lines to avoid nix intercepting lines starting with @nix - buildOutput = client.succeed("sed -e 's/^/build-output:/' build-output") - print(buildOutput) - - # Make sure that we get the expected build output - client.succeed("grep -qF Hello build-output") - - # We don't want phase reporting in the build output - client.fail("grep -qF '@nix' build-output") - - # Get the log file - client.succeed(f"nix-store --read-log {out.strip()} > log-output") - # Prefix the log lines to avoid nix intercepting lines starting with @nix - logOutput = client.succeed("sed -e 's/^/log-file:/' log-output") - print(logOutput) - - # Check that we get phase reporting in the log file - client.succeed("grep -q '@nix {\"action\":\"setPhase\",\"phase\":\"buildPhase\"}' log-output") - ''; + nodes = { + builder = + { config, pkgs, ... }: + { + imports = [ test.config.builders.config ]; + services.openssh.enable = true; + virtualisation.writableStore = true; + nix.settings.sandbox = true; + nix.settings.substituters = lib.mkForce [ ]; + }; + + client = + { + config, + lib, + pkgs, + ... + }: + { + nix.settings.max-jobs = 0; # force remote building + nix.distributedBuilds = true; + nix.buildMachines = [ + { + hostName = "builder"; + sshUser = "root"; + sshKey = "/root/.ssh/id_ed25519"; + system = "i686-linux"; + maxJobs = 1; + protocol = "ssh-ng"; + } + ]; + virtualisation.writableStore = true; + virtualisation.additionalPaths = [ config.system.build.extraUtils ]; + nix.settings.substituters = lib.mkForce [ ]; + programs.ssh.extraConfig = "ConnectTimeout 30"; + }; + }; + + testScript = + { nodes }: + '' + # fmt: off + import subprocess + + start_all() + + # Create an SSH key on the client. 
+ subprocess.run([ + "${hostPkgs.openssh}/bin/ssh-keygen", "-t", "ed25519", "-f", "key", "-N", "" + ], capture_output=True, check=True) + client.succeed("mkdir -p -m 700 /root/.ssh") + client.copy_from_host("key", "/root/.ssh/id_ed25519") + client.succeed("chmod 600 /root/.ssh/id_ed25519") + + # Install the SSH key on the builder. + client.wait_for_unit("network-online.target") + builder.succeed("mkdir -p -m 700 /root/.ssh") + builder.copy_from_host("key.pub", "/root/.ssh/authorized_keys") + builder.wait_for_unit("sshd") + builder.wait_for_unit("multi-user.target") + builder.wait_for_unit("network-online.target") + + client.succeed(f"ssh -o StrictHostKeyChecking=no {builder.name} 'echo hello world'") + + # Perform a build + out = client.succeed("nix-build ${expr nodes.client 1} 2> build-output") + + # Verify that the build was done on the builder + builder.succeed(f"test -e {out.strip()}") + + # Print the build log, prefix the log lines to avoid nix intercepting lines starting with @nix + buildOutput = client.succeed("sed -e 's/^/build-output:/' build-output") + print(buildOutput) + + # Make sure that we get the expected build output + client.succeed("grep -qF Hello build-output") + + # We don't want phase reporting in the build output + client.fail("grep -qF '@nix' build-output") + + # Get the log file + client.succeed(f"nix-store --read-log {out.strip()} > log-output") + # Prefix the log lines to avoid nix intercepting lines starting with @nix + logOutput = client.succeed("sed -e 's/^/log-file:/' log-output") + print(logOutput) + + # Check that we get phase reporting in the log file + client.succeed("grep -q '@nix {\"action\":\"setPhase\",\"phase\":\"buildPhase\"}' log-output") + ''; }; } diff --git a/tests/nixos/remote-builds.nix b/tests/nixos/remote-builds.nix index 4fca4b93849..3251984db5e 100644 --- a/tests/nixos/remote-builds.nix +++ b/tests/nixos/remote-builds.nix @@ -1,6 +1,11 @@ # Test Nix's remote build feature. -test@{ config, lib, hostPkgs, ... }: +test@{ + config, + lib, + hostPkgs, + ... +}: let pkgs = config.nodes.client.nixpkgs.pkgs; @@ -21,8 +26,9 @@ let }; # Trivial Nix expression to build remotely. - expr = config: nr: pkgs.writeText "expr.nix" - '' + expr = + config: nr: + pkgs.writeText "expr.nix" '' let utils = builtins.storePath ${config.system.build.extraUtils}; in derivation { name = "hello-${toString nr}"; @@ -52,107 +58,112 @@ in config = { name = lib.mkDefault "remote-builds"; - nodes = - { - builder1 = builder; - builder2 = builder; - - client = - { config, lib, pkgs, ... }: - { - nix.settings.max-jobs = 0; # force remote building - nix.distributedBuilds = true; - nix.buildMachines = - [ - { - hostName = "builder1"; - sshUser = "root"; - sshKey = "/root/.ssh/id_ed25519"; - system = "i686-linux"; - maxJobs = 1; - } - { - hostName = "builder2"; - sshUser = "root"; - sshKey = "/root/.ssh/id_ed25519"; - system = "i686-linux"; - maxJobs = 1; - } - ]; - virtualisation.writableStore = true; - virtualisation.additionalPaths = [ config.system.build.extraUtils ]; - nix.settings.substituters = lib.mkForce [ ]; - programs.ssh.extraConfig = "ConnectTimeout 30"; - environment.systemPackages = [ - # `bad-shell` is used to make sure Nix works in an environment with a misbehaving shell. - # - # More realistically, a bad shell would still run the command ("echo started") - # but considering that our solution is to avoid this shell (set via $SHELL), we - # don't need to bother with a more functional mock shell. 
- (pkgs.writeScriptBin "bad-shell" '' - #!${pkgs.runtimeShell} - echo "Hello, I am a broken shell" - '') - ]; - }; - }; - - testScript = { nodes }: '' - # fmt: off - import subprocess - - start_all() - - # Create an SSH key on the client. - subprocess.run([ - "${hostPkgs.openssh}/bin/ssh-keygen", "-t", "ed25519", "-f", "key", "-N", "" - ], capture_output=True, check=True) - client.succeed("mkdir -p -m 700 /root/.ssh") - client.copy_from_host("key", "/root/.ssh/id_ed25519") - client.succeed("chmod 600 /root/.ssh/id_ed25519") - - # Install the SSH key on the builders. - client.wait_for_unit("network-online.target") - for builder in [builder1, builder2]: - builder.succeed("mkdir -p -m 700 /root/.ssh") - builder.copy_from_host("key.pub", "/root/.ssh/authorized_keys") - builder.wait_for_unit("sshd") - builder.wait_for_unit("network-online.target") - # Make sure the builder can handle our login correctly - builder.wait_for_unit("multi-user.target") - # Make sure there's no funny business on the client either - # (should not be necessary, but we have reason to be careful) - client.wait_for_unit("multi-user.target") - client.succeed(f""" - ssh -o StrictHostKeyChecking=no {builder.name} \ - 'echo hello world on $(hostname)' >&2 - """) - - ${lib.optionalString supportsBadShell '' - # Check that SSH uses SHELL for LocalCommand, as expected, and check that - # our test setup here is working. The next test will use this bad SHELL. - client.succeed(f"SHELL=$(which bad-shell) ssh -oLocalCommand='true' -oPermitLocalCommand=yes {builder1.name} 'echo hello world' | grep -F 'Hello, I am a broken shell'") - ''} - - # Perform a build and check that it was performed on the builder. - out = client.succeed( - "${lib.optionalString supportsBadShell "SHELL=$(which bad-shell)"} nix-build ${expr nodes.client 1} 2> build-output", - "grep -q Hello build-output" - ) - builder1.succeed(f"test -e {out}") - - # And a parallel build. - paths = client.succeed(r'nix-store -r $(nix-instantiate ${expr nodes.client 2})\!out $(nix-instantiate ${expr nodes.client 3})\!out') - out1, out2 = paths.split() - builder1.succeed(f"test -e {out1} -o -e {out2}") - builder2.succeed(f"test -e {out1} -o -e {out2}") - - # And a failing build. - client.fail("nix-build ${expr nodes.client 5}") - - # Test whether the build hook automatically skips unavailable builders. - builder1.block() - client.succeed("nix-build ${expr nodes.client 4}") - ''; + nodes = { + builder1 = builder; + builder2 = builder; + + client = + { + config, + lib, + pkgs, + ... + }: + { + nix.settings.max-jobs = 0; # force remote building + nix.distributedBuilds = true; + nix.buildMachines = [ + { + hostName = "builder1"; + sshUser = "root"; + sshKey = "/root/.ssh/id_ed25519"; + system = "i686-linux"; + maxJobs = 1; + } + { + hostName = "builder2"; + sshUser = "root"; + sshKey = "/root/.ssh/id_ed25519"; + system = "i686-linux"; + maxJobs = 1; + } + ]; + virtualisation.writableStore = true; + virtualisation.additionalPaths = [ config.system.build.extraUtils ]; + nix.settings.substituters = lib.mkForce [ ]; + programs.ssh.extraConfig = "ConnectTimeout 30"; + environment.systemPackages = [ + # `bad-shell` is used to make sure Nix works in an environment with a misbehaving shell. + # + # More realistically, a bad shell would still run the command ("echo started") + # but considering that our solution is to avoid this shell (set via $SHELL), we + # don't need to bother with a more functional mock shell. 
+ (pkgs.writeScriptBin "bad-shell" '' + #!${pkgs.runtimeShell} + echo "Hello, I am a broken shell" + '') + ]; + }; + }; + + testScript = + { nodes }: + '' + # fmt: off + import subprocess + + start_all() + + # Create an SSH key on the client. + subprocess.run([ + "${hostPkgs.openssh}/bin/ssh-keygen", "-t", "ed25519", "-f", "key", "-N", "" + ], capture_output=True, check=True) + client.succeed("mkdir -p -m 700 /root/.ssh") + client.copy_from_host("key", "/root/.ssh/id_ed25519") + client.succeed("chmod 600 /root/.ssh/id_ed25519") + + # Install the SSH key on the builders. + client.wait_for_unit("network-online.target") + for builder in [builder1, builder2]: + builder.succeed("mkdir -p -m 700 /root/.ssh") + builder.copy_from_host("key.pub", "/root/.ssh/authorized_keys") + builder.wait_for_unit("sshd") + builder.wait_for_unit("network-online.target") + # Make sure the builder can handle our login correctly + builder.wait_for_unit("multi-user.target") + # Make sure there's no funny business on the client either + # (should not be necessary, but we have reason to be careful) + client.wait_for_unit("multi-user.target") + client.succeed(f""" + ssh -o StrictHostKeyChecking=no {builder.name} \ + 'echo hello world on $(hostname)' >&2 + """) + + ${lib.optionalString supportsBadShell '' + # Check that SSH uses SHELL for LocalCommand, as expected, and check that + # our test setup here is working. The next test will use this bad SHELL. + client.succeed(f"SHELL=$(which bad-shell) ssh -oLocalCommand='true' -oPermitLocalCommand=yes {builder1.name} 'echo hello world' | grep -F 'Hello, I am a broken shell'") + ''} + + # Perform a build and check that it was performed on the builder. + out = client.succeed( + "${lib.optionalString supportsBadShell "SHELL=$(which bad-shell)"} nix-build ${expr nodes.client 1} 2> build-output", + "grep -q Hello build-output" + ) + builder1.succeed(f"test -e {out}") + + # And a parallel build. + paths = client.succeed(r'nix-store -r $(nix-instantiate ${expr nodes.client 2})\!out $(nix-instantiate ${expr nodes.client 3})\!out') + out1, out2 = paths.split() + builder1.succeed(f"test -e {out1} -o -e {out2}") + builder2.succeed(f"test -e {out1} -o -e {out2}") + + # And a failing build. + client.fail("nix-build ${expr nodes.client 5}") + + # Test whether the build hook automatically skips unavailable builders. + builder1.block() + client.succeed("nix-build ${expr nodes.client 4}") + ''; }; } diff --git a/tests/nixos/s3-binary-cache-store.nix b/tests/nixos/s3-binary-cache-store.nix index f8659b830cf..8e480866070 100644 --- a/tests/nixos/s3-binary-cache-store.nix +++ b/tests/nixos/s3-binary-cache-store.nix @@ -1,4 +1,9 @@ -{ lib, config, nixpkgs, ... }: +{ + lib, + config, + nixpkgs, + ... +}: let pkgs = config.nodes.client.nixpkgs.pkgs; @@ -12,71 +17,81 @@ let storeUrl = "s3://my-cache?endpoint=http://server:9000&region=eu-west-1"; objectThatDoesNotExist = "s3://my-cache/foo-that-does-not-exist?endpoint=http://server:9000&region=eu-west-1"; -in { +in +{ name = "s3-binary-cache-store"; - nodes = - { server = - { config, lib, pkgs, ... }: - { virtualisation.writableStore = true; - virtualisation.additionalPaths = [ pkgA ]; - environment.systemPackages = [ pkgs.minio-client ]; - nix.extraOptions = '' - experimental-features = nix-command - substituters = + nodes = { + server = + { + config, + lib, + pkgs, + ...
+ }: + { + virtualisation.writableStore = true; + virtualisation.additionalPaths = [ pkgA ]; + environment.systemPackages = [ pkgs.minio-client ]; + nix.extraOptions = '' + experimental-features = nix-command + substituters = + ''; + services.minio = { + enable = true; + region = "eu-west-1"; + rootCredentialsFile = pkgs.writeText "minio-credentials-full" '' + MINIO_ROOT_USER=${accessKey} + MINIO_ROOT_PASSWORD=${secretKey} ''; - services.minio = { - enable = true; - region = "eu-west-1"; - rootCredentialsFile = pkgs.writeText "minio-credentials-full" '' - MINIO_ROOT_USER=${accessKey} - MINIO_ROOT_PASSWORD=${secretKey} - ''; - }; - networking.firewall.allowedTCPPorts = [ 9000 ]; }; + networking.firewall.allowedTCPPorts = [ 9000 ]; + }; - client = - { config, pkgs, ... }: - { virtualisation.writableStore = true; - nix.extraOptions = '' - experimental-features = nix-command - substituters = - ''; - }; - }; + client = + { config, pkgs, ... }: + { + virtualisation.writableStore = true; + nix.extraOptions = '' + experimental-features = nix-command + substituters = + ''; + }; + }; - testScript = { nodes }: '' - # fmt: off - start_all() + testScript = + { nodes }: + '' + # fmt: off + start_all() - # Create a binary cache. - server.wait_for_unit("minio") - server.wait_for_unit("network-online.target") + # Create a binary cache. + server.wait_for_unit("minio") + server.wait_for_unit("network-online.target") - server.succeed("mc config host add minio http://localhost:9000 ${accessKey} ${secretKey} --api s3v4") - server.succeed("mc mb minio/my-cache") + server.succeed("mc config host add minio http://localhost:9000 ${accessKey} ${secretKey} --api s3v4") + server.succeed("mc mb minio/my-cache") - server.succeed("${env} nix copy --to '${storeUrl}' ${pkgA}") + server.succeed("${env} nix copy --to '${storeUrl}' ${pkgA}") - client.wait_for_unit("network-online.target") + client.wait_for_unit("network-online.target") - # Test fetchurl on s3:// URLs while we're at it. - client.succeed("${env} nix eval --impure --expr 'builtins.fetchurl { name = \"foo\"; url = \"s3://my-cache/nix-cache-info?endpoint=http://server:9000&region=eu-west-1\"; }'") + # Test fetchurl on s3:// URLs while we're at it. + client.succeed("${env} nix eval --impure --expr 'builtins.fetchurl { name = \"foo\"; url = \"s3://my-cache/nix-cache-info?endpoint=http://server:9000&region=eu-west-1\"; }'") - # Test that the format string in the error message is properly setup and won't display `%s` instead of the failed URI - msg = client.fail("${env} nix eval --impure --expr 'builtins.fetchurl { name = \"foo\"; url = \"${objectThatDoesNotExist}\"; }' 2>&1") - if "S3 object '${objectThatDoesNotExist}' does not exist" not in msg: - print(msg) # So that you can see the message that was improperly formatted - raise Exception("Error message formatting didn't work") + # Test that the format string in the error message is properly setup and won't display `%s` instead of the failed URI + msg = client.fail("${env} nix eval --impure --expr 'builtins.fetchurl { name = \"foo\"; url = \"${objectThatDoesNotExist}\"; }' 2>&1") + if "S3 object '${objectThatDoesNotExist}' does not exist" not in msg: + print(msg) # So that you can see the message that was improperly formatted + raise Exception("Error message formatting didn't work") - # Copy a package from the binary cache. - client.fail("nix path-info ${pkgA}") + # Copy a package from the binary cache.
+ client.fail("nix path-info ${pkgA}") - client.succeed("${env} nix store info --store '${storeUrl}' >&2") + client.succeed("${env} nix store info --store '${storeUrl}' >&2") - client.succeed("${env} nix copy --no-check-sigs --from '${storeUrl}' ${pkgA}") + client.succeed("${env} nix copy --no-check-sigs --from '${storeUrl}' ${pkgA}") - client.succeed("nix path-info ${pkgA}") - ''; + client.succeed("nix path-info ${pkgA}") + ''; } diff --git a/tests/nixos/setuid.nix b/tests/nixos/setuid.nix index 2b66320ddaf..dc368e38373 100644 --- a/tests/nixos/setuid.nix +++ b/tests/nixos/setuid.nix @@ -1,6 +1,11 @@ # Verify that Linux builds cannot create setuid or setgid binaries. -{ lib, config, nixpkgs, ... }: +{ + lib, + config, + nixpkgs, + ... +}: let pkgs = config.nodes.machine.nixpkgs.pkgs; @@ -10,116 +15,127 @@ in name = "setuid"; nodes.machine = - { config, lib, pkgs, ... }: - { virtualisation.writableStore = true; + { + config, + lib, + pkgs, + ... + }: + { + virtualisation.writableStore = true; nix.settings.substituters = lib.mkForce [ ]; nix.nixPath = [ "nixpkgs=${lib.cleanSource pkgs.path}" ]; - virtualisation.additionalPaths = [ pkgs.stdenvNoCC pkgs.pkgsi686Linux.stdenvNoCC ]; + virtualisation.additionalPaths = [ + pkgs.stdenvNoCC + pkgs.pkgsi686Linux.stdenvNoCC + ]; }; - testScript = { nodes }: '' - # fmt: off - start_all() - - # Copying to /tmp should succeed. - machine.succeed(r""" - nix-build --no-sandbox -E '(with import {}; runCommand "foo" {} " - mkdir -p $out - cp ${pkgs.coreutils}/bin/id /tmp/id - ")' - """.strip()) - - machine.succeed('[[ $(stat -c %a /tmp/id) = 555 ]]') - - machine.succeed("rm /tmp/id") - - # Creating a setuid binary should fail. - machine.fail(r""" - nix-build --no-sandbox -E '(with import {}; runCommand "foo" {} " - mkdir -p $out - cp ${pkgs.coreutils}/bin/id /tmp/id - chmod 4755 /tmp/id - ")' - """.strip()) - - machine.succeed('[[ $(stat -c %a /tmp/id) = 555 ]]') - - machine.succeed("rm /tmp/id") - - # Creating a setgid binary should fail. - machine.fail(r""" - nix-build --no-sandbox -E '(with import {}; runCommand "foo" {} " - mkdir -p $out - cp ${pkgs.coreutils}/bin/id /tmp/id - chmod 2755 /tmp/id - ")' - """.strip()) - - machine.succeed('[[ $(stat -c %a /tmp/id) = 555 ]]') - - machine.succeed("rm /tmp/id") - - # The checks should also work on 32-bit binaries. - machine.fail(r""" - nix-build --no-sandbox -E '(with import { system = "i686-linux"; }; runCommand "foo" {} " - mkdir -p $out - cp ${pkgs.coreutils}/bin/id /tmp/id - chmod 2755 /tmp/id - ")' - """.strip()) - - machine.succeed('[[ $(stat -c %a /tmp/id) = 555 ]]') - - machine.succeed("rm /tmp/id") - - # The tests above use fchmodat(). Test chmod() as well. - machine.succeed(r""" - nix-build --no-sandbox -E '(with import {}; runCommand "foo" { buildInputs = [ perl ]; } " - mkdir -p $out - cp ${pkgs.coreutils}/bin/id /tmp/id - perl -e \"chmod 0666, qw(/tmp/id) or die\" - ")' - """.strip()) - - machine.succeed('[[ $(stat -c %a /tmp/id) = 666 ]]') - - machine.succeed("rm /tmp/id") - - machine.fail(r""" - nix-build --no-sandbox -E '(with import {}; runCommand "foo" { buildInputs = [ perl ]; } " - mkdir -p $out - cp ${pkgs.coreutils}/bin/id /tmp/id - perl -e \"chmod 04755, qw(/tmp/id) or die\" - ")' - """.strip()) - - machine.succeed('[[ $(stat -c %a /tmp/id) = 555 ]]') - - machine.succeed("rm /tmp/id") - - # And test fchmod(). 
- machine.succeed(r""" - nix-build --no-sandbox -E '(with import {}; runCommand "foo" { buildInputs = [ perl ]; } " - mkdir -p $out - cp ${pkgs.coreutils}/bin/id /tmp/id - perl -e \"my \\\$x; open \\\$x, qw(/tmp/id); chmod 01750, \\\$x or die\" - ")' - """.strip()) - - machine.succeed('[[ $(stat -c %a /tmp/id) = 1750 ]]') - - machine.succeed("rm /tmp/id") - - machine.fail(r""" - nix-build --no-sandbox -E '(with import {}; runCommand "foo" { buildInputs = [ perl ]; } " - mkdir -p $out - cp ${pkgs.coreutils}/bin/id /tmp/id - perl -e \"my \\\$x; open \\\$x, qw(/tmp/id); chmod 04777, \\\$x or die\" - ")' - """.strip()) - - machine.succeed('[[ $(stat -c %a /tmp/id) = 555 ]]') - - machine.succeed("rm /tmp/id") - ''; + testScript = + { nodes }: + '' + # fmt: off + start_all() + + # Copying to /tmp should succeed. + machine.succeed(r""" + nix-build --no-sandbox -E '(with import {}; runCommand "foo" {} " + mkdir -p $out + cp ${pkgs.coreutils}/bin/id /tmp/id + ")' + """.strip()) + + machine.succeed('[[ $(stat -c %a /tmp/id) = 555 ]]') + + machine.succeed("rm /tmp/id") + + # Creating a setuid binary should fail. + machine.fail(r""" + nix-build --no-sandbox -E '(with import {}; runCommand "foo" {} " + mkdir -p $out + cp ${pkgs.coreutils}/bin/id /tmp/id + chmod 4755 /tmp/id + ")' + """.strip()) + + machine.succeed('[[ $(stat -c %a /tmp/id) = 555 ]]') + + machine.succeed("rm /tmp/id") + + # Creating a setgid binary should fail. + machine.fail(r""" + nix-build --no-sandbox -E '(with import {}; runCommand "foo" {} " + mkdir -p $out + cp ${pkgs.coreutils}/bin/id /tmp/id + chmod 2755 /tmp/id + ")' + """.strip()) + + machine.succeed('[[ $(stat -c %a /tmp/id) = 555 ]]') + + machine.succeed("rm /tmp/id") + + # The checks should also work on 32-bit binaries. + machine.fail(r""" + nix-build --no-sandbox -E '(with import { system = "i686-linux"; }; runCommand "foo" {} " + mkdir -p $out + cp ${pkgs.coreutils}/bin/id /tmp/id + chmod 2755 /tmp/id + ")' + """.strip()) + + machine.succeed('[[ $(stat -c %a /tmp/id) = 555 ]]') + + machine.succeed("rm /tmp/id") + + # The tests above use fchmodat(). Test chmod() as well. + machine.succeed(r""" + nix-build --no-sandbox -E '(with import {}; runCommand "foo" { buildInputs = [ perl ]; } " + mkdir -p $out + cp ${pkgs.coreutils}/bin/id /tmp/id + perl -e \"chmod 0666, qw(/tmp/id) or die\" + ")' + """.strip()) + + machine.succeed('[[ $(stat -c %a /tmp/id) = 666 ]]') + + machine.succeed("rm /tmp/id") + + machine.fail(r""" + nix-build --no-sandbox -E '(with import {}; runCommand "foo" { buildInputs = [ perl ]; } " + mkdir -p $out + cp ${pkgs.coreutils}/bin/id /tmp/id + perl -e \"chmod 04755, qw(/tmp/id) or die\" + ")' + """.strip()) + + machine.succeed('[[ $(stat -c %a /tmp/id) = 555 ]]') + + machine.succeed("rm /tmp/id") + + # And test fchmod(). 
+ machine.succeed(r""" + nix-build --no-sandbox -E '(with import {}; runCommand "foo" { buildInputs = [ perl ]; } " + mkdir -p $out + cp ${pkgs.coreutils}/bin/id /tmp/id + perl -e \"my \\\$x; open \\\$x, qw(/tmp/id); chmod 01750, \\\$x or die\" + ")' + """.strip()) + + machine.succeed('[[ $(stat -c %a /tmp/id) = 1750 ]]') + + machine.succeed("rm /tmp/id") + + machine.fail(r""" + nix-build --no-sandbox -E '(with import {}; runCommand "foo" { buildInputs = [ perl ]; } " + mkdir -p $out + cp ${pkgs.coreutils}/bin/id /tmp/id + perl -e \"my \\\$x; open \\\$x, qw(/tmp/id); chmod 04777, \\\$x or die\" + ")' + """.strip()) + + machine.succeed('[[ $(stat -c %a /tmp/id) = 555 ]]') + + machine.succeed("rm /tmp/id") + ''; } diff --git a/tests/nixos/sourcehut-flakes.nix b/tests/nixos/sourcehut-flakes.nix index 2f469457aca..bb26b7ebbdc 100644 --- a/tests/nixos/sourcehut-flakes.nix +++ b/tests/nixos/sourcehut-flakes.nix @@ -1,22 +1,27 @@ -{ lib, config, hostPkgs, nixpkgs, ... }: +{ + lib, + config, + hostPkgs, + nixpkgs, + ... +}: let pkgs = config.nodes.sourcehut.nixpkgs.pkgs; # Generate a fake root CA and a fake git.sr.ht certificate. - cert = pkgs.runCommand "cert" { buildInputs = [ pkgs.openssl ]; } - '' - mkdir -p $out + cert = pkgs.runCommand "cert" { buildInputs = [ pkgs.openssl ]; } '' + mkdir -p $out - openssl genrsa -out ca.key 2048 - openssl req -new -x509 -days 36500 -key ca.key \ - -subj "/C=NL/ST=Denial/L=Springfield/O=Dis/CN=Root CA" -out $out/ca.crt + openssl genrsa -out ca.key 2048 + openssl req -new -x509 -days 36500 -key ca.key \ + -subj "/C=NL/ST=Denial/L=Springfield/O=Dis/CN=Root CA" -out $out/ca.crt - openssl req -newkey rsa:2048 -nodes -keyout $out/server.key \ - -subj "/C=CN/ST=Denial/L=Springfield/O=Dis/CN=git.sr.ht" -out server.csr - openssl x509 -req -extfile <(printf "subjectAltName=DNS:git.sr.ht") \ - -days 36500 -in server.csr -CA $out/ca.crt -CAkey ca.key -CAcreateserial -out $out/server.crt - ''; + openssl req -newkey rsa:2048 -nodes -keyout $out/server.key \ + -subj "/C=CN/ST=Denial/L=Springfield/O=Dis/CN=git.sr.ht" -out server.csr + openssl x509 -req -extfile <(printf "subjectAltName=DNS:git.sr.ht") \ + -days 36500 -in server.csr -CA $out/ca.crt -CAkey ca.key -CAcreateserial -out $out/server.crt + ''; registry = pkgs.writeTextFile { name = "registry"; @@ -41,80 +46,92 @@ let destination = "/flake-registry.json"; }; - nixpkgs-repo = pkgs.runCommand "nixpkgs-flake" { } - '' - dir=NixOS-nixpkgs-${nixpkgs.shortRev} - cp -prd ${nixpkgs} $dir + nixpkgs-repo = pkgs.runCommand "nixpkgs-flake" { } '' + dir=NixOS-nixpkgs-${nixpkgs.shortRev} + cp -prd ${nixpkgs} $dir - # Set the correct timestamp in the tarball. - find $dir -print0 | xargs -0 touch -h -t ${builtins.substring 0 12 nixpkgs.lastModifiedDate}.${builtins.substring 12 2 nixpkgs.lastModifiedDate} -- + # Set the correct timestamp in the tarball. 
+ find $dir -print0 | xargs -0 touch -h -t ${builtins.substring 0 12 nixpkgs.lastModifiedDate}.${ + builtins.substring 12 2 nixpkgs.lastModifiedDate + } -- - mkdir -p $out/archive - tar cfz $out/archive/${nixpkgs.rev}.tar.gz $dir --hard-dereference + mkdir -p $out/archive + tar cfz $out/archive/${nixpkgs.rev}.tar.gz $dir --hard-dereference - echo 'ref: refs/heads/master' > $out/HEAD + echo 'ref: refs/heads/master' > $out/HEAD - mkdir -p $out/info - echo -e '${nixpkgs.rev}\trefs/heads/master\n${nixpkgs.rev}\trefs/tags/foo-bar' > $out/info/refs - ''; + mkdir -p $out/info + echo -e '${nixpkgs.rev}\trefs/heads/master\n${nixpkgs.rev}\trefs/tags/foo-bar' > $out/info/refs + ''; in - { - name = "sourcehut-flakes"; +{ + name = "sourcehut-flakes"; - nodes = + nodes = { + # Impersonate git.sr.ht + sourcehut = + { config, pkgs, ... }: { - # Impersonate git.sr.ht - sourcehut = - { config, pkgs, ... }: - { - networking.firewall.allowedTCPPorts = [ 80 443 ]; - - services.httpd.enable = true; - services.httpd.adminAddr = "foo@example.org"; - services.httpd.extraConfig = '' - ErrorLog syslog:local6 - ''; - services.httpd.virtualHosts."git.sr.ht" = - { - forceSSL = true; - sslServerKey = "${cert}/server.key"; - sslServerCert = "${cert}/server.crt"; - servedDirs = - [ - { - urlPath = "/~NixOS/nixpkgs"; - dir = nixpkgs-repo; - } - { - urlPath = "/~NixOS/flake-registry/blob/master"; - dir = registry; - } - ]; - }; - }; - - client = - { config, lib, pkgs, nodes, ... }: - { - virtualisation.writableStore = true; - virtualisation.diskSize = 2048; - virtualisation.additionalPaths = [ pkgs.hello pkgs.fuse ]; - virtualisation.memorySize = 4096; - nix.settings.substituters = lib.mkForce [ ]; - nix.extraOptions = '' - experimental-features = nix-command flakes - flake-registry = https://git.sr.ht/~NixOS/flake-registry/blob/master/flake-registry.json - ''; - environment.systemPackages = [ pkgs.jq ]; - networking.hosts.${(builtins.head nodes.sourcehut.networking.interfaces.eth1.ipv4.addresses).address} = - [ "git.sr.ht" ]; - security.pki.certificateFiles = [ "${cert}/ca.crt" ]; - }; + networking.firewall.allowedTCPPorts = [ + 80 + 443 + ]; + + services.httpd.enable = true; + services.httpd.adminAddr = "foo@example.org"; + services.httpd.extraConfig = '' + ErrorLog syslog:local6 + ''; + services.httpd.virtualHosts."git.sr.ht" = { + forceSSL = true; + sslServerKey = "${cert}/server.key"; + sslServerCert = "${cert}/server.crt"; + servedDirs = [ + { + urlPath = "/~NixOS/nixpkgs"; + dir = nixpkgs-repo; + } + { + urlPath = "/~NixOS/flake-registry/blob/master"; + dir = registry; + } + ]; + }; }; - testScript = { nodes }: '' + client = + { + config, + lib, + pkgs, + nodes, + ... 
+ }: + { + virtualisation.writableStore = true; + virtualisation.diskSize = 2048; + virtualisation.additionalPaths = [ + pkgs.hello + pkgs.fuse + ]; + virtualisation.memorySize = 4096; + nix.settings.substituters = lib.mkForce [ ]; + nix.extraOptions = '' + experimental-features = nix-command flakes + flake-registry = https://git.sr.ht/~NixOS/flake-registry/blob/master/flake-registry.json + ''; + environment.systemPackages = [ pkgs.jq ]; + networking.hosts.${(builtins.head nodes.sourcehut.networking.interfaces.eth1.ipv4.addresses).address} = + [ "git.sr.ht" ]; + security.pki.certificateFiles = [ "${cert}/ca.crt" ]; + }; + }; + + testScript = + { nodes }: + '' # fmt: off import json import time diff --git a/tests/nixos/tarball-flakes.nix b/tests/nixos/tarball-flakes.nix index 84cf377ec5b..7b3638b64b8 100644 --- a/tests/nixos/tarball-flakes.nix +++ b/tests/nixos/tarball-flakes.nix @@ -1,94 +1,106 @@ -{ lib, config, nixpkgs, ... }: +{ + lib, + config, + nixpkgs, + ... +}: let pkgs = config.nodes.machine.nixpkgs.pkgs; - root = pkgs.runCommand "nixpkgs-flake" {} - '' - mkdir -p $out/{stable,tags} - - set -x - dir=nixpkgs-${nixpkgs.shortRev} - cp -prd ${nixpkgs} $dir - # Set the correct timestamp in the tarball. - find $dir -print0 | xargs -0 touch -h -t ${builtins.substring 0 12 nixpkgs.lastModifiedDate}.${builtins.substring 12 2 nixpkgs.lastModifiedDate} -- - tar cfz $out/stable/${nixpkgs.rev}.tar.gz $dir --hard-dereference - - # Set the "Link" header on the redirect but not the final response to - # simulate an S3-like serving environment where the final host cannot set - # arbitrary headers. - cat >$out/tags/.htaccess <; rel=\"immutable\"" - EOF - ''; + root = pkgs.runCommand "nixpkgs-flake" { } '' + mkdir -p $out/{stable,tags} + + set -x + dir=nixpkgs-${nixpkgs.shortRev} + cp -prd ${nixpkgs} $dir + # Set the correct timestamp in the tarball. + find $dir -print0 | xargs -0 touch -h -t ${builtins.substring 0 12 nixpkgs.lastModifiedDate}.${ + builtins.substring 12 2 nixpkgs.lastModifiedDate + } -- + tar cfz $out/stable/${nixpkgs.rev}.tar.gz $dir --hard-dereference + + # Set the "Link" header on the redirect but not the final response to + # simulate an S3-like serving environment where the final host cannot set + # arbitrary headers. + cat >$out/tags/.htaccess <; rel=\"immutable\"" + EOF + ''; in { name = "tarball-flakes"; - nodes = - { - machine = - { config, pkgs, ... }: - { networking.firewall.allowedTCPPorts = [ 80 ]; - - services.httpd.enable = true; - services.httpd.adminAddr = "foo@example.org"; - services.httpd.extraConfig = '' - ErrorLog syslog:local6 - ''; - services.httpd.virtualHosts."localhost" = - { servedDirs = - [ { urlPath = "/"; - dir = root; - } - ]; - }; - - virtualisation.writableStore = true; - virtualisation.diskSize = 2048; - virtualisation.additionalPaths = [ pkgs.hello pkgs.fuse ]; - virtualisation.memorySize = 4096; - nix.settings.substituters = lib.mkForce [ ]; - nix.extraOptions = "experimental-features = nix-command flakes"; + nodes = { + machine = + { config, pkgs, ... 
}: + { + networking.firewall.allowedTCPPorts = [ 80 ]; + + services.httpd.enable = true; + services.httpd.adminAddr = "foo@example.org"; + services.httpd.extraConfig = '' + ErrorLog syslog:local6 + ''; + services.httpd.virtualHosts."localhost" = { + servedDirs = [ + { + urlPath = "/"; + dir = root; + } + ]; }; - }; - testScript = { nodes }: '' - # fmt: off - import json + virtualisation.writableStore = true; + virtualisation.diskSize = 2048; + virtualisation.additionalPaths = [ + pkgs.hello + pkgs.fuse + ]; + virtualisation.memorySize = 4096; + nix.settings.substituters = lib.mkForce [ ]; + nix.extraOptions = "experimental-features = nix-command flakes"; + }; + }; + + testScript = + { nodes }: + '' + # fmt: off + import json - start_all() + start_all() - machine.wait_for_unit("httpd.service") + machine.wait_for_unit("httpd.service") - out = machine.succeed("nix flake metadata --json http://localhost/tags/latest.tar.gz") - print(out) - info = json.loads(out) + out = machine.succeed("nix flake metadata --json http://localhost/tags/latest.tar.gz") + print(out) + info = json.loads(out) - # Check that we got redirected to the immutable URL. - assert info["locked"]["url"] == "http://localhost/stable/${nixpkgs.rev}.tar.gz" + # Check that we got redirected to the immutable URL. + assert info["locked"]["url"] == "http://localhost/stable/${nixpkgs.rev}.tar.gz" - # Check that we got a fingerprint for caching. - assert info["fingerprint"] + # Check that we got a fingerprint for caching. + assert info["fingerprint"] - # Check that we got the rev and revCount attributes. - assert info["revision"] == "${nixpkgs.rev}" - assert info["revCount"] == 1234 + # Check that we got the rev and revCount attributes. + assert info["revision"] == "${nixpkgs.rev}" + assert info["revCount"] == 1234 - # Check that a 0-byte HTTP 304 "Not modified" result works. - machine.succeed("nix flake metadata --refresh --json http://localhost/tags/latest.tar.gz") + # Check that a 0-byte HTTP 304 "Not modified" result works. + machine.succeed("nix flake metadata --refresh --json http://localhost/tags/latest.tar.gz") - # Check that fetching with rev/revCount/narHash succeeds. - machine.succeed("nix flake metadata --json http://localhost/tags/latest.tar.gz?rev=" + info["revision"]) - machine.succeed("nix flake metadata --json http://localhost/tags/latest.tar.gz?revCount=" + str(info["revCount"])) - machine.succeed("nix flake metadata --json http://localhost/tags/latest.tar.gz?narHash=" + info["locked"]["narHash"]) + # Check that fetching with rev/revCount/narHash succeeds. + machine.succeed("nix flake metadata --json http://localhost/tags/latest.tar.gz?rev=" + info["revision"]) + machine.succeed("nix flake metadata --json http://localhost/tags/latest.tar.gz?revCount=" + str(info["revCount"])) + machine.succeed("nix flake metadata --json http://localhost/tags/latest.tar.gz?narHash=" + info["locked"]["narHash"]) - # Check that fetching fails if we provide incorrect attributes. - machine.fail("nix flake metadata --json http://localhost/tags/latest.tar.gz?rev=493300eb13ae6fb387fbd47bf54a85915acc31c0") - machine.fail("nix flake metadata --json http://localhost/tags/latest.tar.gz?revCount=789") - machine.fail("nix flake metadata --json http://localhost/tags/latest.tar.gz?narHash=sha256-tbudgBSg+bHWHiHnlteNzN8TUvI80ygS9IULh4rklEw=") - ''; + # Check that fetching fails if we provide incorrect attributes. 
+ machine.fail("nix flake metadata --json http://localhost/tags/latest.tar.gz?rev=493300eb13ae6fb387fbd47bf54a85915acc31c0") + machine.fail("nix flake metadata --json http://localhost/tags/latest.tar.gz?revCount=789") + machine.fail("nix flake metadata --json http://localhost/tags/latest.tar.gz?narHash=sha256-tbudgBSg+bHWHiHnlteNzN8TUvI80ygS9IULh4rklEw=") + ''; } diff --git a/tests/nixos/user-sandboxing/default.nix b/tests/nixos/user-sandboxing/default.nix index 8a16f44e84d..028efd17f1c 100644 --- a/tests/nixos/user-sandboxing/default.nix +++ b/tests/nixos/user-sandboxing/default.nix @@ -3,12 +3,15 @@ let pkgs = config.nodes.machine.nixpkgs.pkgs; - attacker = pkgs.runCommandWith { - name = "attacker"; - stdenv = pkgs.pkgsStatic.stdenv; - } '' - $CC -static -o $out ${./attacker.c} - ''; + attacker = + pkgs.runCommandWith + { + name = "attacker"; + stdenv = pkgs.pkgsStatic.stdenv; + } + '' + $CC -static -o $out ${./attacker.c} + ''; try-open-build-dir = pkgs.writeScript "try-open-build-dir" '' export PATH=${pkgs.coreutils}/bin:$PATH @@ -55,75 +58,88 @@ in name = "sandbox-setuid-leak"; nodes.machine = - { config, lib, pkgs, ... }: - { virtualisation.writableStore = true; + { + config, + lib, + pkgs, + ... + }: + { + virtualisation.writableStore = true; nix.settings.substituters = lib.mkForce [ ]; nix.nrBuildUsers = 1; - virtualisation.additionalPaths = [ pkgs.busybox-sandbox-shell attacker try-open-build-dir create-hello-world pkgs.socat ]; + virtualisation.additionalPaths = [ + pkgs.busybox-sandbox-shell + attacker + try-open-build-dir + create-hello-world + pkgs.socat + ]; boot.kernelPackages = pkgs.linuxPackages_latest; users.users.alice = { isNormalUser = true; }; }; - testScript = { nodes }: '' - start_all() - - with subtest("A builder can't give access to its build directory"): - # Make sure that a builder can't change the permissions on its build - # directory to the point of opening it up to external users - - # A derivation whose builder tries to make its build directory as open - # as possible and wait for someone to hijack it - machine.succeed(r""" - nix-build -v -E ' - builtins.derivation { - name = "open-build-dir"; - system = builtins.currentSystem; - builder = "${pkgs.busybox-sandbox-shell}/bin/sh"; - args = [ (builtins.storePath "${try-open-build-dir}") ]; - }' >&2 & - """.strip()) - - # Wait for the build to be ready - # This is OK because it runs as root, so we can access everything - machine.wait_for_file("/tmp/nix-build-open-build-dir.drv-0/build/syncPoint") - - # But Alice shouldn't be able to access the build directory - machine.fail("su alice -c 'ls /tmp/nix-build-open-build-dir.drv-0/build'") - machine.fail("su alice -c 'touch /tmp/nix-build-open-build-dir.drv-0/build/bar'") - machine.fail("su alice -c 'cat /tmp/nix-build-open-build-dir.drv-0/build/foo'") - - # Tell the user to finish the build - machine.succeed("echo foo > /tmp/nix-build-open-build-dir.drv-0/build/syncPoint") - - with subtest("Being able to execute stuff as the build user doesn't give access to the build dir"): - machine.succeed(r""" - nix-build -E ' - builtins.derivation { - name = "innocent"; - system = builtins.currentSystem; - builder = "${pkgs.busybox-sandbox-shell}/bin/sh"; - args = [ (builtins.storePath "${create-hello-world}") ]; - }' >&2 & - """.strip()) - machine.wait_for_file("/tmp/nix-build-innocent.drv-0/build/syncPoint") - - # The build ran as `nixbld1` (which is the only build user on the - # machine), but a process running as `nixbld1` outside the sandbox - # shouldn't be able to touch 
the build directory regardless - machine.fail("su nixbld1 --shell ${pkgs.busybox-sandbox-shell}/bin/sh -c 'ls /tmp/nix-build-innocent.drv-0/build'") - machine.fail("su nixbld1 --shell ${pkgs.busybox-sandbox-shell}/bin/sh -c 'echo pwned > /tmp/nix-build-innocent.drv-0/build/result'") - - # Finish the build - machine.succeed("echo foo > /tmp/nix-build-innocent.drv-0/build/syncPoint") - - # Check that the build was not affected - machine.succeed(r""" - cat ./result - test "$(cat ./result)" = "hello, world" - """.strip()) - ''; + testScript = + { nodes }: + '' + start_all() + + with subtest("A builder can't give access to its build directory"): + # Make sure that a builder can't change the permissions on its build + # directory to the point of opening it up to external users + + # A derivation whose builder tries to make its build directory as open + # as possible and wait for someone to hijack it + machine.succeed(r""" + nix-build -v -E ' + builtins.derivation { + name = "open-build-dir"; + system = builtins.currentSystem; + builder = "${pkgs.busybox-sandbox-shell}/bin/sh"; + args = [ (builtins.storePath "${try-open-build-dir}") ]; + }' >&2 & + """.strip()) + + # Wait for the build to be ready + # This is OK because it runs as root, so we can access everything + machine.wait_for_file("/tmp/nix-build-open-build-dir.drv-0/build/syncPoint") + + # But Alice shouldn't be able to access the build directory + machine.fail("su alice -c 'ls /tmp/nix-build-open-build-dir.drv-0/build'") + machine.fail("su alice -c 'touch /tmp/nix-build-open-build-dir.drv-0/build/bar'") + machine.fail("su alice -c 'cat /tmp/nix-build-open-build-dir.drv-0/build/foo'") + + # Tell the user to finish the build + machine.succeed("echo foo > /tmp/nix-build-open-build-dir.drv-0/build/syncPoint") + + with subtest("Being able to execute stuff as the build user doesn't give access to the build dir"): + machine.succeed(r""" + nix-build -E ' + builtins.derivation { + name = "innocent"; + system = builtins.currentSystem; + builder = "${pkgs.busybox-sandbox-shell}/bin/sh"; + args = [ (builtins.storePath "${create-hello-world}") ]; + }' >&2 & + """.strip()) + machine.wait_for_file("/tmp/nix-build-innocent.drv-0/build/syncPoint") + + # The build ran as `nixbld1` (which is the only build user on the + # machine), but a process running as `nixbld1` outside the sandbox + # shouldn't be able to touch the build directory regardless + machine.fail("su nixbld1 --shell ${pkgs.busybox-sandbox-shell}/bin/sh -c 'ls /tmp/nix-build-innocent.drv-0/build'") + machine.fail("su nixbld1 --shell ${pkgs.busybox-sandbox-shell}/bin/sh -c 'echo pwned > /tmp/nix-build-innocent.drv-0/build/result'") + + # Finish the build + machine.succeed("echo foo > /tmp/nix-build-innocent.drv-0/build/syncPoint") + + # Check that the build was not affected + machine.succeed(r""" + cat ./result + test "$(cat ./result)" = "hello, world" + """.strip()) + ''; } - diff --git a/tests/repl-completion.nix b/tests/repl-completion.nix index 3ba198a9860..07406e969cd 100644 --- a/tests/repl-completion.nix +++ b/tests/repl-completion.nix @@ -1,40 +1,45 @@ -{ runCommand, nix, expect }: +{ + runCommand, + nix, + expect, +}: # We only use expect when necessary, e.g. for testing tab completion in nix repl. 
# See also tests/functional/repl.sh -runCommand "repl-completion" { - nativeBuildInputs = [ - expect - nix - ]; - expectScript = '' - # Regression https://github.com/NixOS/nix/pull/10778 - spawn nix repl --offline --extra-experimental-features nix-command - expect "nix-repl>" - send "foo = import ./does-not-exist.nix\n" - expect "nix-repl>" - send "foo.\t" - expect { - "nix-repl>" { - puts "Got another prompt. Good." +runCommand "repl-completion" + { + nativeBuildInputs = [ + expect + nix + ]; + expectScript = '' + # Regression https://github.com/NixOS/nix/pull/10778 + spawn nix repl --offline --extra-experimental-features nix-command + expect "nix-repl>" + send "foo = import ./does-not-exist.nix\n" + expect "nix-repl>" + send "foo.\t" + expect { + "nix-repl>" { + puts "Got another prompt. Good." + } + eof { + puts "Got EOF. Bad." + exit 1 + } } - eof { - puts "Got EOF. Bad." - exit 1 - } - } - exit 0 - ''; - passAsFile = [ "expectScript" ]; -} -'' - export NIX_STORE=$TMPDIR/store - export NIX_STATE_DIR=$TMPDIR/state - export HOME=$TMPDIR/home - mkdir $HOME + exit 0 + ''; + passAsFile = [ "expectScript" ]; + } + '' + export NIX_STORE=$TMPDIR/store + export NIX_STATE_DIR=$TMPDIR/state + export HOME=$TMPDIR/home + mkdir $HOME - nix-store --init - expect $expectScriptPath - touch $out -'' \ No newline at end of file + nix-store --init + expect $expectScriptPath + touch $out + '' From f629d81df094d296fbd6965d825a5085eb0affcc Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 24 Jan 2025 22:21:27 +0100 Subject: [PATCH 125/361] test: Fix shifted source positions after formatting --- ...putDependencies-multi-elem-context.err.exp | 10 +-- ...putDependencies-wrong-element-kind.err.exp | 10 +-- ...al-fail-assert-equal-attrs-names-2.err.exp | 4 +- ...eval-fail-assert-equal-attrs-names.err.exp | 4 +- ...ail-assert-equal-derivations-extra.err.exp | 18 ++--- ...eval-fail-assert-equal-derivations.err.exp | 18 ++--- ...-fail-assert-equal-function-direct.err.exp | 4 +- ...eval-fail-assert-equal-list-length.err.exp | 4 +- .../lang/eval-fail-assert-nested-bool.err.exp | 76 ++++++++---------- .../functional/lang/eval-fail-assert.err.exp | 36 ++++----- .../lang/eval-fail-attr-name-type.err.exp | 14 ++-- ...fail-attrset-merge-drops-later-rec.err.exp | 9 ++- ...al-fail-bad-string-interpolation-4.err.exp | 8 +- .../lang/eval-fail-derivation-name.err.exp | 16 ++-- .../lang/eval-fail-dup-dynamic-attrs.err.exp | 16 ++-- .../lang/eval-fail-duplicate-traces.err.exp | 52 ++++++------ ...-fail-fetchurl-baseName-attrs-name.err.exp | 4 +- ...ake-ref-to-string-negative-integer.err.exp | 18 +++-- ...-foldlStrict-strict-op-application.err.exp | 44 +++++----- .../lang/eval-fail-hashfile-missing.err.exp | 10 +-- tests/functional/lang/eval-fail-list.err.exp | 6 +- .../lang/eval-fail-missing-arg.err.exp | 13 +-- .../lang/eval-fail-mutual-recursion.err.exp | 80 +++++++++---------- .../lang/eval-fail-nested-list-items.err.exp | 10 +-- .../lang/eval-fail-not-throws.err.exp | 12 +-- .../lang/eval-fail-overflowing-add.err.exp | 10 +-- .../lang/eval-fail-overflowing-div.err.exp | 30 +++---- .../lang/eval-fail-overflowing-mul.err.exp | 20 ++--- .../lang/eval-fail-overflowing-sub.err.exp | 10 +-- .../lang/eval-fail-recursion.err.exp | 18 +++-- .../functional/lang/eval-fail-remove.err.exp | 16 ++-- .../functional/lang/eval-fail-scope-5.err.exp | 36 ++++----- .../lang/eval-fail-undeclared-arg.err.exp | 8 +- .../eval-fail-using-set-as-attr-name.err.exp | 20 ++--- .../repl/doc-comment-curried-args.expected | 6 +- 
.../repl/doc-comment-formals.expected | 3 +- .../repl/doc-comment-function.expected | 3 +- tests/functional/repl/doc-compact.expected | 3 +- tests/functional/repl/doc-constant.expected | 33 ++++---- tests/functional/repl/doc-floatedIn.expected | 3 +- tests/functional/repl/doc-functor.expected | 52 ++++++------ .../repl/doc-lambda-flavors.expected | 12 ++- .../functional/repl/doc-measurement.expected | 3 +- .../functional/repl/doc-unambiguous.expected | 3 +- 44 files changed, 400 insertions(+), 385 deletions(-) diff --git a/tests/functional/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.err.exp b/tests/functional/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.err.exp index 6828e03c8e7..56fbffa1942 100644 --- a/tests/functional/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.err.exp +++ b/tests/functional/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.err.exp @@ -1,9 +1,9 @@ error: … while calling the 'addDrvOutputDependencies' builtin - at /pwd/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.nix:18:4: - 17| - 18| in builtins.addDrvOutputDependencies combo-path - | ^ - 19| + at /pwd/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.nix:25:1: + 24| in + 25| builtins.addDrvOutputDependencies combo-path + | ^ + 26| error: context of string '/nix/store/pg9yqs4yd85yhdm3f4i5dyaqp5jahrsz-fail.drv/nix/store/2dxd5frb715z451vbf7s8birlf3argbk-fail-2.drv' must have exactly one element, but has 2 diff --git a/tests/functional/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.err.exp b/tests/functional/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.err.exp index 72b5e636897..d8399380eb4 100644 --- a/tests/functional/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.err.exp +++ b/tests/functional/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.err.exp @@ -1,9 +1,9 @@ error: … while calling the 'addDrvOutputDependencies' builtin - at /pwd/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.nix:9:4: - 8| - 9| in builtins.addDrvOutputDependencies drv.outPath - | ^ - 10| + at /pwd/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.nix:13:1: + 12| in + 13| builtins.addDrvOutputDependencies drv.outPath + | ^ + 14| error: `addDrvOutputDependencies` can only act on derivations, not on a derivation output such as 'out' diff --git a/tests/functional/lang/eval-fail-assert-equal-attrs-names-2.err.exp b/tests/functional/lang/eval-fail-assert-equal-attrs-names-2.err.exp index 4b68d97c20c..5912e6b8c30 100644 --- a/tests/functional/lang/eval-fail-assert-equal-attrs-names-2.err.exp +++ b/tests/functional/lang/eval-fail-assert-equal-attrs-names-2.err.exp @@ -1,8 +1,8 @@ error: … while evaluating the condition of the assertion '({ a = true; } == { a = true; b = true; })' at /pwd/lang/eval-fail-assert-equal-attrs-names-2.nix:1:1: - 1| assert { a = true; } == { a = true; b = true; }; + 1| assert | ^ - 2| throw "unreachable" + 2| { error: attribute names of attribute set '{ a = true; }' differs from attribute set '{ a = true; b = true; }' diff --git a/tests/functional/lang/eval-fail-assert-equal-attrs-names.err.exp b/tests/functional/lang/eval-fail-assert-equal-attrs-names.err.exp index bc61ca63a27..a93b26324cc 100644 --- a/tests/functional/lang/eval-fail-assert-equal-attrs-names.err.exp +++ b/tests/functional/lang/eval-fail-assert-equal-attrs-names.err.exp @@ -1,8 +1,8 @@ error: … while evaluating the condition of the assertion '({ a = true; b = true; } == { a = true; })' at 
/pwd/lang/eval-fail-assert-equal-attrs-names.nix:1:1: - 1| assert { a = true; b = true; } == { a = true; }; + 1| assert | ^ - 2| throw "unreachable" + 2| { error: attribute names of attribute set '{ a = true; b = true; }' differs from attribute set '{ a = true; }' diff --git a/tests/functional/lang/eval-fail-assert-equal-derivations-extra.err.exp b/tests/functional/lang/eval-fail-assert-equal-derivations-extra.err.exp index 7f49240747c..9ccf5e4dc10 100644 --- a/tests/functional/lang/eval-fail-assert-equal-derivations-extra.err.exp +++ b/tests/functional/lang/eval-fail-assert-equal-derivations-extra.err.exp @@ -3,23 +3,23 @@ error: at /pwd/lang/eval-fail-assert-equal-derivations-extra.nix:1:1: 1| assert | ^ - 2| { foo = { type = "derivation"; outPath = "/nix/store/0"; }; } + 2| { … while comparing attribute 'foo' … where left hand side is - at /pwd/lang/eval-fail-assert-equal-derivations-extra.nix:2:5: - 1| assert - 2| { foo = { type = "derivation"; outPath = "/nix/store/0"; }; } + at /pwd/lang/eval-fail-assert-equal-derivations-extra.nix:3:5: + 2| { + 3| foo = { | ^ - 3| == + 4| type = "derivation"; … where right hand side is - at /pwd/lang/eval-fail-assert-equal-derivations-extra.nix:4:5: - 3| == - 4| { foo = { type = "derivation"; outPath = "/nix/store/1"; devious = true; }; }; + at /pwd/lang/eval-fail-assert-equal-derivations-extra.nix:8:5: + 7| } == { + 8| foo = { | ^ - 5| throw "unreachable" + 9| type = "derivation"; … while comparing a derivation by its 'outPath' attribute diff --git a/tests/functional/lang/eval-fail-assert-equal-derivations.err.exp b/tests/functional/lang/eval-fail-assert-equal-derivations.err.exp index d7f0face077..2be1f48583c 100644 --- a/tests/functional/lang/eval-fail-assert-equal-derivations.err.exp +++ b/tests/functional/lang/eval-fail-assert-equal-derivations.err.exp @@ -3,23 +3,23 @@ error: at /pwd/lang/eval-fail-assert-equal-derivations.nix:1:1: 1| assert | ^ - 2| { foo = { type = "derivation"; outPath = "/nix/store/0"; ignored = abort "not ignored"; }; } + 2| { … while comparing attribute 'foo' … where left hand side is - at /pwd/lang/eval-fail-assert-equal-derivations.nix:2:5: - 1| assert - 2| { foo = { type = "derivation"; outPath = "/nix/store/0"; ignored = abort "not ignored"; }; } + at /pwd/lang/eval-fail-assert-equal-derivations.nix:3:5: + 2| { + 3| foo = { | ^ - 3| == + 4| type = "derivation"; … where right hand side is - at /pwd/lang/eval-fail-assert-equal-derivations.nix:4:5: - 3| == - 4| { foo = { type = "derivation"; outPath = "/nix/store/1"; ignored = abort "not ignored"; }; }; + at /pwd/lang/eval-fail-assert-equal-derivations.nix:9:5: + 8| } == { + 9| foo = { | ^ - 5| throw "unreachable" + 10| type = "derivation"; … while comparing a derivation by its 'outPath' attribute diff --git a/tests/functional/lang/eval-fail-assert-equal-function-direct.err.exp b/tests/functional/lang/eval-fail-assert-equal-function-direct.err.exp index f06d796981b..93c88a80cd4 100644 --- a/tests/functional/lang/eval-fail-assert-equal-function-direct.err.exp +++ b/tests/functional/lang/eval-fail-assert-equal-function-direct.err.exp @@ -2,8 +2,8 @@ error: … while evaluating the condition of the assertion '((x: x) == (x: x))' at /pwd/lang/eval-fail-assert-equal-function-direct.nix:3:1: 2| # This only compares a direct comparison and makes no claims about functions in nested structures. 
- 3| assert + 3| assert (x: x) == (x: x); | ^ - 4| (x: x) + 4| abort "unreachable" error: distinct functions and immediate comparisons of identical functions compare as unequal diff --git a/tests/functional/lang/eval-fail-assert-equal-list-length.err.exp b/tests/functional/lang/eval-fail-assert-equal-list-length.err.exp index 90108552cf0..e82f3787517 100644 --- a/tests/functional/lang/eval-fail-assert-equal-list-length.err.exp +++ b/tests/functional/lang/eval-fail-assert-equal-list-length.err.exp @@ -1,8 +1,8 @@ error: … while evaluating the condition of the assertion '([ (1) (0) ] == [ (10) ])' at /pwd/lang/eval-fail-assert-equal-list-length.nix:1:1: - 1| assert [ 1 0 ] == [ 10 ]; + 1| assert | ^ - 2| throw "unreachable" + 2| [ error: list of size '2' is not equal to list of size '1', left hand side is '[ 1 0 ]', right hand side is '[ 10 ]' diff --git a/tests/functional/lang/eval-fail-assert-nested-bool.err.exp b/tests/functional/lang/eval-fail-assert-nested-bool.err.exp index 1debb668c98..fdc0818200b 100644 --- a/tests/functional/lang/eval-fail-assert-nested-bool.err.exp +++ b/tests/functional/lang/eval-fail-assert-nested-bool.err.exp @@ -1,74 +1,66 @@ error: … while evaluating the condition of the assertion '({ a = { b = [ ({ c = { d = true; }; }) ]; }; } == { a = { b = [ ({ c = { d = false; }; }) ]; }; })' at /pwd/lang/eval-fail-assert-nested-bool.nix:1:1: - 1| assert + 1| assert { a.b = [ { c.d = true; } ]; } == { a.b = [ { c.d = false; } ]; }; | ^ - 2| { a.b = [ { c.d = true; } ]; } + 2| … while comparing attribute 'a' … where left hand side is - at /pwd/lang/eval-fail-assert-nested-bool.nix:2:5: - 1| assert - 2| { a.b = [ { c.d = true; } ]; } - | ^ - 3| == + at /pwd/lang/eval-fail-assert-nested-bool.nix:1:10: + 1| assert { a.b = [ { c.d = true; } ]; } == { a.b = [ { c.d = false; } ]; }; + | ^ + 2| … where right hand side is - at /pwd/lang/eval-fail-assert-nested-bool.nix:4:5: - 3| == - 4| { a.b = [ { c.d = false; } ]; }; - | ^ - 5| + at /pwd/lang/eval-fail-assert-nested-bool.nix:1:44: + 1| assert { a.b = [ { c.d = true; } ]; } == { a.b = [ { c.d = false; } ]; }; + | ^ + 2| … while comparing attribute 'b' … where left hand side is - at /pwd/lang/eval-fail-assert-nested-bool.nix:2:5: - 1| assert - 2| { a.b = [ { c.d = true; } ]; } - | ^ - 3| == + at /pwd/lang/eval-fail-assert-nested-bool.nix:1:10: + 1| assert { a.b = [ { c.d = true; } ]; } == { a.b = [ { c.d = false; } ]; }; + | ^ + 2| … where right hand side is - at /pwd/lang/eval-fail-assert-nested-bool.nix:4:5: - 3| == - 4| { a.b = [ { c.d = false; } ]; }; - | ^ - 5| + at /pwd/lang/eval-fail-assert-nested-bool.nix:1:44: + 1| assert { a.b = [ { c.d = true; } ]; } == { a.b = [ { c.d = false; } ]; }; + | ^ + 2| … while comparing list element 0 … while comparing attribute 'c' … where left hand side is - at /pwd/lang/eval-fail-assert-nested-bool.nix:2:15: - 1| assert - 2| { a.b = [ { c.d = true; } ]; } - | ^ - 3| == + at /pwd/lang/eval-fail-assert-nested-bool.nix:1:20: + 1| assert { a.b = [ { c.d = true; } ]; } == { a.b = [ { c.d = false; } ]; }; + | ^ + 2| … where right hand side is - at /pwd/lang/eval-fail-assert-nested-bool.nix:4:15: - 3| == - 4| { a.b = [ { c.d = false; } ]; }; - | ^ - 5| + at /pwd/lang/eval-fail-assert-nested-bool.nix:1:54: + 1| assert { a.b = [ { c.d = true; } ]; } == { a.b = [ { c.d = false; } ]; }; + | ^ + 2| … while comparing attribute 'd' … where left hand side is - at /pwd/lang/eval-fail-assert-nested-bool.nix:2:15: - 1| assert - 2| { a.b = [ { c.d = true; } ]; } - | ^ - 3| == + at 
/pwd/lang/eval-fail-assert-nested-bool.nix:1:20: + 1| assert { a.b = [ { c.d = true; } ]; } == { a.b = [ { c.d = false; } ]; }; + | ^ + 2| … where right hand side is - at /pwd/lang/eval-fail-assert-nested-bool.nix:4:15: - 3| == - 4| { a.b = [ { c.d = false; } ]; }; - | ^ - 5| + at /pwd/lang/eval-fail-assert-nested-bool.nix:1:54: + 1| assert { a.b = [ { c.d = true; } ]; } == { a.b = [ { c.d = false; } ]; }; + | ^ + 2| error: boolean 'true' is not equal to boolean 'false' diff --git a/tests/functional/lang/eval-fail-assert.err.exp b/tests/functional/lang/eval-fail-assert.err.exp index 7be9e238797..5fffe79bf0d 100644 --- a/tests/functional/lang/eval-fail-assert.err.exp +++ b/tests/functional/lang/eval-fail-assert.err.exp @@ -1,30 +1,30 @@ error: … while evaluating the attribute 'body' - at /pwd/lang/eval-fail-assert.nix:4:3: - 3| - 4| body = x "x"; + at /pwd/lang/eval-fail-assert.nix:7:3: + 6| + 7| body = x "x"; | ^ - 5| } + 8| } … from call site - at /pwd/lang/eval-fail-assert.nix:4:10: - 3| - 4| body = x "x"; + at /pwd/lang/eval-fail-assert.nix:7:10: + 6| + 7| body = x "x"; | ^ - 5| } + 8| } … while calling 'x' - at /pwd/lang/eval-fail-assert.nix:2:7: - 1| let { - 2| x = arg: assert arg == "y"; 123; - | ^ - 3| + at /pwd/lang/eval-fail-assert.nix:3:5: + 2| x = + 3| arg: + | ^ + 4| assert arg == "y"; … while evaluating the condition of the assertion '(arg == "y")' - at /pwd/lang/eval-fail-assert.nix:2:12: - 1| let { - 2| x = arg: assert arg == "y"; 123; - | ^ - 3| + at /pwd/lang/eval-fail-assert.nix:4:5: + 3| arg: + 4| assert arg == "y"; + | ^ + 5| 123; error: string '"x"' is not equal to string '"y"' diff --git a/tests/functional/lang/eval-fail-attr-name-type.err.exp b/tests/functional/lang/eval-fail-attr-name-type.err.exp index 6848a35ed80..4ea209b130f 100644 --- a/tests/functional/lang/eval-fail-attr-name-type.err.exp +++ b/tests/functional/lang/eval-fail-attr-name-type.err.exp @@ -2,20 +2,20 @@ error: … while evaluating the attribute 'puppy."${key}"' at /pwd/lang/eval-fail-attr-name-type.nix:3:5: 2| attrs = { - 3| puppy.doggy = {}; + 3| puppy.doggy = { }; | ^ 4| }; … while evaluating an attribute name - at /pwd/lang/eval-fail-attr-name-type.nix:7:17: + at /pwd/lang/eval-fail-attr-name-type.nix:7:15: 6| in - 7| attrs.puppy.${key} - | ^ + 7| attrs.puppy.${key} + | ^ 8| error: expected a string but found an integer: 1 - at /pwd/lang/eval-fail-attr-name-type.nix:7:17: + at /pwd/lang/eval-fail-attr-name-type.nix:7:15: 6| in - 7| attrs.puppy.${key} - | ^ + 7| attrs.puppy.${key} + | ^ 8| diff --git a/tests/functional/lang/eval-fail-attrset-merge-drops-later-rec.err.exp b/tests/functional/lang/eval-fail-attrset-merge-drops-later-rec.err.exp index d1cdc7b769f..ba9185dce1c 100644 --- a/tests/functional/lang/eval-fail-attrset-merge-drops-later-rec.err.exp +++ b/tests/functional/lang/eval-fail-attrset-merge-drops-later-rec.err.exp @@ -1,5 +1,6 @@ error: undefined variable 'd' - at /pwd/lang/eval-fail-attrset-merge-drops-later-rec.nix:1:26: - 1| { a.b = 1; a = rec { c = d + 2; d = 3; }; }.c - | ^ - 2| + at /pwd/lang/eval-fail-attrset-merge-drops-later-rec.nix:4:9: + 3| a = rec { + 4| c = d + 2; + | ^ + 5| d = 3; diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp index b262e814dbc..ea5910072c3 100644 --- a/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp +++ b/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp @@ -1,9 +1,9 @@ error: … while evaluating a path segment - at 
/pwd/lang/eval-fail-bad-string-interpolation-4.nix:9:3: - 8| # The error message should not be too long. - 9| ''${pkgs}'' + at /pwd/lang/eval-fail-bad-string-interpolation-4.nix:19:3: + 18| # The error message should not be too long. + 19| ''${pkgs}'' | ^ - 10| + 20| error: cannot coerce a set to a string: { a = { a = { a = { a = "ha"; b = "ha"; c = "ha"; d = "ha"; e = "ha"; f = "ha"; g = "ha"; h = "ha"; j = "ha"; }; «8 attributes elided» }; «8 attributes elided» }; «8 attributes elided» } diff --git a/tests/functional/lang/eval-fail-derivation-name.err.exp b/tests/functional/lang/eval-fail-derivation-name.err.exp index 0ef98674d81..017326c3490 100644 --- a/tests/functional/lang/eval-fail-derivation-name.err.exp +++ b/tests/functional/lang/eval-fail-derivation-name.err.exp @@ -1,17 +1,17 @@ error: … while evaluating the attribute 'outPath' at ::: - | value = commonAttrs // { - | outPath = builtins.getAttr outputName strict; - | ^ - | drvPath = strict.drvPath; + | value = commonAttrs // { + | outPath = builtins.getAttr outputName strict; + | ^ + | drvPath = strict.drvPath; … while calling the 'getAttr' builtin at ::: - | value = commonAttrs // { - | outPath = builtins.getAttr outputName strict; - | ^ - | drvPath = strict.drvPath; + | value = commonAttrs // { + | outPath = builtins.getAttr outputName strict; + | ^ + | drvPath = strict.drvPath; … while calling the 'derivationStrict' builtin at ::: diff --git a/tests/functional/lang/eval-fail-dup-dynamic-attrs.err.exp b/tests/functional/lang/eval-fail-dup-dynamic-attrs.err.exp index 834f9c67bc4..4eafe945b74 100644 --- a/tests/functional/lang/eval-fail-dup-dynamic-attrs.err.exp +++ b/tests/functional/lang/eval-fail-dup-dynamic-attrs.err.exp @@ -2,13 +2,13 @@ error: … while evaluating the attribute 'set' at /pwd/lang/eval-fail-dup-dynamic-attrs.nix:2:3: 1| { - 2| set = { "${"" + "b"}" = 1; }; + 2| set = { | ^ - 3| set = { "${"b" + ""}" = 2; }; + 3| "${"" + "b"}" = 1; - error: dynamic attribute 'b' already defined at /pwd/lang/eval-fail-dup-dynamic-attrs.nix:2:11 - at /pwd/lang/eval-fail-dup-dynamic-attrs.nix:3:11: - 2| set = { "${"" + "b"}" = 1; }; - 3| set = { "${"b" + ""}" = 2; }; - | ^ - 4| } + error: dynamic attribute 'b' already defined at /pwd/lang/eval-fail-dup-dynamic-attrs.nix:3:5 + at /pwd/lang/eval-fail-dup-dynamic-attrs.nix:6:5: + 5| set = { + 6| "${"b" + ""}" = 2; + | ^ + 7| }; diff --git a/tests/functional/lang/eval-fail-duplicate-traces.err.exp b/tests/functional/lang/eval-fail-duplicate-traces.err.exp index cedaebd3b58..e6ae60f3ca0 100644 --- a/tests/functional/lang/eval-fail-duplicate-traces.err.exp +++ b/tests/functional/lang/eval-fail-duplicate-traces.err.exp @@ -1,51 +1,51 @@ error: … from call site - at /pwd/lang/eval-fail-duplicate-traces.nix:9:3: - 8| in - 9| throwAfter 2 - | ^ - 10| + at /pwd/lang/eval-fail-duplicate-traces.nix:6:1: + 5| in + 6| throwAfter 2 + | ^ + 7| … while calling 'throwAfter' at /pwd/lang/eval-fail-duplicate-traces.nix:4:16: 3| let - 4| throwAfter = n: + 4| throwAfter = n: if n > 0 then throwAfter (n - 1) else throw "Uh oh!"; | ^ - 5| if n > 0 + 5| in … from call site - at /pwd/lang/eval-fail-duplicate-traces.nix:6:10: - 5| if n > 0 - 6| then throwAfter (n - 1) - | ^ - 7| else throw "Uh oh!"; + at /pwd/lang/eval-fail-duplicate-traces.nix:4:33: + 3| let + 4| throwAfter = n: if n > 0 then throwAfter (n - 1) else throw "Uh oh!"; + | ^ + 5| in … while calling 'throwAfter' at /pwd/lang/eval-fail-duplicate-traces.nix:4:16: 3| let - 4| throwAfter = n: + 4| throwAfter = n: if n > 0 then throwAfter (n - 1) 
else throw "Uh oh!"; | ^ - 5| if n > 0 + 5| in … from call site - at /pwd/lang/eval-fail-duplicate-traces.nix:6:10: - 5| if n > 0 - 6| then throwAfter (n - 1) - | ^ - 7| else throw "Uh oh!"; + at /pwd/lang/eval-fail-duplicate-traces.nix:4:33: + 3| let + 4| throwAfter = n: if n > 0 then throwAfter (n - 1) else throw "Uh oh!"; + | ^ + 5| in … while calling 'throwAfter' at /pwd/lang/eval-fail-duplicate-traces.nix:4:16: 3| let - 4| throwAfter = n: + 4| throwAfter = n: if n > 0 then throwAfter (n - 1) else throw "Uh oh!"; | ^ - 5| if n > 0 + 5| in … while calling the 'throw' builtin - at /pwd/lang/eval-fail-duplicate-traces.nix:7:10: - 6| then throwAfter (n - 1) - 7| else throw "Uh oh!"; - | ^ - 8| in + at /pwd/lang/eval-fail-duplicate-traces.nix:4:57: + 3| let + 4| throwAfter = n: if n > 0 then throwAfter (n - 1) else throw "Uh oh!"; + | ^ + 5| in error: Uh oh! diff --git a/tests/functional/lang/eval-fail-fetchurl-baseName-attrs-name.err.exp b/tests/functional/lang/eval-fail-fetchurl-baseName-attrs-name.err.exp index 30f8b6a3544..2cac02f5875 100644 --- a/tests/functional/lang/eval-fail-fetchurl-baseName-attrs-name.err.exp +++ b/tests/functional/lang/eval-fail-fetchurl-baseName-attrs-name.err.exp @@ -1,8 +1,8 @@ error: … while calling the 'fetchurl' builtin at /pwd/lang/eval-fail-fetchurl-baseName-attrs-name.nix:1:1: - 1| builtins.fetchurl { url = "https://example.com/foo.tar.gz"; name = "~wobble~"; } + 1| builtins.fetchurl { | ^ - 2| + 2| url = "https://example.com/foo.tar.gz"; error: invalid store path name when fetching URL 'https://example.com/foo.tar.gz': name '~wobble~' contains illegal character '~'. Please change the value for the 'name' attribute passed to 'fetchurl', so that it can create a valid store path. diff --git a/tests/functional/lang/eval-fail-flake-ref-to-string-negative-integer.err.exp b/tests/functional/lang/eval-fail-flake-ref-to-string-negative-integer.err.exp index 25c8d7eaaa8..2b56939c621 100644 --- a/tests/functional/lang/eval-fail-flake-ref-to-string-negative-integer.err.exp +++ b/tests/functional/lang/eval-fail-flake-ref-to-string-negative-integer.err.exp @@ -1,14 +1,16 @@ error: … while calling the 'seq' builtin - at /pwd/lang/eval-fail-flake-ref-to-string-negative-integer.nix:1:16: - 1| let n = -1; in builtins.seq n (builtins.flakeRefToString { - | ^ - 2| type = "github"; + at /pwd/lang/eval-fail-flake-ref-to-string-negative-integer.nix:4:1: + 3| in + 4| builtins.seq n ( + | ^ + 5| builtins.flakeRefToString { … while calling the 'flakeRefToString' builtin - at /pwd/lang/eval-fail-flake-ref-to-string-negative-integer.nix:1:32: - 1| let n = -1; in builtins.seq n (builtins.flakeRefToString { - | ^ - 2| type = "github"; + at /pwd/lang/eval-fail-flake-ref-to-string-negative-integer.nix:5:3: + 4| builtins.seq n ( + 5| builtins.flakeRefToString { + | ^ + 6| type = "github"; error: negative value given for flake ref attr repo: -1 diff --git a/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp b/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp index 4903bc82d54..bb02ecdcb8f 100644 --- a/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp +++ b/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp @@ -2,36 +2,36 @@ error: … while calling the 'foldl'' builtin at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:2:1: 1| # Tests that the result of applying op is forced even if the value is never used - 2| builtins.foldl' + 2| builtins.foldl' (_: f: f null) null [ | ^ - 3| (_: f: f 
null) + 3| (_: throw "Not the final value, but is still forced!") … while calling anonymous lambda - at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:3:7: - 2| builtins.foldl' - 3| (_: f: f null) - | ^ - 4| null + at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:2:21: + 1| # Tests that the result of applying op is forced even if the value is never used + 2| builtins.foldl' (_: f: f null) null [ + | ^ + 3| (_: throw "Not the final value, but is still forced!") … from call site - at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:3:10: - 2| builtins.foldl' - 3| (_: f: f null) - | ^ - 4| null + at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:2:24: + 1| # Tests that the result of applying op is forced even if the value is never used + 2| builtins.foldl' (_: f: f null) null [ + | ^ + 3| (_: throw "Not the final value, but is still forced!") … while calling anonymous lambda - at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:5:6: - 4| null - 5| [ (_: throw "Not the final value, but is still forced!") (_: 23) ] - | ^ - 6| + at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:3:4: + 2| builtins.foldl' (_: f: f null) null [ + 3| (_: throw "Not the final value, but is still forced!") + | ^ + 4| (_: 23) … while calling the 'throw' builtin - at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:5:9: - 4| null - 5| [ (_: throw "Not the final value, but is still forced!") (_: 23) ] - | ^ - 6| + at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:3:7: + 2| builtins.foldl' (_: f: f null) null [ + 3| (_: throw "Not the final value, but is still forced!") + | ^ + 4| (_: 23) error: Not the final value, but is still forced! diff --git a/tests/functional/lang/eval-fail-hashfile-missing.err.exp b/tests/functional/lang/eval-fail-hashfile-missing.err.exp index 1e465392744..0d3747a6d57 100644 --- a/tests/functional/lang/eval-fail-hashfile-missing.err.exp +++ b/tests/functional/lang/eval-fail-hashfile-missing.err.exp @@ -1,10 +1,10 @@ error: … while calling the 'toString' builtin - at /pwd/lang/eval-fail-hashfile-missing.nix:4:3: - 3| in - 4| toString (builtins.concatLists (map (hash: map (builtins.hashFile hash) paths) ["md5" "sha1" "sha256" "sha512"])) - | ^ - 5| + at /pwd/lang/eval-fail-hashfile-missing.nix:7:1: + 6| in + 7| toString ( + | ^ + 8| builtins.concatLists ( … while evaluating the first argument passed to builtins.toString diff --git a/tests/functional/lang/eval-fail-list.err.exp b/tests/functional/lang/eval-fail-list.err.exp index d492f8bd2e4..8b21e9a3715 100644 --- a/tests/functional/lang/eval-fail-list.err.exp +++ b/tests/functional/lang/eval-fail-list.err.exp @@ -1,8 +1,8 @@ error: … while evaluating one of the elements to concatenate - at /pwd/lang/eval-fail-list.nix:1:2: - 1| 8++1 - | ^ + at /pwd/lang/eval-fail-list.nix:1:3: + 1| 8 ++ 1 + | ^ 2| error: expected a list but found an integer: 8 diff --git a/tests/functional/lang/eval-fail-missing-arg.err.exp b/tests/functional/lang/eval-fail-missing-arg.err.exp index 3b162fe1b60..d5a66d2c5ea 100644 --- a/tests/functional/lang/eval-fail-missing-arg.err.exp +++ b/tests/functional/lang/eval-fail-missing-arg.err.exp @@ -1,12 +1,13 @@ error: … from call site at /pwd/lang/eval-fail-missing-arg.nix:1:1: - 1| ({x, y, z}: x + y + z) {x = "foo"; z = "bar";} + 1| ( | ^ - 2| + 2| { error: function 'anonymous lambda' called without required argument 'y' - at /pwd/lang/eval-fail-missing-arg.nix:1:2: - 1| ({x, y, z}: x + y + z) {x = "foo"; z = "bar";} - | ^ - 2| + at 
/pwd/lang/eval-fail-missing-arg.nix:2:3: + 1| ( + 2| { + | ^ + 3| x, diff --git a/tests/functional/lang/eval-fail-mutual-recursion.err.exp b/tests/functional/lang/eval-fail-mutual-recursion.err.exp index c034afcd5e0..9d84aa43f0f 100644 --- a/tests/functional/lang/eval-fail-mutual-recursion.err.exp +++ b/tests/functional/lang/eval-fail-mutual-recursion.err.exp @@ -1,64 +1,64 @@ error: … from call site - at /pwd/lang/eval-fail-mutual-recursion.nix:36:3: - 35| in - 36| throwAfterA true 10 - | ^ - 37| + at /pwd/lang/eval-fail-mutual-recursion.nix:40:1: + 39| in + 40| throwAfterA true 10 + | ^ + 41| … while calling 'throwAfterA' - at /pwd/lang/eval-fail-mutual-recursion.nix:29:26: - 28| - 29| throwAfterA = recurse: n: - | ^ - 30| if n > 0 + at /pwd/lang/eval-fail-mutual-recursion.nix:32:14: + 31| throwAfterA = + 32| recurse: n: + | ^ + 33| if n > 0 then … from call site - at /pwd/lang/eval-fail-mutual-recursion.nix:31:10: - 30| if n > 0 - 31| then throwAfterA recurse (n - 1) - | ^ - 32| else if recurse + at /pwd/lang/eval-fail-mutual-recursion.nix:34:7: + 33| if n > 0 then + 34| throwAfterA recurse (n - 1) + | ^ + 35| else if recurse then (19 duplicate frames omitted) … from call site - at /pwd/lang/eval-fail-mutual-recursion.nix:33:10: - 32| else if recurse - 33| then throwAfterB true 10 - | ^ - 34| else throw "Uh oh!"; + at /pwd/lang/eval-fail-mutual-recursion.nix:36:7: + 35| else if recurse then + 36| throwAfterB true 10 + | ^ + 37| else … while calling 'throwAfterB' - at /pwd/lang/eval-fail-mutual-recursion.nix:22:26: - 21| let - 22| throwAfterB = recurse: n: - | ^ - 23| if n > 0 + at /pwd/lang/eval-fail-mutual-recursion.nix:23:14: + 22| throwAfterB = + 23| recurse: n: + | ^ + 24| if n > 0 then … from call site - at /pwd/lang/eval-fail-mutual-recursion.nix:24:10: - 23| if n > 0 - 24| then throwAfterB recurse (n - 1) - | ^ - 25| else if recurse + at /pwd/lang/eval-fail-mutual-recursion.nix:25:7: + 24| if n > 0 then + 25| throwAfterB recurse (n - 1) + | ^ + 26| else if recurse then (19 duplicate frames omitted) … from call site - at /pwd/lang/eval-fail-mutual-recursion.nix:26:10: - 25| else if recurse - 26| then throwAfterA false 10 - | ^ - 27| else throw "Uh oh!"; + at /pwd/lang/eval-fail-mutual-recursion.nix:27:7: + 26| else if recurse then + 27| throwAfterA false 10 + | ^ + 28| else (21 duplicate frames omitted) … while calling the 'throw' builtin - at /pwd/lang/eval-fail-mutual-recursion.nix:34:10: - 33| then throwAfterB true 10 - 34| else throw "Uh oh!"; - | ^ - 35| in + at /pwd/lang/eval-fail-mutual-recursion.nix:38:7: + 37| else + 38| throw "Uh oh!"; + | ^ + 39| in error: Uh oh! 
diff --git a/tests/functional/lang/eval-fail-nested-list-items.err.exp b/tests/functional/lang/eval-fail-nested-list-items.err.exp index 90d43906165..1169b8326ca 100644 --- a/tests/functional/lang/eval-fail-nested-list-items.err.exp +++ b/tests/functional/lang/eval-fail-nested-list-items.err.exp @@ -1,9 +1,9 @@ error: … while evaluating a path segment - at /pwd/lang/eval-fail-nested-list-items.nix:11:6: - 10| - 11| "" + (let v = [ [ 1 2 3 4 5 6 7 8 ] [1 2 3 4]]; in builtins.deepSeq v v) - | ^ - 12| + at /pwd/lang/eval-fail-nested-list-items.nix:12:3: + 11| "" + 12| + ( + | ^ + 13| let error: cannot coerce a list to a string: [ [ 1 2 3 4 5 6 7 8 ] [ 1 «3 items elided» ] ] diff --git a/tests/functional/lang/eval-fail-not-throws.err.exp b/tests/functional/lang/eval-fail-not-throws.err.exp index fc81f7277e1..b49ed7b0048 100644 --- a/tests/functional/lang/eval-fail-not-throws.err.exp +++ b/tests/functional/lang/eval-fail-not-throws.err.exp @@ -1,14 +1,14 @@ error: … in the argument of the not operator - at /pwd/lang/eval-fail-not-throws.nix:1:4: - 1| ! (throw "uh oh!") - | ^ + at /pwd/lang/eval-fail-not-throws.nix:1:3: + 1| !(throw "uh oh!") + | ^ 2| … while calling the 'throw' builtin - at /pwd/lang/eval-fail-not-throws.nix:1:4: - 1| ! (throw "uh oh!") - | ^ + at /pwd/lang/eval-fail-not-throws.nix:1:3: + 1| !(throw "uh oh!") + | ^ 2| error: uh oh! diff --git a/tests/functional/lang/eval-fail-overflowing-add.err.exp b/tests/functional/lang/eval-fail-overflowing-add.err.exp index 6458cf1c933..5a77e9c9d97 100644 --- a/tests/functional/lang/eval-fail-overflowing-add.err.exp +++ b/tests/functional/lang/eval-fail-overflowing-add.err.exp @@ -1,6 +1,6 @@ error: integer overflow in adding 9223372036854775807 + 1 - at /pwd/lang/eval-fail-overflowing-add.nix:4:8: - 3| b = 1; - 4| in a + b - | ^ - 5| + at /pwd/lang/eval-fail-overflowing-add.nix:5:5: + 4| in + 5| a + b + | ^ + 6| diff --git a/tests/functional/lang/eval-fail-overflowing-div.err.exp b/tests/functional/lang/eval-fail-overflowing-div.err.exp index 8ce07d4d662..812c6056b76 100644 --- a/tests/functional/lang/eval-fail-overflowing-div.err.exp +++ b/tests/functional/lang/eval-fail-overflowing-div.err.exp @@ -1,23 +1,23 @@ error: … while calling the 'seq' builtin - at /pwd/lang/eval-fail-overflowing-div.nix:7:4: - 6| b = -1; - 7| in builtins.seq intMin (builtins.seq b (intMin / b)) - | ^ - 8| + at /pwd/lang/eval-fail-overflowing-div.nix:8:1: + 7| in + 8| builtins.seq intMin (builtins.seq b (intMin / b)) + | ^ + 9| … while calling the 'seq' builtin - at /pwd/lang/eval-fail-overflowing-div.nix:7:25: - 6| b = -1; - 7| in builtins.seq intMin (builtins.seq b (intMin / b)) - | ^ - 8| + at /pwd/lang/eval-fail-overflowing-div.nix:8:22: + 7| in + 8| builtins.seq intMin (builtins.seq b (intMin / b)) + | ^ + 9| … while calling the 'div' builtin - at /pwd/lang/eval-fail-overflowing-div.nix:7:48: - 6| b = -1; - 7| in builtins.seq intMin (builtins.seq b (intMin / b)) - | ^ - 8| + at /pwd/lang/eval-fail-overflowing-div.nix:8:45: + 7| in + 8| builtins.seq intMin (builtins.seq b (intMin / b)) + | ^ + 9| error: integer overflow in dividing -9223372036854775808 / -1 diff --git a/tests/functional/lang/eval-fail-overflowing-mul.err.exp b/tests/functional/lang/eval-fail-overflowing-mul.err.exp index f42b39d4db9..aaae4b7bd86 100644 --- a/tests/functional/lang/eval-fail-overflowing-mul.err.exp +++ b/tests/functional/lang/eval-fail-overflowing-mul.err.exp @@ -1,16 +1,16 @@ error: … while calling the 'mul' builtin - at /pwd/lang/eval-fail-overflowing-mul.nix:3:10: - 2| a = 
4294967297; - 3| in a * a * a - | ^ - 4| + at /pwd/lang/eval-fail-overflowing-mul.nix:4:7: + 3| in + 4| a * a * a + | ^ + 5| … while calling the 'mul' builtin - at /pwd/lang/eval-fail-overflowing-mul.nix:3:6: - 2| a = 4294967297; - 3| in a * a * a - | ^ - 4| + at /pwd/lang/eval-fail-overflowing-mul.nix:4:3: + 3| in + 4| a * a * a + | ^ + 5| error: integer overflow in multiplying 4294967297 * 4294967297 diff --git a/tests/functional/lang/eval-fail-overflowing-sub.err.exp b/tests/functional/lang/eval-fail-overflowing-sub.err.exp index 66a3a03f885..5904c8dcc9d 100644 --- a/tests/functional/lang/eval-fail-overflowing-sub.err.exp +++ b/tests/functional/lang/eval-fail-overflowing-sub.err.exp @@ -1,9 +1,9 @@ error: … while calling the 'sub' builtin - at /pwd/lang/eval-fail-overflowing-sub.nix:4:6: - 3| b = 2; - 4| in a - b - | ^ - 5| + at /pwd/lang/eval-fail-overflowing-sub.nix:5:3: + 4| in + 5| a - b + | ^ + 6| error: integer overflow in subtracting -9223372036854775807 - 2 diff --git a/tests/functional/lang/eval-fail-recursion.err.exp b/tests/functional/lang/eval-fail-recursion.err.exp index 19380dc6536..8bfb4e12e47 100644 --- a/tests/functional/lang/eval-fail-recursion.err.exp +++ b/tests/functional/lang/eval-fail-recursion.err.exp @@ -1,12 +1,14 @@ error: … in the right operand of the update (//) operator - at /pwd/lang/eval-fail-recursion.nix:1:12: - 1| let a = {} // a; in a.foo - | ^ - 2| + at /pwd/lang/eval-fail-recursion.nix:2:11: + 1| let + 2| a = { } // a; + | ^ + 3| in error: infinite recursion encountered - at /pwd/lang/eval-fail-recursion.nix:1:15: - 1| let a = {} // a; in a.foo - | ^ - 2| + at /pwd/lang/eval-fail-recursion.nix:2:14: + 1| let + 2| a = { } // a; + | ^ + 3| in diff --git a/tests/functional/lang/eval-fail-remove.err.exp b/tests/functional/lang/eval-fail-remove.err.exp index 292b3c3f33a..0e087688a25 100644 --- a/tests/functional/lang/eval-fail-remove.err.exp +++ b/tests/functional/lang/eval-fail-remove.err.exp @@ -1,15 +1,15 @@ error: … while evaluating the attribute 'body' - at /pwd/lang/eval-fail-remove.nix:4:3: - 3| - 4| body = (removeAttrs attrs ["x"]).x; + at /pwd/lang/eval-fail-remove.nix:7:3: + 6| + 7| body = (removeAttrs attrs [ "x" ]).x; | ^ - 5| } + 8| } error: attribute 'x' missing - at /pwd/lang/eval-fail-remove.nix:4:10: - 3| - 4| body = (removeAttrs attrs ["x"]).x; + at /pwd/lang/eval-fail-remove.nix:7:10: + 6| + 7| body = (removeAttrs attrs [ "x" ]).x; | ^ - 5| } + 8| } Did you mean y? diff --git a/tests/functional/lang/eval-fail-scope-5.err.exp b/tests/functional/lang/eval-fail-scope-5.err.exp index b0b05cad737..6edc85f4f16 100644 --- a/tests/functional/lang/eval-fail-scope-5.err.exp +++ b/tests/functional/lang/eval-fail-scope-5.err.exp @@ -1,28 +1,28 @@ error: … while evaluating the attribute 'body' - at /pwd/lang/eval-fail-scope-5.nix:8:3: - 7| - 8| body = f {}; + at /pwd/lang/eval-fail-scope-5.nix:13:3: + 12| + 13| body = f { }; | ^ - 9| + 14| … from call site - at /pwd/lang/eval-fail-scope-5.nix:8:10: - 7| - 8| body = f {}; + at /pwd/lang/eval-fail-scope-5.nix:13:10: + 12| + 13| body = f { }; | ^ - 9| + 14| … while calling 'f' - at /pwd/lang/eval-fail-scope-5.nix:6:7: - 5| - 6| f = {x ? y, y ? x}: x + y; - | ^ - 7| + at /pwd/lang/eval-fail-scope-5.nix:7:5: + 6| f = + 7| { + | ^ + 8| x ? y, error: infinite recursion encountered - at /pwd/lang/eval-fail-scope-5.nix:6:12: - 5| - 6| f = {x ? y, y ? x}: x + y; - | ^ - 7| + at /pwd/lang/eval-fail-scope-5.nix:8:11: + 7| { + 8| x ? y, + | ^ + 9| y ? 
x, diff --git a/tests/functional/lang/eval-fail-undeclared-arg.err.exp b/tests/functional/lang/eval-fail-undeclared-arg.err.exp index 6e13a138eb7..353894d01e6 100644 --- a/tests/functional/lang/eval-fail-undeclared-arg.err.exp +++ b/tests/functional/lang/eval-fail-undeclared-arg.err.exp @@ -1,13 +1,13 @@ error: … from call site at /pwd/lang/eval-fail-undeclared-arg.nix:1:1: - 1| ({x, z}: x + z) {x = "foo"; y = "bla"; z = "bar";} + 1| ({ x, z }: x + z) { | ^ - 2| + 2| x = "foo"; error: function 'anonymous lambda' called with unexpected argument 'y' at /pwd/lang/eval-fail-undeclared-arg.nix:1:2: - 1| ({x, z}: x + z) {x = "foo"; y = "bla"; z = "bar";} + 1| ({ x, z }: x + z) { | ^ - 2| + 2| x = "foo"; Did you mean one of x or z? diff --git a/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp b/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp index 4326c965008..9a59f37f35e 100644 --- a/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp +++ b/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp @@ -1,14 +1,14 @@ error: … while evaluating an attribute name - at /pwd/lang/eval-fail-using-set-as-attr-name.nix:5:10: - 4| in - 5| attr.${key} - | ^ - 6| + at /pwd/lang/eval-fail-using-set-as-attr-name.nix:7:8: + 6| in + 7| attr.${key} + | ^ + 8| error: expected a string but found a set: { } - at /pwd/lang/eval-fail-using-set-as-attr-name.nix:5:10: - 4| in - 5| attr.${key} - | ^ - 6| + at /pwd/lang/eval-fail-using-set-as-attr-name.nix:7:8: + 6| in + 7| attr.${key} + | ^ + 8| diff --git a/tests/functional/repl/doc-comment-curried-args.expected b/tests/functional/repl/doc-comment-curried-args.expected index 56607e911e8..d2a5bf32853 100644 --- a/tests/functional/repl/doc-comment-curried-args.expected +++ b/tests/functional/repl/doc-comment-curried-args.expected @@ -6,7 +6,8 @@ Added variables. nix-repl> :doc curriedArgs Function `curriedArgs`\ - … defined at /path/to/tests/functional/repl/doc-comments.nix:48:5 + … defined at /path/to/tests/functional/repl/doc-comments.nix:87:5 + A documented function. @@ -17,7 +18,8 @@ nix-repl> "Note that users may not expect this to behave as it currently does" nix-repl> :doc x Function `curriedArgs`\ - … defined at /path/to/tests/functional/repl/doc-comments.nix:50:5 + … defined at /path/to/tests/functional/repl/doc-comments.nix:91:5 + The function returned by applying once diff --git a/tests/functional/repl/doc-comment-formals.expected b/tests/functional/repl/doc-comment-formals.expected index 1024919f4b9..357cf998680 100644 --- a/tests/functional/repl/doc-comment-formals.expected +++ b/tests/functional/repl/doc-comment-formals.expected @@ -9,6 +9,7 @@ nix-repl> "Note that this is not yet complete" nix-repl> :doc documentedFormals Function `documentedFormals`\ - … defined at /path/to/tests/functional/repl/doc-comments.nix:57:5 + … defined at /path/to/tests/functional/repl/doc-comments.nix:104:5 + Finds x diff --git a/tests/functional/repl/doc-comment-function.expected b/tests/functional/repl/doc-comment-function.expected index 3889c4f7860..030cfc3265a 100644 --- a/tests/functional/repl/doc-comment-function.expected +++ b/tests/functional/repl/doc-comment-function.expected @@ -2,6 +2,7 @@ Nix Type :? for help. 
nix-repl> :doc import ./doc-comment-function.nix -Function defined at /path/to/tests/functional/repl/doc-comment-function.nix:2:1 +Function defined at /path/to/tests/functional/repl/doc-comment-function.nix:4:1 + A doc comment for a file that only contains a function diff --git a/tests/functional/repl/doc-compact.expected b/tests/functional/repl/doc-compact.expected index 79f1fd44f59..276de2e60b5 100644 --- a/tests/functional/repl/doc-compact.expected +++ b/tests/functional/repl/doc-compact.expected @@ -6,6 +6,7 @@ Added variables. nix-repl> :doc compact Function `compact`\ - … defined at /path/to/tests/functional/repl/doc-comments.nix:18:20 + … defined at /path/to/tests/functional/repl/doc-comments.nix:27:5 + boom diff --git a/tests/functional/repl/doc-constant.expected b/tests/functional/repl/doc-constant.expected index 5787e04dc19..a68188b25ab 100644 --- a/tests/functional/repl/doc-constant.expected +++ b/tests/functional/repl/doc-constant.expected @@ -10,25 +10,27 @@ error: value does not have documentation nix-repl> :doc lib.version Attribute `version` - … defined at /path/to/tests/functional/repl/doc-comments.nix:30:3 + … defined at /path/to/tests/functional/repl/doc-comments.nix:47:3 + Immovably fixed. nix-repl> :doc lib.attr.empty Attribute `empty` - … defined at /path/to/tests/functional/repl/doc-comments.nix:33:3 + … defined at /path/to/tests/functional/repl/doc-comments.nix:52:3 + Unchangeably constant. nix-repl> :doc lib.attr.undocument error: … while evaluating the attribute 'attr.undocument' - at /path/to/tests/functional/repl/doc-comments.nix:33:3: - 32| /** Unchangeably constant. */ - 33| lib.attr.empty = { }; + at /path/to/tests/functional/repl/doc-comments.nix:52:3: + 51| */ + 52| lib.attr.empty = { }; | ^ - 34| + 53| error: attribute 'undocument' missing at «string»:1:1: @@ -39,28 +41,31 @@ error: nix-repl> :doc (import ./doc-comments.nix).constant Attribute `constant` - … defined at /path/to/tests/functional/repl/doc-comments.nix:27:3 + … defined at /path/to/tests/functional/repl/doc-comments.nix:42:3 + Firmly rigid. nix-repl> :doc (import ./doc-comments.nix).lib.version Attribute `version` - … defined at /path/to/tests/functional/repl/doc-comments.nix:30:3 + … defined at /path/to/tests/functional/repl/doc-comments.nix:47:3 + Immovably fixed. nix-repl> :doc (import ./doc-comments.nix).lib.attr.empty Attribute `empty` - … defined at /path/to/tests/functional/repl/doc-comments.nix:33:3 + … defined at /path/to/tests/functional/repl/doc-comments.nix:52:3 + Unchangeably constant. nix-repl> :doc (import ./doc-comments.nix).lib.attr.undocumented Attribute `undocumented` - … defined at /path/to/tests/functional/repl/doc-comments.nix:35:3 + … defined at /path/to/tests/functional/repl/doc-comments.nix:54:3 No documentation found. @@ -97,11 +102,11 @@ error: attribute 'missing' missing nix-repl> :doc lib.attr.undocumental error: … while evaluating the attribute 'attr.undocumental' - at /path/to/tests/functional/repl/doc-comments.nix:33:3: - 32| /** Unchangeably constant. */ - 33| lib.attr.empty = { }; + at /path/to/tests/functional/repl/doc-comments.nix:52:3: + 51| */ + 52| lib.attr.empty = { }; | ^ - 34| + 53| error: attribute 'undocumental' missing at «string»:1:1: diff --git a/tests/functional/repl/doc-floatedIn.expected b/tests/functional/repl/doc-floatedIn.expected index 82bb80b9501..3bf1c40715b 100644 --- a/tests/functional/repl/doc-floatedIn.expected +++ b/tests/functional/repl/doc-floatedIn.expected @@ -6,6 +6,7 @@ Added variables. 
nix-repl> :doc floatedIn Function `floatedIn`\ - … defined at /path/to/tests/functional/repl/doc-comments.nix:16:5 + … defined at /path/to/tests/functional/repl/doc-comments.nix:21:5 + This also works. diff --git a/tests/functional/repl/doc-functor.expected b/tests/functional/repl/doc-functor.expected index 8cb2706ef0f..503fb807368 100644 --- a/tests/functional/repl/doc-functor.expected +++ b/tests/functional/repl/doc-functor.expected @@ -20,7 +20,7 @@ Look, it's just like a function! nix-repl> :doc recursive Function `__functor`\ - … defined at /path/to/tests/functional/repl/doc-functor.nix:77:23 + … defined at /path/to/tests/functional/repl/doc-functor.nix:82:23 This looks bad, but the docs are ok because of the eta expansion. @@ -30,27 +30,27 @@ error: … while partially calling '__functor' to retrieve documentation … while calling '__functor' - at /path/to/tests/functional/repl/doc-functor.nix:85:17: - 84| */ - 85| __functor = self: self.__functor self; + at /path/to/tests/functional/repl/doc-functor.nix:90:17: + 89| */ + 90| __functor = self: self.__functor self; | ^ - 86| }; + 91| }; … from call site - at /path/to/tests/functional/repl/doc-functor.nix:85:23: - 84| */ - 85| __functor = self: self.__functor self; + at /path/to/tests/functional/repl/doc-functor.nix:90:23: + 89| */ + 90| __functor = self: self.__functor self; | ^ - 86| }; + 91| }; (19999 duplicate frames omitted) error: stack overflow; max-call-depth exceeded - at /path/to/tests/functional/repl/doc-functor.nix:85:23: - 84| */ - 85| __functor = self: self.__functor self; + at /path/to/tests/functional/repl/doc-functor.nix:90:23: + 89| */ + 90| __functor = self: self.__functor self; | ^ - 86| }; + 91| }; nix-repl> :doc diverging error: @@ -59,18 +59,18 @@ error: (10000 duplicate frames omitted) … while calling '__functor' - at /path/to/tests/functional/repl/doc-functor.nix:97:19: - 96| f = x: { - 97| __functor = self: (f (x + 1)); - | ^ - 98| }; + at /path/to/tests/functional/repl/doc-functor.nix:103:21: + 102| f = x: { + 103| __functor = self: (f (x + 1)); + | ^ + 104| }; error: stack overflow; max-call-depth exceeded - at /path/to/tests/functional/repl/doc-functor.nix:97:26: - 96| f = x: { - 97| __functor = self: (f (x + 1)); - | ^ - 98| }; + at /path/to/tests/functional/repl/doc-functor.nix:103:28: + 102| f = x: { + 103| __functor = self: (f (x + 1)); + | ^ + 104| }; nix-repl> :doc helper Function `square`\ @@ -81,21 +81,21 @@ Compute x^2 nix-repl> :doc helper2 Function `__functor`\ - … defined at /path/to/tests/functional/repl/doc-functor.nix:45:23 + … defined at /path/to/tests/functional/repl/doc-functor.nix:46:13 This is a function that can be overridden. nix-repl> :doc lib.helper3 Function `__functor`\ - … defined at /path/to/tests/functional/repl/doc-functor.nix:45:23 + … defined at /path/to/tests/functional/repl/doc-functor.nix:46:13 This is a function that can be overridden. nix-repl> :doc helper3 Function `__functor`\ - … defined at /path/to/tests/functional/repl/doc-functor.nix:45:23 + … defined at /path/to/tests/functional/repl/doc-functor.nix:46:13 This is a function that can be overridden. diff --git a/tests/functional/repl/doc-lambda-flavors.expected b/tests/functional/repl/doc-lambda-flavors.expected index ab5c956390f..437c09d2b31 100644 --- a/tests/functional/repl/doc-lambda-flavors.expected +++ b/tests/functional/repl/doc-lambda-flavors.expected @@ -6,24 +6,28 @@ Added variables. 
nix-repl> :doc nonStrict Function `nonStrict`\ - … defined at /path/to/tests/functional/repl/doc-comments.nix:37:70 + … defined at /path/to/tests/functional/repl/doc-comments.nix:60:5 + My syntax is not strict, but I'm strict anyway. nix-repl> :doc strict Function `strict`\ - … defined at /path/to/tests/functional/repl/doc-comments.nix:38:63 + … defined at /path/to/tests/functional/repl/doc-comments.nix:65:5 + I don't have to be strict, but I am anyway. nix-repl> :doc strictPre Function `strictPre`\ - … defined at /path/to/tests/functional/repl/doc-comments.nix:40:48 + … defined at /path/to/tests/functional/repl/doc-comments.nix:71:5 + Here's one way to do this nix-repl> :doc strictPost Function `strictPost`\ - … defined at /path/to/tests/functional/repl/doc-comments.nix:41:53 + … defined at /path/to/tests/functional/repl/doc-comments.nix:76:5 + Here's another way to do this diff --git a/tests/functional/repl/doc-measurement.expected b/tests/functional/repl/doc-measurement.expected index 555cac9a2a0..862697613be 100644 --- a/tests/functional/repl/doc-measurement.expected +++ b/tests/functional/repl/doc-measurement.expected @@ -6,6 +6,7 @@ Added variables. nix-repl> :doc measurement Function `measurement`\ - … defined at /path/to/tests/functional/repl/doc-comments.nix:13:17 + … defined at /path/to/tests/functional/repl/doc-comments.nix:15:17 + 👈 precisely this wide 👉 diff --git a/tests/functional/repl/doc-unambiguous.expected b/tests/functional/repl/doc-unambiguous.expected index 0db5505d781..32ca9aef22a 100644 --- a/tests/functional/repl/doc-unambiguous.expected +++ b/tests/functional/repl/doc-unambiguous.expected @@ -6,6 +6,7 @@ Added variables. nix-repl> :doc unambiguous Function `unambiguous`\ - … defined at /path/to/tests/functional/repl/doc-comments.nix:24:5 + … defined at /path/to/tests/functional/repl/doc-comments.nix:37:5 + Very close From 791d6cf4332d62da6edd88eb5d20c9cef34c7b92 Mon Sep 17 00:00:00 2001 From: Ben Millwood Date: Thu, 10 Oct 2024 16:05:50 +0100 Subject: [PATCH 126/361] Improve "illegal path references in fixed output derivation" error The main improvement is that the new message gives an example of a path that is referenced, which should make it easier to track down. While there, I also clarified the wording, saying exactly why the paths in question were illegal. (cherry picked from commit 4e5d1b281e503641d649ddba22d49361e6295e2e) --- src/libstore/unix/build/local-derivation-goal.cc | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/libstore/unix/build/local-derivation-goal.cc b/src/libstore/unix/build/local-derivation-goal.cc index 06a2f85be84..5b9bc0bb011 100644 --- a/src/libstore/unix/build/local-derivation-goal.cc +++ b/src/libstore/unix/build/local-derivation-goal.cc @@ -2657,10 +2657,14 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() wanted.to_string(HashFormat::SRI, true), got.to_string(HashFormat::SRI, true))); } - if (!newInfo0.references.empty()) + if (!newInfo0.references.empty()) { + auto numViolations = newInfo.references.size(); delayedException = std::make_exception_ptr( - BuildError("illegal path references in fixed-output derivation '%s'", - worker.store.printStorePath(drvPath))); + BuildError("fixed-output derivations must not reference store paths: '%s' references %d distinct paths, e.g. 
'%s'", + worker.store.printStorePath(drvPath), + numViolations, + worker.store.printStorePath(*newInfo.references.begin()))); + } return newInfo0; }, From 527e68ac3eeb4822d863ff6ac2557cc2fc3268be Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 27 Jan 2025 12:32:46 +0100 Subject: [PATCH 127/361] refactor: Extract EvalState::realiseString (cherry picked from commit 7465fbe9264e46c556b456226e8fb980fcfd7e66) --- src/libexpr-c/nix_api_value.cc | 6 +----- src/libexpr/eval.hh | 9 +++++++++ src/libexpr/primops.cc | 9 +++++++++ 3 files changed, 19 insertions(+), 5 deletions(-) diff --git a/src/libexpr-c/nix_api_value.cc b/src/libexpr-c/nix_api_value.cc index bae078d312f..448f4a58a78 100644 --- a/src/libexpr-c/nix_api_value.cc +++ b/src/libexpr-c/nix_api_value.cc @@ -613,12 +613,8 @@ nix_realised_string * nix_string_realise(nix_c_context * context, EvalState * st context->last_err_code = NIX_OK; try { auto & v = check_value_in(value); - nix::NixStringContext stringContext; - auto rawStr = state->state.coerceToString(nix::noPos, v, stringContext, "while realising a string").toOwned(); nix::StorePathSet storePaths; - auto rewrites = state->state.realiseContext(stringContext, &storePaths); - - auto s = nix::rewriteStrings(rawStr, rewrites); + auto s = state->state.realiseString(v, &storePaths, isIFD); // Convert to the C API StorePath type and convert to vector for index-based access std::vector vec; diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 84b7d823c36..767578343d9 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -820,6 +820,15 @@ public: */ [[nodiscard]] StringMap realiseContext(const NixStringContext & context, StorePathSet * maybePaths = nullptr, bool isIFD = true); + /** + * Realise the given string with context, and return the string with outputs instead of downstream output placeholders. + * @param[in] str the string to realise + * @param[out] paths all referenced store paths will be added to this set + * @return the realised string + * @throw EvalError if the value is not a string, path or derivation (see `coerceToString`) + */ + std::string realiseString(Value & str, StorePathSet * storePathsOutMaybe, bool isIFD = true, const PosIdx pos = noPos); + /* Call the binary path filter predicate used builtins.path etc. 
*/ bool callPathFilter( Value * filterFun, diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index a0e2753b5ec..e6f6f1dda24 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -47,6 +47,15 @@ static inline Value * mkString(EvalState & state, const std::csub_match & match) return v; } +std::string EvalState::realiseString(Value & s, StorePathSet * storePathsOutMaybe, bool isIFD, const PosIdx pos) +{ + nix::NixStringContext stringContext; + auto rawStr = coerceToString(pos, s, stringContext, "while realising a string").toOwned(); + auto rewrites = realiseContext(stringContext, storePathsOutMaybe, isIFD); + + return nix::rewriteStrings(rawStr, rewrites); +} + StringMap EvalState::realiseContext(const NixStringContext & context, StorePathSet * maybePathsOut, bool isIFD) { std::vector drvs; From 605bd06ca4512c401573053c3605287b3275e8b8 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 27 Jan 2025 14:25:35 +0100 Subject: [PATCH 128/361] packages.default: Add meta.mainProgram (cherry picked from commit 0d7418b4feebcfb3e0e66798398d3ecf618c1e58) --- packaging/everything.nix | 1 + 1 file changed, 1 insertion(+) diff --git a/packaging/everything.nix b/packaging/everything.nix index 2b47c31bbf5..0974a34df50 100644 --- a/packaging/everything.nix +++ b/packaging/everything.nix @@ -93,6 +93,7 @@ let libs = throw "`nix.dev.libs` is not meant to be used; use `nix.libs` instead."; }; meta = { + mainProgram = "nix"; pkgConfigModules = [ "nix-cmd" "nix-expr" From a75cf5770280e14998097c7bbed0521b924dab91 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 27 Jan 2025 14:26:05 +0100 Subject: [PATCH 129/361] packages.nix-cli: Add meta.mainProgram (cherry picked from commit 850329dea59358db6e8ea572d769eb706715c508) --- src/nix/package.nix | 1 + 1 file changed, 1 insertion(+) diff --git a/src/nix/package.nix b/src/nix/package.nix index 89c52c3bb05..6e59adc3800 100644 --- a/src/nix/package.nix +++ b/src/nix/package.nix @@ -103,6 +103,7 @@ mkMesonExecutable (finalAttrs: { ]; meta = { + mainProgram = "nix"; platforms = lib.platforms.unix ++ lib.platforms.windows; }; From 1c1f8b2343b15e88b7023adc01529d0496d92014 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 30 Jan 2025 11:27:24 +0100 Subject: [PATCH 130/361] Fix duplicate setPathDisplay() Fixes messages like 'copying /tmp/repo/tmp/repo to the store'. The PosixSourceAccessor already sets the prefix. Setting the prefix twice shouldn't be a problem, but GitRepoImpl::getAccessor() returns a wrapped accessor so it's not actually idempotent. (cherry picked from commit 102d90ebf07b1f268a3551daf5457131ae063d4a) --- src/libfetchers/git.cc | 2 -- tests/functional/fetchGit.sh | 1 + 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index b411e112f5f..e8698709af2 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -737,8 +737,6 @@ struct GitInputScheme : InputScheme exportIgnore, makeNotAllowedError(repoInfo.locationToArg())); - accessor->setPathDisplay(repoInfo.locationToArg()); - /* If the repo has submodules, return a mounted input accessor consisting of the accessor for the top-level repo and the accessors for the submodule workdirs. */ diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh index 78925b5cdd6..f3eda54dcdf 100755 --- a/tests/functional/fetchGit.sh +++ b/tests/functional/fetchGit.sh @@ -37,6 +37,7 @@ nix-instantiate --eval -E "builtins.readFile ((builtins.fetchGit file://$TEST_RO # Fetch a worktree. 
unset _NIX_FORCE_HTTP +expectStderr 0 nix eval -vvvv --impure --raw --expr "(builtins.fetchGit file://$TEST_ROOT/worktree).outPath" | grepQuiet "copying '$TEST_ROOT/worktree/' to the store" path0=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$TEST_ROOT/worktree).outPath") path0_=$(nix eval --impure --raw --expr "(builtins.fetchTree { type = \"git\"; url = file://$TEST_ROOT/worktree; }).outPath") [[ $path0 = $path0_ ]] From 28684af74b56fba5bbcfa976b5c37fe355ea88af Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 30 Jan 2025 12:41:02 +0100 Subject: [PATCH 131/361] =?UTF-8?q?GitExportIgnoreSourceAccessor:=20Don't?= =?UTF-8?q?=20show=20=C2=ABunknown=C2=BB?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In general we should set the path display prefix on the inner accessor, so we now pass the display prefix to getAccessor(). (cherry picked from commit 3032512425a09fc58f2d658442043894e0aab256) --- src/libfetchers/git-utils.cc | 21 +++++++++++++-------- src/libfetchers/git-utils.hh | 5 ++++- src/libfetchers/git.cc | 4 +--- src/libfetchers/github.cc | 7 ++++--- src/libfetchers/tarball.cc | 12 +++++++----- 5 files changed, 29 insertions(+), 20 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 6a75daf6124..a6b13fb31c8 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -508,7 +508,10 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this */ ref getRawAccessor(const Hash & rev); - ref getAccessor(const Hash & rev, bool exportIgnore) override; + ref getAccessor( + const Hash & rev, + bool exportIgnore, + std::string displayPrefix) override; ref getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError e) override; @@ -627,7 +630,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this Hash treeHashToNarHash(const Hash & treeHash) override { - auto accessor = getAccessor(treeHash, false); + auto accessor = getAccessor(treeHash, false, ""); fetchers::Cache::Key cacheKey{"treeHashToNarHash", {{"treeHash", treeHash.gitRev()}}}; @@ -1194,16 +1197,18 @@ ref GitRepoImpl::getRawAccessor(const Hash & rev) return make_ref(self, rev); } -ref GitRepoImpl::getAccessor(const Hash & rev, bool exportIgnore) +ref GitRepoImpl::getAccessor( + const Hash & rev, + bool exportIgnore, + std::string displayPrefix) { auto self = ref(shared_from_this()); ref rawGitAccessor = getRawAccessor(rev); - if (exportIgnore) { + rawGitAccessor->setPathDisplay(std::move(displayPrefix)); + if (exportIgnore) return make_ref(self, rawGitAccessor, rev); - } - else { + else return rawGitAccessor; - } } ref GitRepoImpl::getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError) @@ -1236,7 +1241,7 @@ std::vector> GitRepoImpl::getSubmodules /* Read the .gitmodules files from this revision. */ CanonPath modulesFile(".gitmodules"); - auto accessor = getAccessor(rev, exportIgnore); + auto accessor = getAccessor(rev, exportIgnore, ""); if (!accessor->pathExists(modulesFile)) return {}; /* Parse it and get the revision of each submodule. 
*/ diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh index ff115143fc7..9677f507923 100644 --- a/src/libfetchers/git-utils.hh +++ b/src/libfetchers/git-utils.hh @@ -86,7 +86,10 @@ struct GitRepo virtual bool hasObject(const Hash & oid) = 0; - virtual ref getAccessor(const Hash & rev, bool exportIgnore) = 0; + virtual ref getAccessor( + const Hash & rev, + bool exportIgnore, + std::string displayPrefix) = 0; virtual ref getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError) = 0; diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index e8698709af2..e40afb865eb 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -672,9 +672,7 @@ struct GitInputScheme : InputScheme verifyCommit(input, repo); bool exportIgnore = getExportIgnoreAttr(input); - auto accessor = repo->getAccessor(rev, exportIgnore); - - accessor->setPathDisplay("«" + input.to_string() + "»"); + auto accessor = repo->getAccessor(rev, exportIgnore, "«" + input.to_string() + "»"); /* If the repo has submodules, fetch them and return a mounted input accessor consisting of the accessor for the top-level diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 18594198847..ec469df7cd3 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -294,9 +294,10 @@ struct GitArchiveInputScheme : InputScheme #endif input.attrs.insert_or_assign("lastModified", uint64_t(tarballInfo.lastModified)); - auto accessor = getTarballCache()->getAccessor(tarballInfo.treeHash, false); - - accessor->setPathDisplay("«" + input.to_string() + "»"); + auto accessor = getTarballCache()->getAccessor( + tarballInfo.treeHash, + false, + "«" + input.to_string() + "»"); return {accessor, input}; } diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index 28574e7b1e7..699612e250c 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -105,7 +105,8 @@ DownloadFileResult downloadFile( static DownloadTarballResult downloadTarball_( const std::string & url, - const Headers & headers) + const Headers & headers, + const std::string & displayPrefix) { Cache::Key cacheKey{"tarball", {{"url", url}}}; @@ -118,7 +119,7 @@ static DownloadTarballResult downloadTarball_( .treeHash = treeHash, .lastModified = (time_t) getIntAttr(infoAttrs, "lastModified"), .immutableUrl = maybeGetStrAttr(infoAttrs, "immutableUrl"), - .accessor = getTarballCache()->getAccessor(treeHash, false), + .accessor = getTarballCache()->getAccessor(treeHash, false, displayPrefix), }; }; @@ -371,9 +372,10 @@ struct TarballInputScheme : CurlInputScheme { auto input(_input); - auto result = downloadTarball_(getStrAttr(input.attrs, "url"), {}); - - result.accessor->setPathDisplay("«" + input.to_string() + "»"); + auto result = downloadTarball_( + getStrAttr(input.attrs, "url"), + {}, + "«" + input.to_string() + "»"); if (result.immutableUrl) { auto immutableInput = Input::fromURL(*input.settings, *result.immutableUrl); From 491aaaf116cdf36a5f97316f61066fdeb6f29e68 Mon Sep 17 00:00:00 2001 From: Illia Bobyr Date: Mon, 13 Jan 2025 18:19:16 -0800 Subject: [PATCH 132/361] nix-profile.fish: Typo NIX_SS{H => L}_CERT_FILE (cherry picked from commit 803fb83f7ffb3bd5e2e1ee3bb9ce3ea3001bec2c) # Conflicts: # scripts/nix-profile.fish.in --- scripts/nix-profile.fish.in | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/scripts/nix-profile.fish.in b/scripts/nix-profile.fish.in index 619df52b895..becc5efd0d9 100644 --- 
a/scripts/nix-profile.fish.in +++ b/scripts/nix-profile.fish.in @@ -56,4 +56,36 @@ if test -n "$HOME" && test -n "$USER" set --erase NIX_LINK end +<<<<<<< HEAD +======= +# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work. +if test -n "$NIX_SSL_CERT_FILE" + : # Allow users to override the NIX_SSL_CERT_FILE +else if test -e /etc/ssl/certs/ca-certificates.crt # NixOS, Ubuntu, Debian, Gentoo, Arch + set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt +else if test -e /etc/ssl/ca-bundle.pem # openSUSE Tumbleweed + set --export NIX_SSL_CERT_FILE /etc/ssl/ca-bundle.pem +else if test -e /etc/ssl/certs/ca-bundle.crt # Old NixOS + set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-bundle.crt +else if test -e /etc/pki/tls/certs/ca-bundle.crt # Fedora, CentOS + set --export NIX_SSL_CERT_FILE /etc/pki/tls/certs/ca-bundle.crt +else if test -e "$NIX_LINK/etc/ssl/certs/ca-bundle.crt" # fall back to cacert in Nix profile + set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ssl/certs/ca-bundle.crt" +else if test -e "$NIX_LINK/etc/ca-bundle.crt" # old cacert in Nix profile + set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ca-bundle.crt" +end + +# Only use MANPATH if it is already set. In general `man` will just simply +# pick up `.nix-profile/share/man` because is it close to `.nix-profile/bin` +# which is in the $PATH. For more info, run `manpath -d`. +if set --query MANPATH + set --export --prepend --path MANPATH "$NIX_LINK/share/man" +end + +add_path "$NIX_LINK/bin" +set --erase NIX_LINK + +# Cleanup + +>>>>>>> 803fb83f7 (nix-profile.fish: Typo NIX_SS{H => L}_CERT_FILE) functions -e add_path From 727cf59997c33a03558dab84071767983d57d892 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 30 Jan 2025 11:47:41 +0100 Subject: [PATCH 133/361] Git fetcher: Don't pass URL query parameters for file:// URLs Git interprets them as part of the file name, so passing parameters like 'rev' breaks. Only relevant for testing (when _NIX_FORCE_HTTP is set) and local bare repos. (cherry picked from commit 9f72d5bce9205c9f45dcb0e06b9573ccca5724ac) --- src/libfetchers/git.cc | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index e40afb865eb..a1f65c0db24 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -459,8 +459,14 @@ struct GitInputScheme : InputScheme url); } repoInfo.location = std::filesystem::absolute(url.path); - } else + } else { + if (url.scheme == "file") + /* Query parameters are meaningless for file://, but + Git interprets them as part of the file name. So get + rid of them. */ + url.query.clear(); repoInfo.location = url; + } // If this is a local directory and no ref or revision is // given, then allow the use of an unclean working tree. From 1fe33c13d94744e071c7a4d5fe6cd93f12dab40a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 30 Jan 2025 18:23:27 +0100 Subject: [PATCH 134/361] Git fetcher: Don't use refspec : This causes Git to create a local ref named refs/head/, e.g. $ git -C ~/.cache/nix/gitv3/11irpim06vj4h6c0w8yls6kx4hvl0qd0gr1fvk47n76g6wf1s1vk ls-remote --symref . 
5c4410e3b9891c05ab40d723de78c6f0be45ad30 refs/heads/5c4410e3b9891c05ab40d723de78c6f0be45ad30 7f6bde8a20de4cccc2256f088bc5af9dbe38881d refs/heads/7f6bde8a20de4cccc2256f088bc5af9dbe38881d which confuses readHead(), leading to errors like fatal: Refusing to point HEAD outside of refs/ warning: could not update cached head 'd275d93aa0bb8a004939b2f1e87f559f989453be' for 'file:///tmp/repo' (cherry picked from commit ee9fa0d3603165631e65c8e694a033c47872267a) --- src/libfetchers/git.cc | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index a1f65c0db24..758bb3653a0 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -611,16 +611,16 @@ struct GitInputScheme : InputScheme try { auto fetchRef = getAllRefsAttr(input) - ? "refs/*" + ? "refs/*:refs/*" : input.getRev() ? input.getRev()->gitRev() : ref.compare(0, 5, "refs/") == 0 - ? ref + ? fmt("%1%:%1%", ref) : ref == "HEAD" ? ref - : "refs/heads/" + ref; + : fmt("%1%:%1%", "refs/heads/" + ref); - repo->fetch(repoUrl.to_string(), fmt("%s:%s", fetchRef, fetchRef), getShallowAttr(input)); + repo->fetch(repoUrl.to_string(), fetchRef, getShallowAttr(input)); } catch (Error & e) { if (!std::filesystem::exists(localRefFile)) throw; logError(e.info()); From 8e4cd2f5370e2083b99cbc231f4a2180ab813b5a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 30 Jan 2025 18:57:43 +0100 Subject: [PATCH 135/361] readHead(): Make sure we're returning the HEAD ref line If we previously fetched by revision, the output of "git ls-remote" won't start with the expected line like ref: refs/heads/master HEAD but will be something like 5c4410e3b9891c05ab40d723de78c6f0be45ad30 refs/heads/5c4410e3b9891c05ab40d723de78c6f0be45ad30 This then causes Nix to treat that revision as a refname, which then leads to warnings like warning: could not update cached head '5c4410e3b9891c05ab40d723de78c6f0be45ad30' for 'file:///tmp/repo' (cherry picked from commit c8b22643ba13b12f493e8b90dfa4b416bf267553) --- src/libfetchers/git.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 758bb3653a0..0d423a7a39f 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -69,7 +69,7 @@ std::optional readHead(const Path & path) std::string_view line = output; line = line.substr(0, line.find("\n")); - if (const auto parseResult = git::parseLsRemoteLine(line)) { + if (const auto parseResult = git::parseLsRemoteLine(line); parseResult && parseResult->reference == "HEAD") { switch (parseResult->kind) { case git::LsRemoteRefLine::Kind::Symbolic: debug("resolved HEAD ref '%s' for repo '%s'", parseResult->target, path); From 30435e0559ae2d6784a115b7ffea266964fcb25d Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 24 Jan 2025 16:37:09 +0100 Subject: [PATCH 136/361] pre-commit/check-merge-conflicts-2: fix use outside dev shell Note that this is just a script that is meant to run outside a derivation (but also can be called by a derivation builder). `touch $out` does not belong in it. `touch $out` worked accidentally in the derivation-based check, and also in the dev shell, but if pre-commit is invoked without the dev shell it would fail. 
--- maintainers/flake-module.nix | 1 - 1 file changed, 1 deletion(-) diff --git a/maintainers/flake-module.nix b/maintainers/flake-module.nix index 9b2c6dcbf80..2f19072eeef 100644 --- a/maintainers/flake-module.nix +++ b/maintainers/flake-module.nix @@ -35,7 +35,6 @@ echo "ERROR: found merge/patch conflicts in files" exit 1 fi - touch $out ''}"; }; clang-format = { From df8d5e61ad736653486c0d0a2fbd81d9b08f008b Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 29 Jan 2025 21:53:12 +0100 Subject: [PATCH 137/361] test: Fix shellcheck by giving git-hashing scripts shebangs This seems to be the way to do it now, even though I can't run them without setting at least one env var. I'll only fix shellcheck for now. Don't shoot the messenger. It isn't quite clear to me why the previous commit masked this problem, but I'm glad shellcheck has an effect or more effect now. --- tests/functional/git-hashing/simple.sh | 2 ++ 1 file changed, 2 insertions(+) mode change 100644 => 100755 tests/functional/git-hashing/simple.sh diff --git a/tests/functional/git-hashing/simple.sh b/tests/functional/git-hashing/simple.sh old mode 100644 new mode 100755 index f43168eb214..e02d8b29761 --- a/tests/functional/git-hashing/simple.sh +++ b/tests/functional/git-hashing/simple.sh @@ -1,3 +1,5 @@ +#!/usr/bin/env bash + source common.sh repo="$TEST_ROOT/scratch" From 0531f1299c467b083638aef3656c3d88a25b86ec Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 10 Feb 2025 16:01:13 +0100 Subject: [PATCH 138/361] Resolve conflict --- scripts/nix-profile.fish.in | 34 +--------------------------------- 1 file changed, 1 insertion(+), 33 deletions(-) diff --git a/scripts/nix-profile.fish.in b/scripts/nix-profile.fish.in index becc5efd0d9..3a8c234adee 100644 --- a/scripts/nix-profile.fish.in +++ b/scripts/nix-profile.fish.in @@ -29,7 +29,7 @@ if test -n "$HOME" && test -n "$USER" end # Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work. - if test -n "$NIX_SSH_CERT_FILE" + if test -n "$NIX_SSL_CERT_FILE" : # Allow users to override the NIX_SSL_CERT_FILE else if test -e /etc/ssl/certs/ca-certificates.crt # NixOS, Ubuntu, Debian, Gentoo, Arch set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt @@ -56,36 +56,4 @@ if test -n "$HOME" && test -n "$USER" set --erase NIX_LINK end -<<<<<<< HEAD -======= -# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work. -if test -n "$NIX_SSL_CERT_FILE" - : # Allow users to override the NIX_SSL_CERT_FILE -else if test -e /etc/ssl/certs/ca-certificates.crt # NixOS, Ubuntu, Debian, Gentoo, Arch - set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt -else if test -e /etc/ssl/ca-bundle.pem # openSUSE Tumbleweed - set --export NIX_SSL_CERT_FILE /etc/ssl/ca-bundle.pem -else if test -e /etc/ssl/certs/ca-bundle.crt # Old NixOS - set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-bundle.crt -else if test -e /etc/pki/tls/certs/ca-bundle.crt # Fedora, CentOS - set --export NIX_SSL_CERT_FILE /etc/pki/tls/certs/ca-bundle.crt -else if test -e "$NIX_LINK/etc/ssl/certs/ca-bundle.crt" # fall back to cacert in Nix profile - set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ssl/certs/ca-bundle.crt" -else if test -e "$NIX_LINK/etc/ca-bundle.crt" # old cacert in Nix profile - set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ca-bundle.crt" -end - -# Only use MANPATH if it is already set. In general `man` will just simply -# pick up `.nix-profile/share/man` because is it close to `.nix-profile/bin` -# which is in the $PATH. For more info, run `manpath -d`. 
-if set --query MANPATH - set --export --prepend --path MANPATH "$NIX_LINK/share/man" -end - -add_path "$NIX_LINK/bin" -set --erase NIX_LINK - -# Cleanup - ->>>>>>> 803fb83f7 (nix-profile.fish: Typo NIX_SS{H => L}_CERT_FILE) functions -e add_path From 83306bb841cff73723b813905c2e7dab76c6bfcc Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 11 Feb 2025 20:36:28 +0100 Subject: [PATCH 139/361] copyPathToStore(): Preserve symlinks E.g. in a derivation attribute `foo = ./bar`, if ./bar is a symlink, we should copy the symlink to the store, not its target. This restores the behaviour of Nix <= 2.19. (cherry picked from commit 26b87e78b5dd62d9cca7c7d08a697dd2d22ae38c) --- src/libexpr/eval.cc | 2 +- tests/functional/meson.build | 1 + tests/functional/simple.sh | 2 +- tests/functional/symlinks.sh | 16 ++++++++++++++++ 4 files changed, 19 insertions(+), 2 deletions(-) create mode 100644 tests/functional/symlinks.sh diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 19ca1a3591e..dee764429e9 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -2384,7 +2384,7 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat : [&]() { auto dstPath = fetchToStore( *store, - path.resolveSymlinks(), + path.resolveSymlinks(SymlinkResolution::Ancestors), settings.readOnlyMode ? FetchMode::DryRun : FetchMode::Copy, path.baseName(), ContentAddressMethod::Raw::NixArchive, diff --git a/tests/functional/meson.build b/tests/functional/meson.build index 83e08c4f5ad..03a07bc54e5 100644 --- a/tests/functional/meson.build +++ b/tests/functional/meson.build @@ -164,6 +164,7 @@ suites = [ 'debugger.sh', 'extra-sandbox-profile.sh', 'help.sh', + 'symlinks.sh', ], 'workdir': meson.current_source_dir(), }, diff --git a/tests/functional/simple.sh b/tests/functional/simple.sh index 8afa369c2e2..c1f2eef411e 100755 --- a/tests/functional/simple.sh +++ b/tests/functional/simple.sh @@ -15,7 +15,7 @@ echo "output path is $outPath" [[ ! -w $outPath ]] text=$(cat "$outPath/hello") -if test "$text" != "Hello World!"; then exit 1; fi +[[ "$text" = "Hello World!" ]] TODO_NixOS diff --git a/tests/functional/symlinks.sh b/tests/functional/symlinks.sh new file mode 100644 index 00000000000..5eb22b3f901 --- /dev/null +++ b/tests/functional/symlinks.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +source common.sh + +# Check that when we have a derivation attribute that refers to a +# symlink, we copy the symlink, not its target. +# shellcheck disable=SC2016 +nix build --impure --no-link --expr ' + with import ./config.nix; + + mkDerivation { + name = "simple"; + builder = builtins.toFile "builder.sh" "[[ -L \"$symlink\" ]]; mkdir $out"; + symlink = ./lang/symlink-resolution/foo/overlays; + } +' From d3082284974e8028fd406909651fdec8f23f19d4 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 11 Feb 2025 22:42:36 +0100 Subject: [PATCH 140/361] Don't import a symlink This is a workaround to avoid differing evaluation results between Nix 2.19 and >= 2.20 (#12449). 
(cherry picked from commit 2e20a5f8220c736681752587193d36b7955f6cbc) --- packaging/components.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packaging/components.nix b/packaging/components.nix index d1bfe83bf0e..07bb209cd4f 100644 --- a/packaging/components.nix +++ b/packaging/components.nix @@ -56,7 +56,7 @@ in nix-cli = callPackage ../src/nix/package.nix { version = fineVersion; }; - nix-functional-tests = callPackage ../src/nix-functional-tests/package.nix { + nix-functional-tests = callPackage ../tests/functional/package.nix { version = fineVersion; }; From dffcc184d7ab8a39085015181e7e693b0de5433a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 12 Feb 2025 14:53:04 +0100 Subject: [PATCH 141/361] lockFlake(): When refetching a locked flake, use the locked ref Otherwise we may accidentally update a lock when we shouldn't. Fixes #12445. (cherry picked from commit 5c552b62fc1b45e614b86bb93c7b6ef4f14bff18) # Conflicts: # src/libflake/flake/flake.cc --- src/libflake/flake/flake.cc | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/libflake/flake/flake.cc b/src/libflake/flake/flake.cc index 06260c67a5d..5827668a2c5 100644 --- a/src/libflake/flake/flake.cc +++ b/src/libflake/flake/flake.cc @@ -554,12 +554,18 @@ LockedFlake lockFlake( /* Get the input flake, resolve 'path:./...' flakerefs relative to the parent flake. */ - auto getInputFlake = [&]() + auto getInputFlake = [&](const FlakeRef & ref) { if (auto resolvedPath = resolveRelativePath()) { +<<<<<<< HEAD return readFlake(state, *input.ref, *input.ref, *input.ref, *resolvedPath, inputPath); } else { return getFlake(state, *input.ref, useRegistries, flakeCache, inputPath); +======= + return readFlake(state, ref, ref, ref, *resolvedPath, inputAttrPath); + } else { + return getFlake(state, ref, useRegistries, flakeCache, inputAttrPath); +>>>>>>> 5c552b62f (lockFlake(): When refetching a locked flake, use the locked ref) } }; @@ -640,7 +646,7 @@ LockedFlake lockFlake( } if (mustRefetch) { - auto inputFlake = getInputFlake(); + auto inputFlake = getInputFlake(oldLock->lockedRef); nodePaths.emplace(childNode, inputFlake.path.parent()); computeLocks(inputFlake.inputs, childNode, inputPath, oldLock, followsPrefix, inputFlake.path, false); @@ -668,7 +674,7 @@ LockedFlake lockFlake( auto ref = (input2.ref && explicitCliOverrides.contains(inputPath)) ? 
*input2.ref : *input.ref; if (input.isFlake) { - auto inputFlake = getInputFlake(); + auto inputFlake = getInputFlake(*input.ref); auto childNode = make_ref( inputFlake.lockedRef, From 0ff190107f2de65247b8f2c2f7c7995737c72e16 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 12 Feb 2025 16:54:48 +0100 Subject: [PATCH 142/361] Resolve merge conflict --- src/libflake/flake/flake.cc | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/src/libflake/flake/flake.cc b/src/libflake/flake/flake.cc index 5827668a2c5..507bef769b1 100644 --- a/src/libflake/flake/flake.cc +++ b/src/libflake/flake/flake.cc @@ -557,15 +557,9 @@ LockedFlake lockFlake( auto getInputFlake = [&](const FlakeRef & ref) { if (auto resolvedPath = resolveRelativePath()) { -<<<<<<< HEAD - return readFlake(state, *input.ref, *input.ref, *input.ref, *resolvedPath, inputPath); + return readFlake(state, ref, ref, ref, *resolvedPath, inputPath); } else { - return getFlake(state, *input.ref, useRegistries, flakeCache, inputPath); -======= - return readFlake(state, ref, ref, ref, *resolvedPath, inputAttrPath); - } else { - return getFlake(state, ref, useRegistries, flakeCache, inputAttrPath); ->>>>>>> 5c552b62f (lockFlake(): When refetching a locked flake, use the locked ref) + return getFlake(state, ref, useRegistries, flakeCache, inputPath); } }; From 970942f45836172fda410a638853382952189eb9 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 12 Feb 2025 21:50:20 +0100 Subject: [PATCH 143/361] Bump version --- .version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version b/.version index ed1d6005085..3953e8ad504 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.26.2 +2.26.3 From 86ccad698eb1c0679fc2be8ac59149211371358e Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 20 May 2024 08:36:58 -0400 Subject: [PATCH 144/361] Expose a bunch of things in the Legacy SSH Store for Hydra (cherry picked from commit 5eade4825221d3284fc6555cb20de2c7aa171d72) --- src/libstore/legacy-ssh-store.cc | 99 +++++++++++++++++++++++++++----- src/libstore/legacy-ssh-store.hh | 55 ++++++++++++++++++ 2 files changed, 140 insertions(+), 14 deletions(-) diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index eac360a4f7a..3f62794efc9 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -69,7 +69,7 @@ ref LegacySSHStore::openConnection() command.push_back("--store"); command.push_back(remoteStore.get()); } - conn->sshConn = master.startCommand(std::move(command)); + conn->sshConn = master.startCommand(std::move(command), std::list{extraSshArgs}); conn->to = FdSink(conn->sshConn->in.get()); conn->from = FdSource(conn->sshConn->out.get()); @@ -100,19 +100,31 @@ std::string LegacySSHStore::getUri() return *uriSchemes().begin() + "://" + host; } +std::map LegacySSHStore::queryPathInfosUncached( + const StorePathSet & paths) +{ + auto conn(connections->get()); + + /* No longer support missing NAR hash */ + assert(GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4); + + debug("querying remote host '%s' for info on '%s'", host, concatStringsSep(", ", printStorePathSet(paths))); + + auto infos = conn->queryPathInfos(*this, paths); + + for (const auto & [_, info] : infos) { + if (info.narHash == Hash::dummy) + throw Error("NAR hash is now mandatory"); + } + + return infos; +} void LegacySSHStore::queryPathInfoUncached(const StorePath & path, Callback> callback) noexcept { try { - auto conn(connections->get()); - - /* No longer support missing NAR hash */ 
- assert(GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4); - - debug("querying remote host '%s' for info on '%s'", host, printStorePath(path)); - - auto infos = conn->queryPathInfos(*this, {path}); + auto infos = queryPathInfosUncached({path}); switch (infos.size()) { case 0: @@ -120,9 +132,6 @@ void LegacySSHStore::queryPathInfoUncached(const StorePath & path, case 1: { auto & [path2, info] = *infos.begin(); - if (info.narHash == Hash::dummy) - throw Error("NAR hash is now mandatory"); - assert(path == path2); return callback(std::make_shared( std::move(path), @@ -193,13 +202,19 @@ void LegacySSHStore::addToStore(const ValidPathInfo & info, Source & source, void LegacySSHStore::narFromPath(const StorePath & path, Sink & sink) { - auto conn(connections->get()); - conn->narFromPath(*this, path, [&](auto & source) { + narFromPath(path, [&](auto & source) { copyNAR(source, sink); }); } +void LegacySSHStore::narFromPath(const StorePath & path, std::function fun) +{ + auto conn(connections->get()); + conn->narFromPath(*this, path, fun); +} + + static ServeProto::BuildOptions buildSettings() { return { @@ -223,6 +238,19 @@ BuildResult LegacySSHStore::buildDerivation(const StorePath & drvPath, const Bas return conn->getBuildDerivationResponse(*this); } +std::function LegacySSHStore::buildDerivationAsync( + const StorePath & drvPath, const BasicDerivation & drv, + const ServeProto::BuildOptions & options) +{ + // Until we have C++23 std::move_only_function + auto conn = std::make_shared::Handle>(connections->get()); + (*conn)->putBuildDerivationRequest(*this, drvPath, drv, options); + + return [this,conn]() -> BuildResult { + return (*conn)->getBuildDerivationResponse(*this); + }; +} + void LegacySSHStore::buildPaths(const std::vector & drvPaths, BuildMode buildMode, std::shared_ptr evalStore) { @@ -294,6 +322,32 @@ StorePathSet LegacySSHStore::queryValidPaths(const StorePathSet & paths, } +StorePathSet LegacySSHStore::queryValidPaths(const StorePathSet & paths, + bool lock, SubstituteFlag maybeSubstitute) +{ + auto conn(connections->get()); + return conn->queryValidPaths(*this, + lock, paths, maybeSubstitute); +} + + +void LegacySSHStore::addMultipleToStoreLegacy(Store & srcStore, const StorePathSet & paths) +{ + auto conn(connections->get()); + conn->to << ServeProto::Command::ImportPaths; + try { + srcStore.exportPaths(paths, conn->to); + } catch (...) { + conn->good = false; + throw; + } + conn->to.flush(); + + if (readInt(conn->from) != 1) + throw Error("remote machine failed to import closure"); +} + + void LegacySSHStore::connect() { auto conn(connections->get()); @@ -307,6 +361,23 @@ unsigned int LegacySSHStore::getProtocol() } +pid_t LegacySSHStore::getConnectionPid() +{ + auto conn(connections->get()); + return conn->sshConn->sshPid; +} + + +LegacySSHStore::ConnectionStats LegacySSHStore::getConnectionStats() +{ + auto conn(connections->get()); + return { + .bytesReceived = conn->from.read, + .bytesSent = conn->to.written, + }; +} + + /** * The legacy ssh protocol doesn't support checking for trusted-user. * Try using ssh-ng:// instead if you want to know. 
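A note on the `buildDerivationAsync` addition above (its declaration follows in the header below): it splits the serve-protocol round trip in two, sending the build request immediately and returning a closure that keeps the pooled connection handle alive via a `shared_ptr` until the caller asks for the response. A stripped-down sketch of that shape, using placeholder names rather than the actual Nix types:

    #include <functional>
    #include <memory>
    #include <string>

    struct Connection
    {
        std::string pending;
        void sendRequest(const std::string & r) { pending = r; }      // write side
        std::string readResponse() { return "result of " + pending; } // read side
    };

    // Send now, read later. The shared_ptr keeps the connection alive (and,
    // in the real code, checked out of the connection pool) until the
    // returned closure is destroyed.
    std::function<std::string()> requestAsync(std::shared_ptr<Connection> conn, std::string req)
    {
        conn->sendRequest(req);
        return [conn] { return conn->readResponse(); }; // call at most once
    }

As the accompanying comment warns, calling the returned function twice would read from the same connection twice; C++23 `std::move_only_function` would let the type system enforce the single call.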
diff --git a/src/libstore/legacy-ssh-store.hh b/src/libstore/legacy-ssh-store.hh index b541455b4e5..2444a7a662e 100644 --- a/src/libstore/legacy-ssh-store.hh +++ b/src/libstore/legacy-ssh-store.hh @@ -6,6 +6,7 @@ #include "ssh.hh" #include "callback.hh" #include "pool.hh" +#include "serve-protocol.hh" namespace nix { @@ -24,6 +25,11 @@ struct LegacySSHStoreConfig : virtual CommonSSHStoreConfig const Setting maxConnections{this, 1, "max-connections", "Maximum number of concurrent SSH connections."}; + /** + * Hack for hydra + */ + Strings extraSshArgs = {}; + const std::string name() override { return "SSH Store"; } static std::set uriSchemes() { return {"ssh"}; } @@ -60,11 +66,24 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor void queryPathInfoUncached(const StorePath & path, Callback> callback) noexcept override; + std::map queryPathInfosUncached( + const StorePathSet & paths); + void addToStore(const ValidPathInfo & info, Source & source, RepairFlag repair, CheckSigsFlag checkSigs) override; void narFromPath(const StorePath & path, Sink & sink) override; + /** + * Hands over the connection temporarily as source to the given + * function. The function must not consume beyond the NAR; it can + * not just blindly try to always read more bytes until it is + * cut-off. + * + * This is exposed for sake of Hydra. + */ + void narFromPath(const StorePath & path, std::function fun); + std::optional queryPathFromHashPart(const std::string & hashPart) override { unsupported("queryPathFromHashPart"); } @@ -93,6 +112,16 @@ public: BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, BuildMode buildMode) override; + /** + * Note, the returned function must only be called once, or we'll + * try to read from the connection twice. + * + * @todo Use C++23 `std::move_only_function`. + */ + std::function buildDerivationAsync( + const StorePath & drvPath, const BasicDerivation & drv, + const ServeProto::BuildOptions & options); + void buildPaths(const std::vector & drvPaths, BuildMode buildMode, std::shared_ptr evalStore) override; void ensurePath(const StorePath & path) override @@ -119,10 +148,36 @@ public: StorePathSet queryValidPaths(const StorePathSet & paths, SubstituteFlag maybeSubstitute = NoSubstitute) override; + /** + * Custom variation that atomically creates temp locks on the remote + * side. + * + * This exists to prevent a race where the remote host + * garbage-collects paths that are already there. Optionally, ask + * the remote host to substitute missing paths. + */ + StorePathSet queryValidPaths(const StorePathSet & paths, + bool lock, + SubstituteFlag maybeSubstitute = NoSubstitute); + + /** + * Just exists because this is exactly what Hydra was doing, and we + * don't yet want an algorithmic change. + */ + void addMultipleToStoreLegacy(Store & srcStore, const StorePathSet & paths); + void connect() override; unsigned int getProtocol() override; + struct ConnectionStats { + size_t bytesReceived, bytesSent; + }; + + ConnectionStats getConnectionStats(); + + pid_t getConnectionPid(); + /** * The legacy ssh protocol doesn't support checking for trusted-user. * Try using ssh-ng:// instead if you want to know. From 7112f8294c162db536b15f9d527033c9d641e057 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 23 May 2024 11:53:17 -0400 Subject: [PATCH 145/361] Add `SSHMaster::Connection::trySetBufferSize` It is unused in Nix currently, but will be used in Hydra. 
This reflects what Hydra does in
https://github.com/NixOS/hydra/pull/1387. We will probably want to use it
more widely for better SSH store performance, but this needs more testing
before we do that.

(cherry picked from commit 0d25cc65417647c454e3095650b87bc88351b384)
---
 src/libstore/ssh.cc | 15 +++++++++++++++
 src/libstore/ssh.hh | 12 ++++++++++++
 2 files changed, 27 insertions(+)

diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc
index 116a480bacc..f47cfbbec2c 100644
--- a/src/libstore/ssh.cc
+++ b/src/libstore/ssh.cc
@@ -240,4 +240,19 @@ Path SSHMaster::startMaster()
 
 #endif
 
+void SSHMaster::Connection::trySetBufferSize(size_t size)
+{
+#ifdef F_SETPIPE_SZ
+    /* This `fcntl` method of doing this takes a positive `int`. Check
+       and convert accordingly.
+
+       The function overall still takes `size_t` because this is more
+       portable for a platform-agnostic interface. */
+    assert(size <= INT_MAX);
+    int pipesize = size;
+    fcntl(in.get(), F_SETPIPE_SZ, pipesize);
+    fcntl(out.get(), F_SETPIPE_SZ, pipesize);
+#endif
+}
+
 }
diff --git a/src/libstore/ssh.hh b/src/libstore/ssh.hh
index 85be704ec9d..eb05df01174 100644
--- a/src/libstore/ssh.hh
+++ b/src/libstore/ssh.hh
@@ -54,6 +54,18 @@ public:
         Pid sshPid;
 #endif
         AutoCloseFD out, in;
+
+        /**
+         * Try to set the buffer size in both directions to the
+         * designated amount, if possible. If not possible, does
+         * nothing.
+         *
+         * Current implementation is to use `fcntl` with `F_SETPIPE_SZ`,
+         * which is Linux-only. For this implementation, `size` must
+         * convertable to an `int`. In other words, it must be within
+         * `[0, INT_MAX]`.
+         */
+        void trySetBufferSize(size_t size);
     };
 
     /**

From fa7f0d6d07bdbedd06904d52bd111e58cb3d64c9 Mon Sep 17 00:00:00 2001
From: John Ericson
Date: Sun, 16 Feb 2025 20:01:03 -0500
Subject: [PATCH 146/361] Allow setting `ssh://` pipe size

Exposed for Hydra. We could make it fancier but with (a) new store
settings (b) switch to `ssh-ng://` both in the works, it doesn't seem
worth it.
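`trySetBufferSize` above and the `connPipeSize` knob added in this patch both come down to the Linux-only `F_SETPIPE_SZ` fcntl, which resizes a pipe's in-kernel buffer. A standalone illustration of the mechanism (not code from this series; the 1 MiB figure is arbitrary):

    #include <cstdio>
    #include <fcntl.h>
    #include <unistd.h>

    int main()
    {
        int fds[2];
        if (pipe(fds) != 0) return 1;
    #ifdef F_SETPIPE_SZ
        // The kernel rounds the request up to a power of two; unprivileged
        // processes are capped by /proc/sys/fs/pipe-max-size.
        if (fcntl(fds[1], F_SETPIPE_SZ, 1024 * 1024) == -1)
            std::perror("F_SETPIPE_SZ");
        std::printf("pipe buffer is now %d bytes\n", fcntl(fds[1], F_GETPIPE_SZ));
    #endif
        close(fds[0]);
        close(fds[1]);
        return 0;
    }

A larger pipe between Nix and the remote `nix-store --serve` process means fewer read/write round trips while streaming NARs, which is presumably why Hydra wants to raise it.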
(cherry picked from commit 94a7c34b2f8285650e3130e5dc6ff5333eaa6dc8) --- src/libstore/legacy-ssh-store.cc | 3 +++ src/libstore/legacy-ssh-store.hh | 5 +++++ 2 files changed, 8 insertions(+) diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 3f62794efc9..3849f088dd5 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -70,6 +70,9 @@ ref LegacySSHStore::openConnection() command.push_back(remoteStore.get()); } conn->sshConn = master.startCommand(std::move(command), std::list{extraSshArgs}); + if (connPipeSize) { + conn->sshConn->trySetBufferSize(*connPipeSize); + } conn->to = FdSink(conn->sshConn->in.get()); conn->from = FdSource(conn->sshConn->out.get()); diff --git a/src/libstore/legacy-ssh-store.hh b/src/libstore/legacy-ssh-store.hh index 2444a7a662e..92aa4ae56d1 100644 --- a/src/libstore/legacy-ssh-store.hh +++ b/src/libstore/legacy-ssh-store.hh @@ -30,6 +30,11 @@ struct LegacySSHStoreConfig : virtual CommonSSHStoreConfig */ Strings extraSshArgs = {}; + /** + * Exposed for hydra + */ + std::optional connPipeSize; + const std::string name() override { return "SSH Store"; } static std::set uriSchemes() { return {"ssh"}; } From 7d168db83cde11e3cf9872f7275fd6664f594740 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 17 Feb 2025 11:36:47 +0100 Subject: [PATCH 147/361] getDefaultNixPath(): Don't add symlinks if the target doesn't exist (cherry picked from commit 8ac49ea5de0b763175af5b266dd258c544192036) --- src/libexpr/eval-settings.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/eval-settings.cc b/src/libexpr/eval-settings.cc index 4cbcb39b9e0..ade0abf9af6 100644 --- a/src/libexpr/eval-settings.cc +++ b/src/libexpr/eval-settings.cc @@ -57,7 +57,7 @@ Strings EvalSettings::getDefaultNixPath() { Strings res; auto add = [&](const Path & p, const std::string & s = std::string()) { - if (pathAccessible(p)) { + if (std::filesystem::exists(p)) { if (s.empty()) { res.push_back(p); } else { From 640ce50da12e81c078142d344c89a9b3494a3ccb Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 17 Feb 2025 11:50:54 +0100 Subject: [PATCH 148/361] resolveLookupPathPath(): Fix caching of negative lookups This avoids spamming in case the missing search path entry does not exist (#12480). 
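The fix described here (the diff follows) amounts to remembering failed resolutions as well as successful ones, so that a missing search-path entry is looked up, and complained about, only once per evaluation. A minimal, self-contained sketch of that pattern, with illustrative names rather than the real `EvalState` members:

    #include <map>
    #include <optional>
    #include <string>

    std::map<std::string, std::optional<std::string>> resolved;

    std::optional<std::string> resolveOnce(const std::string & entry)
    {
        if (auto it = resolved.find(entry); it != resolved.end())
            return it->second;             // cache hit, possibly a cached failure

        std::optional<std::string> result; // stays nullopt if the lookup fails
        // ... perform the actual (expensive, possibly warning) lookup here ...

        resolved.emplace(entry, result);   // std::nullopt is cached too
        return result;
    }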
(cherry picked from commit df08e1e204d04924bc546ed3ebb2fabf936aa5be) --- src/libexpr/eval.cc | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index dee764429e9..8aef85dc594 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -3070,8 +3070,11 @@ std::optional EvalState::resolveLookupPathPath(const LookupPath::Pat auto i = lookupPathResolved.find(value); if (i != lookupPathResolved.end()) return i->second; - auto finish = [&](SourcePath res) { - debug("resolved search path element '%s' to '%s'", value, res); + auto finish = [&](std::optional res) { + if (res) + debug("resolved search path element '%s' to '%s'", value, *res); + else + debug("failed to resolve search path element '%s'", value); lookupPathResolved.emplace(value, res); return res; }; @@ -3123,8 +3126,7 @@ std::optional EvalState::resolveLookupPathPath(const LookupPath::Pat } } - debug("failed to resolve search path element '%s'", value); - return std::nullopt; + return finish(std::nullopt); } From 80db87bd4c0ec214be8cc1705e6b5f11212605a1 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman <145775305+xokdvium@users.noreply.github.com> Date: Tue, 18 Feb 2025 01:57:33 +0300 Subject: [PATCH 149/361] Move code related to NIX_MAN_DIR from libstore to nix-cli This is a prerequisite to properly fixing man-pages once and for all [1]. Note that this patch leaves manpages for legacy commands in a borked state, pending the movement of manpages from nix-manual to nix-cli [2]. [1]: https://www.github.com/NixOS/nix/issues/12382 [2]: https://www.github.com/NixOS/nix/issues/12382#issuecomment-2663782043 (cherry picked from commit b1a38b3efe214b093910776f4a92cad0fc125a3e) --- src/libmain/shared.cc | 14 --------- src/libmain/shared.hh | 5 ---- src/libstore/globals.cc | 1 - src/libstore/globals.hh | 5 ---- src/nix-build/nix-build.cc | 1 + src/nix-channel/nix-channel.cc | 1 + .../nix-collect-garbage.cc | 1 + src/nix-copy-closure/nix-copy-closure.cc | 1 + src/nix-env/nix-env.cc | 1 + src/nix-instantiate/nix-instantiate.cc | 1 + src/nix-store/nix-store.cc | 1 + src/nix/hash.cc | 1 + src/nix/man-pages.cc | 29 +++++++++++++++++++ src/nix/man-pages.hh | 28 ++++++++++++++++++ src/nix/meson.build | 12 ++++++++ src/nix/prefetch.cc | 1 + src/nix/unix/daemon.cc | 1 + 17 files changed, 79 insertions(+), 25 deletions(-) create mode 100644 src/nix/man-pages.cc create mode 100644 src/nix/man-pages.hh diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 50f90bfb314..30e76c349ca 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -315,20 +315,6 @@ void printVersion(const std::string & programName) throw Exit(); } - -void showManPage(const std::string & name) -{ - restoreProcessContext(); - setEnv("MANPATH", settings.nixManDir.c_str()); - execlp("man", "man", name.c_str(), nullptr); - if (errno == ENOENT) { - // Not SysError because we don't want to suffix the errno, aka No such file or directory. - throw Error("The '%1%' command was not found, but it is needed for '%2%' and some other '%3%' commands' help text. 
Perhaps you could install the '%1%' command?", "man", name.c_str(), "nix-*"); - } - throw SysError("command 'man %1%' failed", name.c_str()); -} - - int handleExceptions(const std::string & programName, std::function fun) { ReceiveInterrupts receiveInterrupts; // FIXME: need better place for this diff --git a/src/libmain/shared.hh b/src/libmain/shared.hh index 712b404d3e1..a6a18ceb068 100644 --- a/src/libmain/shared.hh +++ b/src/libmain/shared.hh @@ -70,11 +70,6 @@ struct LegacyArgs : public MixCommonArgs, public RootArgs }; -/** - * Show the manual page for the specified program. - */ -void showManPage(const std::string & name); - /** * The constructor of this class starts a pager if standard output is a * terminal and $PAGER is set. Standard output is redirected to the diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index b64e73c265b..e908fc81f17 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -65,7 +65,6 @@ Settings::Settings() , nixStateDir(canonPath(getEnvNonEmpty("NIX_STATE_DIR").value_or(NIX_STATE_DIR))) , nixConfDir(canonPath(getEnvNonEmpty("NIX_CONF_DIR").value_or(NIX_CONF_DIR))) , nixUserConfFiles(getUserConfigFiles()) - , nixManDir(canonPath(NIX_MAN_DIR)) , nixDaemonSocketFile(canonPath(getEnvNonEmpty("NIX_DAEMON_SOCKET_PATH").value_or(nixStateDir + DEFAULT_SOCKET_PATH))) { #ifndef _WIN32 diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index ff3df46ba9e..6b9a87ce36e 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -84,11 +84,6 @@ public: */ std::vector nixUserConfFiles; - /** - * The directory where the man pages are stored. - */ - Path nixManDir; - /** * File name of the socket the daemon listens to. */ diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index de01e1afcde..5410f0cab96 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -27,6 +27,7 @@ #include "users.hh" #include "network-proxy.hh" #include "compatibility-settings.hh" +#include "man-pages.hh" using namespace nix; using namespace std::string_literals; diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc index 56d1d7abb77..ee61db99488 100644 --- a/src/nix-channel/nix-channel.cc +++ b/src/nix-channel/nix-channel.cc @@ -8,6 +8,7 @@ #include "users.hh" #include "tarball.hh" #include "self-exe.hh" +#include "man-pages.hh" #include #include diff --git a/src/nix-collect-garbage/nix-collect-garbage.cc b/src/nix-collect-garbage/nix-collect-garbage.cc index 20d5161df09..a060a01fd15 100644 --- a/src/nix-collect-garbage/nix-collect-garbage.cc +++ b/src/nix-collect-garbage/nix-collect-garbage.cc @@ -7,6 +7,7 @@ #include "shared.hh" #include "globals.hh" #include "legacy.hh" +#include "man-pages.hh" #include #include diff --git a/src/nix-copy-closure/nix-copy-closure.cc b/src/nix-copy-closure/nix-copy-closure.cc index b64af758fcb..15bff0a0ad5 100644 --- a/src/nix-copy-closure/nix-copy-closure.cc +++ b/src/nix-copy-closure/nix-copy-closure.cc @@ -2,6 +2,7 @@ #include "realisation.hh" #include "store-api.hh" #include "legacy.hh" +#include "man-pages.hh" using namespace nix; diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index c99c1088ebb..aa1edb4c8e3 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -17,6 +17,7 @@ #include "legacy.hh" #include "eval-settings.hh" // for defexpr #include "terminal.hh" +#include "man-pages.hh" #include #include diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index 
09d35483205..0cf926369e5 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -12,6 +12,7 @@ #include "local-fs-store.hh" #include "common-eval-args.hh" #include "legacy.hh" +#include "man-pages.hh" #include #include diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 99bb2c72601..3fb69a29d5e 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -12,6 +12,7 @@ #include "legacy.hh" #include "posix-source-accessor.hh" #include "path-with-outputs.hh" +#include "man-pages.hh" #ifndef _WIN32 // TODO implement on Windows or provide allowed-to-noop interface # include "local-store.hh" diff --git a/src/nix/hash.cc b/src/nix/hash.cc index eac421d1260..91bba47f42b 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -8,6 +8,7 @@ #include "git.hh" #include "posix-source-accessor.hh" #include "misc-store-flags.hh" +#include "man-pages.hh" using namespace nix; diff --git a/src/nix/man-pages.cc b/src/nix/man-pages.cc new file mode 100644 index 00000000000..a98a771cca1 --- /dev/null +++ b/src/nix/man-pages.cc @@ -0,0 +1,29 @@ +#include "man-pages.hh" +#include "file-system.hh" +#include "current-process.hh" +#include "environment-variables.hh" + +namespace nix { + +std::filesystem::path getNixManDir() +{ + return canonPath(NIX_MAN_DIR); +} + +void showManPage(const std::string & name) +{ + restoreProcessContext(); + setEnv("MANPATH", getNixManDir().c_str()); + execlp("man", "man", name.c_str(), nullptr); + if (errno == ENOENT) { + // Not SysError because we don't want to suffix the errno, aka No such file or directory. + throw Error( + "The '%1%' command was not found, but it is needed for '%2%' and some other '%3%' commands' help text. Perhaps you could install the '%1%' command?", + "man", + name.c_str(), + "nix-*"); + } + throw SysError("command 'man %1%' failed", name.c_str()); +} + +} diff --git a/src/nix/man-pages.hh b/src/nix/man-pages.hh new file mode 100644 index 00000000000..9ba035af816 --- /dev/null +++ b/src/nix/man-pages.hh @@ -0,0 +1,28 @@ +#pragma once +///@file + +#include +#include + +namespace nix { + +/** + * @brief Get path to the nix manual dir. + * + * Nix relies on the man pages being available at a NIX_MAN_DIR for + * displaying help messaged for legacy cli. + * + * NIX_MAN_DIR is a compile-time parameter, so man pages are unlikely to work + * for cases when the nix executable is installed out-of-store or as a static binary. + * + */ +std::filesystem::path getNixManDir(); + +/** + * Show the manual page for the specified program. + * + * @param name Name of the man item. + */ +void showManPage(const std::string & name); + +} diff --git a/src/nix/meson.build b/src/nix/meson.build index 2698cc873da..e8d74080385 100644 --- a/src/nix/meson.build +++ b/src/nix/meson.build @@ -90,6 +90,7 @@ nix_sources = [config_h] + files( 'ls.cc', 'main.cc', 'make-content-addressed.cc', + 'man-pages.cc', 'nar.cc', 'optimise-store.cc', 'path-from-hash-part.cc', @@ -182,6 +183,16 @@ if host_machine.system() != 'windows' ] endif +fs = import('fs') +prefix = get_option('prefix') + +mandir = get_option('mandir') +mandir = fs.is_absolute(mandir) ? 
mandir : prefix / mandir + +cpp_args= [ + '-DNIX_MAN_DIR="@0@"'.format(mandir) +] + include_dirs = [include_directories('.')] this_exe = executable( @@ -189,6 +200,7 @@ this_exe = executable( sources, dependencies : deps_private_subproject + deps_private + deps_other, include_directories : include_dirs, + cpp_args : cpp_args, link_args: linker_export_flags, install : true, ) diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc index db7d9e4efe6..84c0224e223 100644 --- a/src/nix/prefetch.cc +++ b/src/nix/prefetch.cc @@ -12,6 +12,7 @@ #include "posix-source-accessor.hh" #include "misc-store-flags.hh" #include "terminal.hh" +#include "man-pages.hh" #include diff --git a/src/nix/unix/daemon.cc b/src/nix/unix/daemon.cc index 746963a0103..b4c7c10edb1 100644 --- a/src/nix/unix/daemon.cc +++ b/src/nix/unix/daemon.cc @@ -15,6 +15,7 @@ #include "finally.hh" #include "legacy.hh" #include "daemon.hh" +#include "man-pages.hh" #include #include From a004c84e85fa03137afeb08d8f93a6c0df7ed6bf Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman <145775305+xokdvium@users.noreply.github.com> Date: Tue, 18 Feb 2025 13:01:39 +0300 Subject: [PATCH 150/361] Don't override default man search paths By appending a colon to MANPATH NIX_MAN_DIR gets prepended to the final MANPATH before default search paths. This makes man still consider default search paths, but prefers NIX_MAN_DIR (if it exists). It still makes sense to point NIX_MAN_DIR to a correct location by moving man pages build from nix-manual.man to nix-cli.man, but this should fix most common use-cases where nix is installed globally. (cherry picked from commit 95f16a3275a3d23afe4f311cb793d7a5d47222e1) --- src/nix/man-pages.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix/man-pages.cc b/src/nix/man-pages.cc index a98a771cca1..e9e89bb62a7 100644 --- a/src/nix/man-pages.cc +++ b/src/nix/man-pages.cc @@ -13,7 +13,7 @@ std::filesystem::path getNixManDir() void showManPage(const std::string & name) { restoreProcessContext(); - setEnv("MANPATH", getNixManDir().c_str()); + setEnv("MANPATH", (getNixManDir().string() + ":").c_str()); execlp("man", "man", name.c_str(), nullptr); if (errno == ENOENT) { // Not SysError because we don't want to suffix the errno, aka No such file or directory. From f02a7b880e32015dd165975a4c6c87beab2ee077 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 18 Feb 2025 11:56:19 +0100 Subject: [PATCH 151/361] startDaemon(): Detect if the daemon crashes before creating the socket This avoids timeouts like those seen in https://github.com/NixOS/nix/actions/runs/13376958708/job/37358120348?pr=6962. (cherry picked from commit 11c42cb2e1b5bb44719e40d9c17750fb8a99d750) --- tests/functional/common/functions.sh | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/tests/functional/common/functions.sh b/tests/functional/common/functions.sh index bf3dd2ca861..1b2ec8fe0e8 100644 --- a/tests/functional/common/functions.sh +++ b/tests/functional/common/functions.sh @@ -67,7 +67,7 @@ startDaemon() { die "startDaemon: not supported when testing on NixOS. Is it really needed? If so add conditionals; e.g. if ! isTestOnNixOS; then ..." fi - # Don’t start the daemon twice, as this would just make it loop indefinitely + # Don't start the daemon twice, as this would just make it loop indefinitely. if [[ "${_NIX_TEST_DAEMON_PID-}" != '' ]]; then return fi @@ -76,15 +76,19 @@ startDaemon() { PATH=$DAEMON_PATH nix --extra-experimental-features 'nix-command' daemon & _NIX_TEST_DAEMON_PID=$! 
export _NIX_TEST_DAEMON_PID - for ((i = 0; i < 300; i++)); do + for ((i = 0; i < 60; i++)); do if [[ -S $NIX_DAEMON_SOCKET_PATH ]]; then DAEMON_STARTED=1 break; fi + if ! kill -0 "$_NIX_TEST_DAEMON_PID"; then + echo "daemon died unexpectedly" >&2 + exit 1 + fi sleep 0.1 done if [[ -z ${DAEMON_STARTED+x} ]]; then - fail "Didn’t manage to start the daemon" + fail "Didn't manage to start the daemon" fi trap "killDaemon" EXIT # Save for if daemon is killed @@ -97,7 +101,7 @@ killDaemon() { die "killDaemon: not supported when testing on NixOS. Is it really needed? If so add conditionals; e.g. if ! isTestOnNixOS; then ..." fi - # Don’t fail trying to stop a non-existant daemon twice + # Don't fail trying to stop a non-existant daemon twice. if [[ "${_NIX_TEST_DAEMON_PID-}" == '' ]]; then return fi @@ -219,7 +223,7 @@ assertStderr() { needLocalStore() { if [[ "$NIX_REMOTE" == "daemon" ]]; then - skipTest "Can’t run through the daemon ($1)" + skipTest "Can't run through the daemon ($1)" fi } From cc3ad9bd3af7da510a1c41eabf6f761c713143fd Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 17 Feb 2025 17:17:37 +0100 Subject: [PATCH 152/361] Support libgit2 1.9.0 For when the overlay is used with nixos-unstable. 1.9.0 has our patches. (cherry picked from commit b0bbb1252a8ae8d925e2cb45d1c778b9c00587e2) --- packaging/dependencies.nix | 51 ++++++++++++++++++++------------------ 1 file changed, 27 insertions(+), 24 deletions(-) diff --git a/packaging/dependencies.nix b/packaging/dependencies.nix index afbc31fc6df..b23c9cbcd1a 100644 --- a/packaging/dependencies.nix +++ b/packaging/dependencies.nix @@ -138,7 +138,8 @@ let enableParallelBuilding = true; }; in -scope: { +scope: +{ inherit stdenv; aws-sdk-cpp = @@ -174,6 +175,31 @@ scope: { installPhase = lib.replaceStrings [ "--without-python" ] [ "" ] old.installPhase; }); + inherit resolvePath filesetToSource; + + mkMesonDerivation = mkPackageBuilder [ + miscGoodPractice + localSourceLayer + mesonLayer + ]; + mkMesonExecutable = mkPackageBuilder [ + miscGoodPractice + bsdNoLinkAsNeeded + localSourceLayer + mesonLayer + mesonBuildLayer + ]; + mkMesonLibrary = mkPackageBuilder [ + miscGoodPractice + bsdNoLinkAsNeeded + localSourceLayer + mesonLayer + mesonBuildLayer + mesonLibraryLayer + ]; +} +# libgit2: Nixpkgs 24.11 has < 1.9.0 +// lib.optionalAttrs (!lib.versionAtLeast pkgs.libgit2.version "1.9.0") { libgit2 = pkgs.libgit2.overrideAttrs (attrs: { cmakeFlags = attrs.cmakeFlags or [ ] ++ [ "-DUSE_SSH=exec" ]; nativeBuildInputs = @@ -203,27 +229,4 @@ scope: { ./patches/libgit2-packbuilder-callback-interruptible.patch ]; }); - - inherit resolvePath filesetToSource; - - mkMesonDerivation = mkPackageBuilder [ - miscGoodPractice - localSourceLayer - mesonLayer - ]; - mkMesonExecutable = mkPackageBuilder [ - miscGoodPractice - bsdNoLinkAsNeeded - localSourceLayer - mesonLayer - mesonBuildLayer - ]; - mkMesonLibrary = mkPackageBuilder [ - miscGoodPractice - bsdNoLinkAsNeeded - localSourceLayer - mesonLayer - mesonBuildLayer - mesonLibraryLayer - ]; } From 6c61d0ab8ccd47d52859de7af89c5abb6901a63c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 18 Feb 2025 23:52:26 +0100 Subject: [PATCH 153/361] Formatting --- flake.nix | 55 ++++++------- packaging/hydra.nix | 184 ++++++++++++++++++++++---------------------- 2 files changed, 120 insertions(+), 119 deletions(-) diff --git a/flake.nix b/flake.nix index 74b0a9809eb..29111b45382 100644 --- a/flake.nix +++ b/flake.nix @@ -34,7 +34,9 @@ officialRelease = true; - linux32BitSystems = [ /* "i686-linux" */ ]; 
+ linux32BitSystems = [ + # "i686-linux" + ]; linux64BitSystems = [ "x86_64-linux" "aarch64-linux" @@ -345,7 +347,7 @@ # These attributes go right into `packages.`. "${pkgName}" = nixpkgsFor.${system}.native.nixComponents.${pkgName}; #"${pkgName}-static" = nixpkgsFor.${system}.static.nixComponents.${pkgName}; - "${pkgName}-llvm" = nixpkgsFor.${system}.llvm.nixComponents.${pkgName}; + #"${pkgName}-llvm" = nixpkgsFor.${system}.llvm.nixComponents.${pkgName}; } // lib.optionalAttrs supportsCross ( flatMapAttrs (lib.genAttrs crossSystems (_: { })) ( @@ -402,35 +404,34 @@ } ) ) - // lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.isDarwin) ( - /* - prefixAttrs "static" ( - forAllStdenvs ( - stdenvName: - makeShell { - pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".pkgsStatic; - } + /* + // lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.isDarwin) ( + prefixAttrs "static" ( + forAllStdenvs ( + stdenvName: + makeShell { + pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".pkgsStatic; + } + ) ) - ) - // - */ - prefixAttrs "llvm" ( - forAllStdenvs ( - stdenvName: - makeShell { - pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".pkgsLLVM; - } + // prefixAttrs "llvm" ( + forAllStdenvs ( + stdenvName: + makeShell { + pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".pkgsLLVM; + } + ) ) - ) - // prefixAttrs "cross" ( - forAllCrossSystems ( - crossSystem: - makeShell { - pkgs = nixpkgsFor.${system}.cross.${crossSystem}; - } + // prefixAttrs "cross" ( + forAllCrossSystems ( + crossSystem: + makeShell { + pkgs = nixpkgsFor.${system}.cross.${crossSystem}; + } + ) ) ) - ) + */ // { default = self.devShells.${system}.native-stdenvPackages; } diff --git a/packaging/hydra.nix b/packaging/hydra.nix index be1b69668ee..debd98cf2aa 100644 --- a/packaging/hydra.nix +++ b/packaging/hydra.nix @@ -66,62 +66,62 @@ in )) [ "i686-linux" ]; /* - buildStatic = forAllPackages ( - pkgName: - lib.genAttrs linux64BitSystems (system: nixpkgsFor.${system}.static.nixComponents.${pkgName}) - ); + buildStatic = forAllPackages ( + pkgName: + lib.genAttrs linux64BitSystems (system: nixpkgsFor.${system}.static.nixComponents.${pkgName}) + ); - buildCross = forAllPackages ( - pkgName: - # Hack to avoid non-evaling package - ( - if pkgName == "nix-functional-tests" then - lib.flip builtins.removeAttrs [ "x86_64-w64-mingw32" ] - else - lib.id - ) + buildCross = forAllPackages ( + pkgName: + # Hack to avoid non-evaling package ( - forAllCrossSystems ( - crossSystem: - lib.genAttrs [ "x86_64-linux" ] ( - system: nixpkgsFor.${system}.cross.${crossSystem}.nixComponents.${pkgName} + if pkgName == "nix-functional-tests" then + lib.flip builtins.removeAttrs [ "x86_64-w64-mingw32" ] + else + lib.id + ) + ( + forAllCrossSystems ( + crossSystem: + lib.genAttrs [ "x86_64-linux" ] ( + system: nixpkgsFor.${system}.cross.${crossSystem}.nixComponents.${pkgName} + ) ) ) - ) - ); + ); - buildNoGc = - let - components = forAllSystems ( - system: - nixpkgsFor.${system}.native.nixComponents.overrideScope ( - self: super: { - nix-expr = super.nix-expr.override { enableGC = false; }; - } - ) - ); - in - forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName})); + buildNoGc = + let + components = forAllSystems ( + system: + nixpkgsFor.${system}.native.nixComponents.overrideScope ( + self: super: { + nix-expr = super.nix-expr.override { enableGC = false; }; + } + ) + ); + in + forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName})); - buildNoTests = 
forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.nix-cli); + buildNoTests = forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.nix-cli); - # Toggles some settings for better coverage. Windows needs these - # library combinations, and Debian build Nix with GNU readline too. - buildReadlineNoMarkdown = - let - components = forAllSystems ( - system: - nixpkgsFor.${system}.native.nixComponents.overrideScope ( - self: super: { - nix-cmd = super.nix-cmd.override { - enableMarkdown = false; - readlineFlavor = "readline"; - }; - } - ) - ); - in - forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName})); + # Toggles some settings for better coverage. Windows needs these + # library combinations, and Debian build Nix with GNU readline too. + buildReadlineNoMarkdown = + let + components = forAllSystems ( + system: + nixpkgsFor.${system}.native.nixComponents.overrideScope ( + self: super: { + nix-cmd = super.nix-cmd.override { + enableMarkdown = false; + readlineFlavor = "readline"; + }; + } + ) + ); + in + forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName})); */ # Perl bindings for various platforms. @@ -135,31 +135,31 @@ in ); /* - binaryTarballCross = lib.genAttrs [ "x86_64-linux" ] ( - system: - forAllCrossSystems ( - crossSystem: - binaryTarball nixpkgsFor.${system}.cross.${crossSystem}.nix - nixpkgsFor.${system}.cross.${crossSystem} - ) - ); + binaryTarballCross = lib.genAttrs [ "x86_64-linux" ] ( + system: + forAllCrossSystems ( + crossSystem: + binaryTarball nixpkgsFor.${system}.cross.${crossSystem}.nix + nixpkgsFor.${system}.cross.${crossSystem} + ) + ); - # The first half of the installation script. This is uploaded - # to https://nixos.org/nix/install. It downloads the binary - # tarball for the user's system and calls the second half of the - # installation script. - installerScript = installScriptFor [ - # Native - self.hydraJobs.binaryTarball."x86_64-linux" - self.hydraJobs.binaryTarball."i686-linux" - self.hydraJobs.binaryTarball."aarch64-linux" - self.hydraJobs.binaryTarball."x86_64-darwin" - self.hydraJobs.binaryTarball."aarch64-darwin" - # Cross - self.hydraJobs.binaryTarballCross."x86_64-linux"."armv6l-unknown-linux-gnueabihf" - self.hydraJobs.binaryTarballCross."x86_64-linux"."armv7l-unknown-linux-gnueabihf" - self.hydraJobs.binaryTarballCross."x86_64-linux"."riscv64-unknown-linux-gnu" - ]; + # The first half of the installation script. This is uploaded + # to https://nixos.org/nix/install. It downloads the binary + # tarball for the user's system and calls the second half of the + # installation script. + installerScript = installScriptFor [ + # Native + self.hydraJobs.binaryTarball."x86_64-linux" + self.hydraJobs.binaryTarball."i686-linux" + self.hydraJobs.binaryTarball."aarch64-linux" + self.hydraJobs.binaryTarball."x86_64-darwin" + self.hydraJobs.binaryTarball."aarch64-darwin" + # Cross + self.hydraJobs.binaryTarballCross."x86_64-linux"."armv6l-unknown-linux-gnueabihf" + self.hydraJobs.binaryTarballCross."x86_64-linux"."armv7l-unknown-linux-gnueabihf" + self.hydraJobs.binaryTarballCross."x86_64-linux"."riscv64-unknown-linux-gnu" + ]; */ installerScriptForGHA = forAllSystems ( @@ -232,25 +232,25 @@ in }; /* - installTests = forAllSystems ( - system: - let - pkgs = nixpkgsFor.${system}.native; - in - pkgs.runCommand "install-tests" { - againstSelf = testNixVersions pkgs pkgs.nix; - againstCurrentLatest = - # FIXME: temporarily disable this on macOS because of #3605. 
- if system == "x86_64-linux" then testNixVersions pkgs pkgs.nixVersions.latest else null; - # Disabled because the latest stable version doesn't handle - # `NIX_DAEMON_SOCKET_PATH` which is required for the tests to work - # againstLatestStable = testNixVersions pkgs pkgs.nixStable; - } "touch $out" - ); + installTests = forAllSystems ( + system: + let + pkgs = nixpkgsFor.${system}.native; + in + pkgs.runCommand "install-tests" { + againstSelf = testNixVersions pkgs pkgs.nix; + againstCurrentLatest = + # FIXME: temporarily disable this on macOS because of #3605. + if system == "x86_64-linux" then testNixVersions pkgs pkgs.nixVersions.latest else null; + # Disabled because the latest stable version doesn't handle + # `NIX_DAEMON_SOCKET_PATH` which is required for the tests to work + # againstLatestStable = testNixVersions pkgs pkgs.nixStable; + } "touch $out" + ); - installerTests = import ../tests/installer { - binaryTarballs = self.hydraJobs.binaryTarball; - inherit nixpkgsFor; - }; + installerTests = import ../tests/installer { + binaryTarballs = self.hydraJobs.binaryTarball; + inherit nixpkgsFor; + }; */ } From ed4aeb48750d63ec97518a14deda377b043082fd Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 19 Feb 2025 00:24:42 +0100 Subject: [PATCH 154/361] Fix hydraJobs.tests.functional_user --- tests/functional/common/init.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/functional/common/init.sh b/tests/functional/common/init.sh index 63f732d6a17..6e9bffec56d 100755 --- a/tests/functional/common/init.sh +++ b/tests/functional/common/init.sh @@ -12,6 +12,7 @@ if isTestOnNixOS; then ! test -e "$test_nix_conf" cat > "$test_nix_conf" < Date: Wed, 19 Feb 2025 00:25:04 +0100 Subject: [PATCH 155/361] Fix flake-regression dependency --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ad1ee531705..86a673b37f4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -75,7 +75,7 @@ jobs: ; flake_regressions: - needs: vm_tests + needs: build_x86_64-linux runs-on: UbuntuLatest32Cores128G steps: - name: Checkout nix From eabca75d0ce6de7f4e4bd37037c6f2056375e9e3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 19 Feb 2025 00:52:32 +0100 Subject: [PATCH 156/361] Test on x86_64-darwin --- .github/workflows/ci.yml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 86a673b37f4..fa5f934ff57 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -34,6 +34,11 @@ jobs: with: os: UbuntuLatest32Cores128GArm + build_x86_64-darwin: + uses: ./.github/workflows/build.yml + with: + os: macos-13 + build_aarch64-darwin: uses: ./.github/workflows/build.yml with: @@ -51,6 +56,12 @@ jobs: with: os: UbuntuLatest32Cores128GArm + test_x86_64-darwin: + uses: ./.github/workflows/test.yml + needs: build_aarch64-darwin + with: + os: macos-13 + test_aarch64-darwin: uses: ./.github/workflows/test.yml needs: build_aarch64-darwin From 21998464b7986f50bc20aa4a8ca3ab416b10d536 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 18 Feb 2025 11:22:00 +0100 Subject: [PATCH 157/361] Restore detailed Nix CLI version ... as intended. Requirements: - don't build fresh libraries for each git commit - have git commit in the CLI Bug: - echo ${version} went into the wrong file => use the fact that it's a symlink, not just for reading but also for writing. 
(cherry picked from commit bba4e6b061f53cbc77d47408468f9bc0f534281b) --- src/libstore/globals.cc | 2 +- src/libstore/globals.hh | 10 +++++++++- src/nix/main.cc | 3 +++ src/nix/meson.build | 3 +++ src/nix/package.nix | 6 +++--- 5 files changed, 19 insertions(+), 5 deletions(-) diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index e908fc81f17..d7c000dfab7 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -242,7 +242,7 @@ Path Settings::getDefaultSSLCertFile() return ""; } -const std::string nixVersion = PACKAGE_VERSION; +std::string nixVersion = PACKAGE_VERSION; NLOHMANN_JSON_SERIALIZE_ENUM(SandboxMode, { {SandboxMode::smEnabled, true}, diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 6b9a87ce36e..1682d572c81 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -1248,7 +1248,15 @@ void loadConfFile(AbstractConfig & config); // Used by the Settings constructor std::vector getUserConfigFiles(); -extern const std::string nixVersion; +/** + * The version of Nix itself. + * + * This is not `const`, so that the Nix CLI can provide a more detailed version + * number including the git revision, without having to "re-compile" the entire + * set of Nix libraries to include that version, even when those libraries are + * not affected by the change. + */ +extern std::string nixVersion; /** * @param loadConfig Whether to load configuration from `nix.conf`, `NIX_CONFIG`, etc. May be disabled for unit tests. diff --git a/src/nix/main.cc b/src/nix/main.cc index b0e26e093f1..3db17ef6932 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -557,6 +557,9 @@ void mainWrapped(int argc, char * * argv) int main(int argc, char * * argv) { + // The CLI has a more detailed version than the libraries; see nixVersion. + nix::nixVersion = NIX_CLI_VERSION; + // Increase the default stack size for the evaluator and for // libstdc++'s std::regex. nix::setStackSize(64 * 1024 * 1024); diff --git a/src/nix/meson.build b/src/nix/meson.build index e8d74080385..1ad3d5b5a8c 100644 --- a/src/nix/meson.build +++ b/src/nix/meson.build @@ -35,6 +35,9 @@ subdir('nix-meson-build-support/windows-version') configdata = configuration_data() +# The CLI has a more detailed version string than the libraries; see `nixVersion` +configdata.set_quoted('NIX_CLI_VERSION', meson.project_version()) + fs = import('fs') bindir = get_option('bindir') diff --git a/src/nix/package.nix b/src/nix/package.nix index 6e59adc3800..bb90be1eff2 100644 --- a/src/nix/package.nix +++ b/src/nix/package.nix @@ -92,11 +92,11 @@ mkMesonExecutable (finalAttrs: { ]; preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. + # Update the repo-global .version file. + # Symlink ./.version points there, but by default only workDir is writable. '' chmod u+w ./.version - echo ${version} > ../../../.version + echo ${version} > ./.version ''; mesonFlags = [ From b175e5bb6dcd945316cbab531a0d97574ad3f0b1 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 18 Feb 2025 11:41:35 +0100 Subject: [PATCH 158/361] Write just ./.version on all components This way it's easier to get right. See previous commit. 
(cherry picked from commit 3556f6bf4cd6aa7ffea760c03b8e91ddbe3fcde8) --- packaging/dependencies.nix | 15 +++++++++++++++ src/libcmd/package.nix | 8 -------- src/libexpr-c/package.nix | 8 -------- src/libexpr-test-support/package.nix | 8 -------- src/libexpr-tests/package.nix | 8 -------- src/libexpr/package.nix | 8 -------- src/libfetchers-tests/package.nix | 8 -------- src/libfetchers/package.nix | 8 -------- src/libflake-c/package.nix | 8 -------- src/libflake-tests/package.nix | 8 -------- src/libflake/package.nix | 8 -------- src/libmain-c/package.nix | 8 -------- src/libmain/package.nix | 8 -------- src/libstore-c/package.nix | 8 -------- src/libstore-test-support/package.nix | 8 -------- src/libstore-tests/package.nix | 8 -------- src/libstore/package.nix | 8 -------- src/libutil-c/package.nix | 8 -------- src/libutil-test-support/package.nix | 8 -------- src/libutil-tests/package.nix | 8 -------- src/libutil/package.nix | 11 ----------- src/nix/package.nix | 8 -------- tests/functional/package.nix | 8 +------- 23 files changed, 16 insertions(+), 178 deletions(-) diff --git a/packaging/dependencies.nix b/packaging/dependencies.nix index b23c9cbcd1a..20992555c17 100644 --- a/packaging/dependencies.nix +++ b/packaging/dependencies.nix @@ -42,6 +42,18 @@ let mkPackageBuilder = exts: userFn: stdenv.mkDerivation (lib.extends (lib.composeManyExtensions exts) userFn); + setVersionLayer = finalAttrs: prevAttrs: { + preConfigure = + prevAttrs.prevAttrs or "" + + + # Update the repo-global .version file. + # Symlink ./.version points there, but by default only workDir is writable. + '' + chmod u+w ./.version + echo ${finalAttrs.version} > ./.version + ''; + }; + localSourceLayer = finalAttrs: prevAttrs: let @@ -180,12 +192,14 @@ scope: mkMesonDerivation = mkPackageBuilder [ miscGoodPractice localSourceLayer + setVersionLayer mesonLayer ]; mkMesonExecutable = mkPackageBuilder [ miscGoodPractice bsdNoLinkAsNeeded localSourceLayer + setVersionLayer mesonLayer mesonBuildLayer ]; @@ -194,6 +208,7 @@ scope: bsdNoLinkAsNeeded localSourceLayer mesonLayer + setVersionLayer mesonBuildLayer mesonLibraryLayer ]; diff --git a/src/libcmd/package.nix b/src/libcmd/package.nix index d155d9f1e62..d459d1c20fb 100644 --- a/src/libcmd/package.nix +++ b/src/libcmd/package.nix @@ -64,14 +64,6 @@ mkMesonLibrary (finalAttrs: { nlohmann_json ]; - preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. - '' - chmod u+w ./.version - echo ${version} > ../../.version - ''; - mesonFlags = [ (lib.mesonEnable "markdown" enableMarkdown) (lib.mesonOption "readline-flavor" readlineFlavor) diff --git a/src/libexpr-c/package.nix b/src/libexpr-c/package.nix index ad1ea371c2d..694fbc1fe78 100644 --- a/src/libexpr-c/package.nix +++ b/src/libexpr-c/package.nix @@ -36,14 +36,6 @@ mkMesonLibrary (finalAttrs: { nix-expr ]; - preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. - '' - chmod u+w ./.version - echo ${version} > ../../.version - ''; - mesonFlags = [ ]; diff --git a/src/libexpr-test-support/package.nix b/src/libexpr-test-support/package.nix index 5628d606a45..44b0ff38631 100644 --- a/src/libexpr-test-support/package.nix +++ b/src/libexpr-test-support/package.nix @@ -40,14 +40,6 @@ mkMesonLibrary (finalAttrs: { rapidcheck ]; - preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. 
- '' - chmod u+w ./.version - echo ${version} > ../../.version - ''; - mesonFlags = [ ]; diff --git a/src/libexpr-tests/package.nix b/src/libexpr-tests/package.nix index bb5acb7c873..51d52e935bf 100644 --- a/src/libexpr-tests/package.nix +++ b/src/libexpr-tests/package.nix @@ -46,14 +46,6 @@ mkMesonExecutable (finalAttrs: { gtest ]; - preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. - '' - chmod u+w ./.version - echo ${version} > ../../.version - ''; - mesonFlags = [ ]; diff --git a/src/libexpr/package.nix b/src/libexpr/package.nix index afd01c3846e..533dae9f253 100644 --- a/src/libexpr/package.nix +++ b/src/libexpr/package.nix @@ -77,14 +77,6 @@ mkMesonLibrary (finalAttrs: { nlohmann_json ] ++ lib.optional enableGC boehmgc; - preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. - '' - chmod u+w ./.version - echo ${version} > ../../.version - ''; - mesonFlags = [ (lib.mesonEnable "gc" enableGC) ]; diff --git a/src/libfetchers-tests/package.nix b/src/libfetchers-tests/package.nix index f2680e9b3c1..1e379fc5ade 100644 --- a/src/libfetchers-tests/package.nix +++ b/src/libfetchers-tests/package.nix @@ -44,14 +44,6 @@ mkMesonExecutable (finalAttrs: { gtest ]; - preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. - '' - chmod u+w ./.version - echo ${version} > ../../.version - ''; - mesonFlags = [ ]; diff --git a/src/libfetchers/package.nix b/src/libfetchers/package.nix index b0aecd04979..3f52e987800 100644 --- a/src/libfetchers/package.nix +++ b/src/libfetchers/package.nix @@ -41,14 +41,6 @@ mkMesonLibrary (finalAttrs: { nlohmann_json ]; - preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. - '' - chmod u+w ./.version - echo ${version} > ../../.version - ''; - meta = { platforms = lib.platforms.unix ++ lib.platforms.windows; }; diff --git a/src/libflake-c/package.nix b/src/libflake-c/package.nix index f0615a42798..1149508523e 100644 --- a/src/libflake-c/package.nix +++ b/src/libflake-c/package.nix @@ -38,14 +38,6 @@ mkMesonLibrary (finalAttrs: { nix-flake ]; - preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. - '' - chmod u+w ./.version - echo ${version} > ../../.version - ''; - mesonFlags = [ ]; diff --git a/src/libflake-tests/package.nix b/src/libflake-tests/package.nix index f9d9b0bc0c6..714f3791ad9 100644 --- a/src/libflake-tests/package.nix +++ b/src/libflake-tests/package.nix @@ -46,14 +46,6 @@ mkMesonExecutable (finalAttrs: { gtest ]; - preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. - '' - chmod u+w ./.version - echo ${version} > ../../.version - ''; - mesonFlags = [ ]; diff --git a/src/libflake/package.nix b/src/libflake/package.nix index ebd38e140d3..5240ce5e396 100644 --- a/src/libflake/package.nix +++ b/src/libflake/package.nix @@ -40,14 +40,6 @@ mkMesonLibrary (finalAttrs: { nlohmann_json ]; - preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. 
- '' - chmod u+w ./.version - echo ${version} > ../../.version - ''; - meta = { platforms = lib.platforms.unix ++ lib.platforms.windows; }; diff --git a/src/libmain-c/package.nix b/src/libmain-c/package.nix index cf710e03b0d..f019a917d36 100644 --- a/src/libmain-c/package.nix +++ b/src/libmain-c/package.nix @@ -40,14 +40,6 @@ mkMesonLibrary (finalAttrs: { nix-main ]; - preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. - '' - chmod u+w ./.version - echo ${version} > ../../.version - ''; - mesonFlags = [ ]; diff --git a/src/libmain/package.nix b/src/libmain/package.nix index 046b505dfd4..c03697c48da 100644 --- a/src/libmain/package.nix +++ b/src/libmain/package.nix @@ -37,14 +37,6 @@ mkMesonLibrary (finalAttrs: { openssl ]; - preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. - '' - chmod u+w ./.version - echo ${version} > ../../.version - ''; - meta = { platforms = lib.platforms.unix ++ lib.platforms.windows; }; diff --git a/src/libstore-c/package.nix b/src/libstore-c/package.nix index 89abeaab870..fde17c78e01 100644 --- a/src/libstore-c/package.nix +++ b/src/libstore-c/package.nix @@ -36,14 +36,6 @@ mkMesonLibrary (finalAttrs: { nix-store ]; - preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. - '' - chmod u+w ./.version - echo ${version} > ../../.version - ''; - mesonFlags = [ ]; diff --git a/src/libstore-test-support/package.nix b/src/libstore-test-support/package.nix index 7cc29795c19..ccac25ee16a 100644 --- a/src/libstore-test-support/package.nix +++ b/src/libstore-test-support/package.nix @@ -40,14 +40,6 @@ mkMesonLibrary (finalAttrs: { rapidcheck ]; - preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. - '' - chmod u+w ./.version - echo ${version} > ../../.version - ''; - mesonFlags = [ ]; diff --git a/src/libstore-tests/package.nix b/src/libstore-tests/package.nix index 670386c4a6f..b39ee7fa73c 100644 --- a/src/libstore-tests/package.nix +++ b/src/libstore-tests/package.nix @@ -52,14 +52,6 @@ mkMesonExecutable (finalAttrs: { nix-store-test-support ]; - preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. - '' - chmod u+w ./.version - echo ${version} > ../../.version - ''; - mesonFlags = [ ]; diff --git a/src/libstore/package.nix b/src/libstore/package.nix index c982b44f0b7..31867d331b9 100644 --- a/src/libstore/package.nix +++ b/src/libstore/package.nix @@ -69,14 +69,6 @@ mkMesonLibrary (finalAttrs: { nlohmann_json ]; - preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. - '' - chmod u+w ./.version - echo ${version} > ../../.version - ''; - mesonFlags = [ (lib.mesonEnable "seccomp-sandboxing" stdenv.hostPlatform.isLinux) diff --git a/src/libutil-c/package.nix b/src/libutil-c/package.nix index 72f57d6f9c6..f26f57775d4 100644 --- a/src/libutil-c/package.nix +++ b/src/libutil-c/package.nix @@ -34,14 +34,6 @@ mkMesonLibrary (finalAttrs: { nix-util ]; - preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. 
- '' - chmod u+w ./.version - echo ${version} > ../../.version - ''; - mesonFlags = [ ]; diff --git a/src/libutil-test-support/package.nix b/src/libutil-test-support/package.nix index 33cd5217def..fafd47c86c5 100644 --- a/src/libutil-test-support/package.nix +++ b/src/libutil-test-support/package.nix @@ -38,14 +38,6 @@ mkMesonLibrary (finalAttrs: { rapidcheck ]; - preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. - '' - chmod u+w ./.version - echo ${version} > ../../.version - ''; - mesonFlags = [ ]; diff --git a/src/libutil-tests/package.nix b/src/libutil-tests/package.nix index d89c544539e..c06de6894af 100644 --- a/src/libutil-tests/package.nix +++ b/src/libutil-tests/package.nix @@ -45,14 +45,6 @@ mkMesonExecutable (finalAttrs: { gtest ]; - preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. - '' - chmod u+w ./.version - echo ${version} > ../../.version - ''; - mesonFlags = [ ]; diff --git a/src/libutil/package.nix b/src/libutil/package.nix index 586119a6e5d..47dcb54a26f 100644 --- a/src/libutil/package.nix +++ b/src/libutil/package.nix @@ -52,17 +52,6 @@ mkMesonLibrary (finalAttrs: { nlohmann_json ]; - preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. - # - # TODO: change release process to add `pre` in `.version`, remove it - # before tagging, and restore after. - '' - chmod u+w ./.version - echo ${version} > ../../.version - ''; - mesonFlags = [ (lib.mesonEnable "cpuid" stdenv.hostPlatform.isx86_64) ]; diff --git a/src/nix/package.nix b/src/nix/package.nix index bb90be1eff2..40a28043785 100644 --- a/src/nix/package.nix +++ b/src/nix/package.nix @@ -91,14 +91,6 @@ mkMesonExecutable (finalAttrs: { nix-cmd ]; - preConfigure = - # Update the repo-global .version file. - # Symlink ./.version points there, but by default only workDir is writable. - '' - chmod u+w ./.version - echo ${version} > ./.version - ''; - mesonFlags = [ ]; diff --git a/tests/functional/package.nix b/tests/functional/package.nix index 74c034196fd..64ffa540a60 100644 --- a/tests/functional/package.nix +++ b/tests/functional/package.nix @@ -75,16 +75,10 @@ mkMesonDerivation ( ]; preConfigure = - # "Inline" .version so it's not a symlink, and includes the suffix. - # Do the meson utils, without modification. - '' - chmod u+w ./.version - echo ${version} > ../../../.version - '' # TEMP hack for Meson before make is gone, where # `src/nix-functional-tests` is during the transition a symlink and # not the actual directory directory. - + '' + '' cd $(readlink -e $PWD) echo $PWD | grep tests/functional ''; From fd062585acde7178d9df9bc3121691eafe3dffa6 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 18 Feb 2025 15:55:19 +0100 Subject: [PATCH 159/361] tests: Fix installTests (cherry picked from commit 2b5365bcf73bd7584af79b1c5afc84935a2df536) --- tests/functional/misc.sh | 2 +- tests/functional/package.nix | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/functional/misc.sh b/tests/functional/misc.sh index 7d63756b7f4..cb4d4139f4c 100755 --- a/tests/functional/misc.sh +++ b/tests/functional/misc.sh @@ -11,7 +11,7 @@ source common.sh #nix-hash --help | grepQuiet base32 # Can we ask for the version number? -nix-env --version | grep "$version" +nix-env --version | grep -F "${_NIX_TEST_CLIENT_VERSION:-$version}" nix_env=$(type -P nix-env) (PATH=""; ! 
$nix_env --help 2>&1 ) | grepQuiet -F "The 'man' command was not found, but it is needed for 'nix-env' and some other 'nix-*' commands' help text. Perhaps you could install the 'man' command?" diff --git a/tests/functional/package.nix b/tests/functional/package.nix index 64ffa540a60..a84ad1791f7 100644 --- a/tests/functional/package.nix +++ b/tests/functional/package.nix @@ -99,6 +99,8 @@ mkMesonDerivation ( } // lib.optionalAttrs (test-daemon != null) { + # TODO rename to _NIX_TEST_DAEMON_PACKAGE NIX_DAEMON_PACKAGE = test-daemon; + _NIX_TEST_CLIENT_VERSION = nix-cli.version; } ) From 540e8cb90809863e32a4fe588e49be388f4a67e4 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 18 Feb 2025 12:19:48 +0100 Subject: [PATCH 160/361] packaging: Move layers from dependencies to components This makes it easier to implement batch overriding for the components. (cherry picked from commit cca01407a7e661e589de165d9a873210ce91353f) --- flake.nix | 2 + packaging/components.nix | 160 ++++++++++++++++++++++++++++++++++++- packaging/dependencies.nix | 146 --------------------------------- 3 files changed, 161 insertions(+), 147 deletions(-) diff --git a/flake.nix b/flake.nix index eafb6535302..7158f1ac81b 100644 --- a/flake.nix +++ b/flake.nix @@ -165,6 +165,8 @@ f = import ./packaging/components.nix { inherit (final) lib; inherit officialRelease; + inherit stdenv; + pkgs = final; src = self; }; }; diff --git a/packaging/components.nix b/packaging/components.nix index 07bb209cd4f..5c03408dd82 100644 --- a/packaging/components.nix +++ b/packaging/components.nix @@ -1,13 +1,22 @@ { lib, + pkgs, src, + stdenv, officialRelease, }: scope: let - inherit (scope) callPackage; + inherit (scope) + callPackage + ; + inherit (pkgs.buildPackages) + meson + ninja + pkg-config + ; baseVersion = lib.fileContents ../.version; @@ -20,6 +29,129 @@ let }_${src.shortRev or "dirty"}"; fineVersion = baseVersion + fineVersionSuffix; + + root = ../.; + + # Nixpkgs implements this by returning a subpath into the fetched Nix sources. + resolvePath = p: p; + + # Indirection for Nixpkgs to override when package.nix files are vendored + filesetToSource = lib.fileset.toSource; + + /** + Given a set of layers, create a mkDerivation-like function + */ + mkPackageBuilder = + exts: userFn: stdenv.mkDerivation (lib.extends (lib.composeManyExtensions exts) userFn); + + setVersionLayer = finalAttrs: prevAttrs: { + preConfigure = + prevAttrs.prevAttrs or "" + + + # Update the repo-global .version file. + # Symlink ./.version points there, but by default only workDir is writable. + '' + chmod u+w ./.version + echo ${finalAttrs.version} > ./.version + ''; + }; + + localSourceLayer = + finalAttrs: prevAttrs: + let + workDirPath = + # Ideally we'd pick finalAttrs.workDir, but for now `mkDerivation` has + # the requirement that everything except passthru and meta must be + # serialized by mkDerivation, which doesn't work for this. + prevAttrs.workDir; + + workDirSubpath = lib.path.removePrefix root workDirPath; + sources = + assert prevAttrs.fileset._type == "fileset"; + prevAttrs.fileset; + src = lib.fileset.toSource { + fileset = sources; + inherit root; + }; + + in + { + sourceRoot = "${src.name}/" + workDirSubpath; + inherit src; + + # Clear what `derivation` can't/shouldn't serialize; see prevAttrs.workDir. 
+ fileset = null; + workDir = null; + }; + + mesonLayer = finalAttrs: prevAttrs: { + # NOTE: + # As of https://github.com/NixOS/nixpkgs/blob/8baf8241cea0c7b30e0b8ae73474cb3de83c1a30/pkgs/by-name/me/meson/setup-hook.sh#L26, + # `mesonBuildType` defaults to `plain` if not specified. We want our Nix-built binaries to be optimized by default. + # More on build types here: https://mesonbuild.com/Builtin-options.html#details-for-buildtype. + mesonBuildType = "release"; + # NOTE: + # Users who are debugging Nix builds are expected to set the environment variable `mesonBuildType`, per the + # guidance in https://github.com/NixOS/nix/blob/8a3fc27f1b63a08ac983ee46435a56cf49ebaf4a/doc/manual/source/development/debugging.md?plain=1#L10. + # For this reason, we don't want to refer to `finalAttrs.mesonBuildType` here, but rather use the environment variable. + preConfigure = + prevAttrs.preConfigure or "" + + + lib.optionalString + ( + !stdenv.hostPlatform.isWindows + # build failure + && !stdenv.hostPlatform.isStatic + # LTO breaks exception handling on x86-64-darwin. + && stdenv.system != "x86_64-darwin" + ) + '' + case "$mesonBuildType" in + release|minsize) appendToVar mesonFlags "-Db_lto=true" ;; + *) appendToVar mesonFlags "-Db_lto=false" ;; + esac + ''; + nativeBuildInputs = [ + meson + ninja + ] ++ prevAttrs.nativeBuildInputs or [ ]; + mesonCheckFlags = prevAttrs.mesonCheckFlags or [ ] ++ [ + "--print-errorlogs" + ]; + }; + + mesonBuildLayer = finalAttrs: prevAttrs: { + nativeBuildInputs = prevAttrs.nativeBuildInputs or [ ] ++ [ + pkg-config + ]; + separateDebugInfo = !stdenv.hostPlatform.isStatic; + hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; + env = + prevAttrs.env or { } + // lib.optionalAttrs ( + stdenv.isLinux + && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux") + && !(stdenv.hostPlatform.useLLVM or false) + ) { LDFLAGS = "-fuse-ld=gold"; }; + }; + + mesonLibraryLayer = finalAttrs: prevAttrs: { + outputs = prevAttrs.outputs or [ "out" ] ++ [ "dev" ]; + }; + + # Work around weird `--as-needed` linker behavior with BSD, see + # https://github.com/mesonbuild/meson/issues/3593 + bsdNoLinkAsNeeded = + finalAttrs: prevAttrs: + lib.optionalAttrs stdenv.hostPlatform.isBSD { + mesonFlags = [ (lib.mesonBool "b_asneeded" false) ] ++ prevAttrs.mesonFlags or [ ]; + }; + + miscGoodPractice = finalAttrs: prevAttrs: { + strictDeps = prevAttrs.strictDeps or true; + enableParallelBuilding = true; + }; + in # This becomes the pkgs.nixComponents attribute set @@ -27,6 +159,32 @@ in version = baseVersion + versionSuffix; inherit versionSuffix; + inherit resolvePath filesetToSource; + + mkMesonDerivation = mkPackageBuilder [ + miscGoodPractice + localSourceLayer + setVersionLayer + mesonLayer + ]; + mkMesonExecutable = mkPackageBuilder [ + miscGoodPractice + bsdNoLinkAsNeeded + localSourceLayer + setVersionLayer + mesonLayer + mesonBuildLayer + ]; + mkMesonLibrary = mkPackageBuilder [ + miscGoodPractice + bsdNoLinkAsNeeded + localSourceLayer + mesonLayer + setVersionLayer + mesonBuildLayer + mesonLibraryLayer + ]; + nix-util = callPackage ../src/libutil/package.nix { }; nix-util-c = callPackage ../src/libutil-c/package.nix { }; nix-util-test-support = callPackage ../src/libutil-test-support/package.nix { }; diff --git a/packaging/dependencies.nix b/packaging/dependencies.nix index 20992555c17..2060672f795 100644 --- a/packaging/dependencies.nix +++ b/packaging/dependencies.nix @@ -17,8 +17,6 @@ in let inherit (pkgs) lib; - root = ../.; - stdenv = if prevStdenv.isDarwin 
&& prevStdenv.isx86_64 then darwinStdenv else prevStdenv; # Fix the following error with the default x86_64-darwin SDK: @@ -30,125 +28,6 @@ let # all the way back to 10.6. darwinStdenv = pkgs.overrideSDK prevStdenv { darwinMinVersion = "10.13"; }; - # Nixpkgs implements this by returning a subpath into the fetched Nix sources. - resolvePath = p: p; - - # Indirection for Nixpkgs to override when package.nix files are vendored - filesetToSource = lib.fileset.toSource; - - /** - Given a set of layers, create a mkDerivation-like function - */ - mkPackageBuilder = - exts: userFn: stdenv.mkDerivation (lib.extends (lib.composeManyExtensions exts) userFn); - - setVersionLayer = finalAttrs: prevAttrs: { - preConfigure = - prevAttrs.prevAttrs or "" - + - # Update the repo-global .version file. - # Symlink ./.version points there, but by default only workDir is writable. - '' - chmod u+w ./.version - echo ${finalAttrs.version} > ./.version - ''; - }; - - localSourceLayer = - finalAttrs: prevAttrs: - let - workDirPath = - # Ideally we'd pick finalAttrs.workDir, but for now `mkDerivation` has - # the requirement that everything except passthru and meta must be - # serialized by mkDerivation, which doesn't work for this. - prevAttrs.workDir; - - workDirSubpath = lib.path.removePrefix root workDirPath; - sources = - assert prevAttrs.fileset._type == "fileset"; - prevAttrs.fileset; - src = lib.fileset.toSource { - fileset = sources; - inherit root; - }; - - in - { - sourceRoot = "${src.name}/" + workDirSubpath; - inherit src; - - # Clear what `derivation` can't/shouldn't serialize; see prevAttrs.workDir. - fileset = null; - workDir = null; - }; - - mesonLayer = finalAttrs: prevAttrs: { - # NOTE: - # As of https://github.com/NixOS/nixpkgs/blob/8baf8241cea0c7b30e0b8ae73474cb3de83c1a30/pkgs/by-name/me/meson/setup-hook.sh#L26, - # `mesonBuildType` defaults to `plain` if not specified. We want our Nix-built binaries to be optimized by default. - # More on build types here: https://mesonbuild.com/Builtin-options.html#details-for-buildtype. - mesonBuildType = "release"; - # NOTE: - # Users who are debugging Nix builds are expected to set the environment variable `mesonBuildType`, per the - # guidance in https://github.com/NixOS/nix/blob/8a3fc27f1b63a08ac983ee46435a56cf49ebaf4a/doc/manual/source/development/debugging.md?plain=1#L10. - # For this reason, we don't want to refer to `finalAttrs.mesonBuildType` here, but rather use the environment variable. - preConfigure = - prevAttrs.preConfigure or "" - + - lib.optionalString - ( - !stdenv.hostPlatform.isWindows - # build failure - && !stdenv.hostPlatform.isStatic - # LTO breaks exception handling on x86-64-darwin. 
- && stdenv.system != "x86_64-darwin" - ) - '' - case "$mesonBuildType" in - release|minsize) appendToVar mesonFlags "-Db_lto=true" ;; - *) appendToVar mesonFlags "-Db_lto=false" ;; - esac - ''; - nativeBuildInputs = [ - pkgs.buildPackages.meson - pkgs.buildPackages.ninja - ] ++ prevAttrs.nativeBuildInputs or [ ]; - mesonCheckFlags = prevAttrs.mesonCheckFlags or [ ] ++ [ - "--print-errorlogs" - ]; - }; - - mesonBuildLayer = finalAttrs: prevAttrs: { - nativeBuildInputs = prevAttrs.nativeBuildInputs or [ ] ++ [ - pkgs.buildPackages.pkg-config - ]; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - env = - prevAttrs.env or { } - // lib.optionalAttrs ( - stdenv.isLinux - && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux") - && !(stdenv.hostPlatform.useLLVM or false) - ) { LDFLAGS = "-fuse-ld=gold"; }; - }; - - mesonLibraryLayer = finalAttrs: prevAttrs: { - outputs = prevAttrs.outputs or [ "out" ] ++ [ "dev" ]; - }; - - # Work around weird `--as-needed` linker behavior with BSD, see - # https://github.com/mesonbuild/meson/issues/3593 - bsdNoLinkAsNeeded = - finalAttrs: prevAttrs: - lib.optionalAttrs stdenv.hostPlatform.isBSD { - mesonFlags = [ (lib.mesonBool "b_asneeded" false) ] ++ prevAttrs.mesonFlags or [ ]; - }; - - miscGoodPractice = finalAttrs: prevAttrs: { - strictDeps = prevAttrs.strictDeps or true; - enableParallelBuilding = true; - }; in scope: { @@ -187,31 +66,6 @@ scope: installPhase = lib.replaceStrings [ "--without-python" ] [ "" ] old.installPhase; }); - inherit resolvePath filesetToSource; - - mkMesonDerivation = mkPackageBuilder [ - miscGoodPractice - localSourceLayer - setVersionLayer - mesonLayer - ]; - mkMesonExecutable = mkPackageBuilder [ - miscGoodPractice - bsdNoLinkAsNeeded - localSourceLayer - setVersionLayer - mesonLayer - mesonBuildLayer - ]; - mkMesonLibrary = mkPackageBuilder [ - miscGoodPractice - bsdNoLinkAsNeeded - localSourceLayer - mesonLayer - setVersionLayer - mesonBuildLayer - mesonLibraryLayer - ]; } # libgit2: Nixpkgs 24.11 has < 1.9.0 // lib.optionalAttrs (!lib.versionAtLeast pkgs.libgit2.version "1.9.0") { From 44fb6479860f831a0d34540d3b4bae335cb39a59 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 18 Feb 2025 13:35:12 +0100 Subject: [PATCH 161/361] packaging: Add overrideAllMesonComponents (cherry picked from commit f31d86284f1027edf173d92967b609de67e1bb2e) --- packaging/components.nix | 34 +++++++++++++++++++++++++++++++++- 1 file changed, 33 insertions(+), 1 deletion(-) diff --git a/packaging/components.nix b/packaging/components.nix index 5c03408dd82..546d5829dac 100644 --- a/packaging/components.nix +++ b/packaging/components.nix @@ -161,11 +161,28 @@ in inherit resolvePath filesetToSource; + /** + A user-provided extension function to apply to each component derivation. + */ + mesonComponentOverrides = finalAttrs: prevAttrs: { }; + + /** + Apply an extension function (i.e. overlay-shaped) to all component derivations. 
+ */ + overrideAllMesonComponents = + f: + scope.overrideScope ( + finalScope: prevScope: { + mesonComponentOverrides = lib.composeExtensions scope.mesonComponentOverrides f; + } + ); + mkMesonDerivation = mkPackageBuilder [ miscGoodPractice localSourceLayer setVersionLayer mesonLayer + scope.mesonComponentOverrides ]; mkMesonExecutable = mkPackageBuilder [ miscGoodPractice @@ -174,6 +191,7 @@ in setVersionLayer mesonLayer mesonBuildLayer + scope.mesonComponentOverrides ]; mkMesonLibrary = mkPackageBuilder [ miscGoodPractice @@ -183,6 +201,7 @@ in setVersionLayer mesonBuildLayer mesonLibraryLayer + scope.mesonComponentOverrides ]; nix-util = callPackage ../src/libutil/package.nix { }; @@ -224,5 +243,18 @@ in nix-perl-bindings = callPackage ../src/perl/package.nix { }; - nix-everything = callPackage ../packaging/everything.nix { }; + nix-everything = callPackage ../packaging/everything.nix { } // { + # Note: no `passthru.overrideAllMesonComponents` + # This would propagate into `nix.overrideAttrs f`, but then discard + # `f` when `.overrideAllMesonComponents` is used. + # Both "methods" should be views on the same fixpoint overriding mechanism + # for that to work. For now, we intentionally don't support the broken + # two-fixpoint solution. + /** + Apply an extension function (i.e. overlay-shaped) to all component derivations, and return the nix package. + */ + overrideAllMesonComponents = f: (scope.overrideAllMesonComponents f).nix-everything; + + scope = scope; + }; } From cc3fb612496a08c35fd8daf31101e7c2279ca032 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 18 Feb 2025 15:31:55 +0100 Subject: [PATCH 162/361] packaging: Add source overriding "methods" (cherry picked from commit 48fb6fdde955afd1078ea7bb7f0e8c73e0185f8f) --- flake.nix | 21 +++++++ packaging/components.nix | 123 ++++++++++++++++++++++++++++++++++++--- 2 files changed, 136 insertions(+), 8 deletions(-) diff --git a/flake.nix b/flake.nix index 7158f1ac81b..a92fd74fc58 100644 --- a/flake.nix +++ b/flake.nix @@ -237,6 +237,27 @@ LANG=C.UTF-8 ${pkgs.changelog-d}/bin/changelog-d ${./doc/manual/rl-next} >$out ''; repl-completion = nixpkgsFor.${system}.native.callPackage ./tests/repl-completion.nix { }; + + /** + Checks for our packaging expressions. + This shouldn't build anything significant; just check that things + (including derivations) are _set up_ correctly. + */ + packaging-overriding = + let + pkgs = nixpkgsFor.${system}.native; + nix = self.packages.${system}.nix; + in + assert (nix.appendPatches [ pkgs.emptyFile ]).libs.nix-util.src.patches == [ pkgs.emptyFile ]; + # If this fails, something might be wrong with how we've wired the scope, + # or something could be broken in Nixpkgs. + pkgs.testers.testEqualContents { + assertion = "trivial patch does not change source contents"; + expected = "${./.}"; + actual = + # Same for all components; nix-util is an arbitrary pick + (nix.appendPatches [ pkgs.emptyFile ]).libs.nix-util.src; + }; } // (lib.optionalAttrs (builtins.elem system linux64BitSystems)) { dockerImage = self.hydraJobs.dockerImage.${system}; diff --git a/packaging/components.nix b/packaging/components.nix index 546d5829dac..de02f052bbe 100644 --- a/packaging/components.nix +++ b/packaging/components.nix @@ -32,9 +32,6 @@ let root = ../.; - # Nixpkgs implements this by returning a subpath into the fetched Nix sources. 
- resolvePath = p: p; - # Indirection for Nixpkgs to override when package.nix files are vendored filesetToSource = lib.fileset.toSource; @@ -84,6 +81,31 @@ let workDir = null; }; + resolveRelPath = p: lib.path.removePrefix root p; + + makeFetchedSourceLayer = + finalScope: finalAttrs: prevAttrs: + let + workDirPath = + # Ideally we'd pick finalAttrs.workDir, but for now `mkDerivation` has + # the requirement that everything except passthru and meta must be + # serialized by mkDerivation, which doesn't work for this. + prevAttrs.workDir; + + workDirSubpath = resolveRelPath workDirPath; + # sources = assert prevAttrs.fileset._type == "fileset"; prevAttrs.fileset; + # src = lib.fileset.toSource { fileset = sources; inherit root; }; + + in + { + sourceRoot = "${finalScope.patchedSrc.name}/" + workDirSubpath; + src = finalScope.patchedSrc; + + # Clear what `derivation` can't/shouldn't serialize; see prevAttrs.workDir. + fileset = null; + workDir = null; + }; + mesonLayer = finalAttrs: prevAttrs: { # NOTE: # As of https://github.com/NixOS/nixpkgs/blob/8baf8241cea0c7b30e0b8ae73474cb3de83c1a30/pkgs/by-name/me/meson/setup-hook.sh#L26, @@ -152,6 +174,17 @@ let enableParallelBuilding = true; }; + /** + Append patches to the source layer. + */ + appendPatches = + scope: patches: + scope.overrideScope ( + finalScope: prevScope: { + patches = prevScope.patches ++ patches; + } + ); + in # This becomes the pkgs.nixComponents attribute set @@ -159,13 +192,24 @@ in version = baseVersion + versionSuffix; inherit versionSuffix; - inherit resolvePath filesetToSource; + inherit filesetToSource; /** A user-provided extension function to apply to each component derivation. */ mesonComponentOverrides = finalAttrs: prevAttrs: { }; + /** + An overridable derivation layer for handling the sources. + */ + sourceLayer = localSourceLayer; + + /** + Resolve a path value to either itself or a path in the `src`, depending + whether `overrideSource` was called. + */ + resolvePath = p: p; + /** Apply an extension function (i.e. overlay-shaped) to all component derivations. */ @@ -177,9 +221,57 @@ in } ); + /** + Provide an alternate source. This allows the expressions to be vendored without copying the sources, + but it does make the build non-granular; all components will use a complete source. + + Packaging expressions will be ignored. + */ + overrideSource = + src: + scope.overrideScope ( + finalScope: prevScope: { + sourceLayer = makeFetchedSourceLayer finalScope; + /** + Unpatched source for the build of Nix. Packaging expressions will be ignored. + */ + src = src; + /** + Patches for the whole Nix source. Changes to packaging expressions will be ignored. + */ + patches = [ ]; + /** + Fetched and patched source to be used in component derivations. + */ + patchedSrc = + if finalScope.patches == [ ] then + src + else + pkgs.buildPackages.srcOnly ( + pkgs.buildPackages.stdenvNoCC.mkDerivation { + name = "${finalScope.src.name or "nix-source"}-patched"; + inherit (finalScope) src patches; + } + ); + resolvePath = p: finalScope.patchedSrc + "/${resolveRelPath p}"; + appendPatches = appendPatches finalScope; + } + ); + + /** + Append patches to be applied to the whole Nix source. + This affects all components. + + Changes to the packaging expressions will be ignored. + */ + appendPatches = + patches: + # switch to "fetched" source first, so that patches apply to the whole tree. 
+ (scope.overrideSource "${./..}").appendPatches patches; + mkMesonDerivation = mkPackageBuilder [ miscGoodPractice - localSourceLayer + scope.sourceLayer setVersionLayer mesonLayer scope.mesonComponentOverrides @@ -187,7 +279,7 @@ in mkMesonExecutable = mkPackageBuilder [ miscGoodPractice bsdNoLinkAsNeeded - localSourceLayer + scope.sourceLayer setVersionLayer mesonLayer mesonBuildLayer @@ -196,7 +288,7 @@ in mkMesonLibrary = mkPackageBuilder [ miscGoodPractice bsdNoLinkAsNeeded - localSourceLayer + scope.sourceLayer mesonLayer setVersionLayer mesonBuildLayer @@ -255,6 +347,21 @@ in */ overrideAllMesonComponents = f: (scope.overrideAllMesonComponents f).nix-everything; - scope = scope; + /** + Append patches to be applied to the whole Nix source. + This affects all components. + + Changes to the packaging expressions will be ignored. + */ + appendPatches = ps: (scope.appendPatches ps).nix-everything; + + /** + Provide an alternate source. This allows the expressions to be vendored without copying the sources, + but it does make the build non-granular; all components will use a complete source. + + Packaging expressions will be ignored. + */ + overrideSource = src: (scope.overrideSource src).nix-everything; + }; } From a4641be4e92737fe213c166705949f570f0cc64c Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 18 Feb 2025 17:13:57 +0100 Subject: [PATCH 163/361] test: Ignore packaging-overriding check on darwin for now (cherry picked from commit 03efba30dacc79e64f4107206b13231473bf2670) --- flake.nix | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/flake.nix b/flake.nix index a92fd74fc58..f5c7780d590 100644 --- a/flake.nix +++ b/flake.nix @@ -249,15 +249,18 @@ nix = self.packages.${system}.nix; in assert (nix.appendPatches [ pkgs.emptyFile ]).libs.nix-util.src.patches == [ pkgs.emptyFile ]; - # If this fails, something might be wrong with how we've wired the scope, - # or something could be broken in Nixpkgs. - pkgs.testers.testEqualContents { - assertion = "trivial patch does not change source contents"; - expected = "${./.}"; - actual = - # Same for all components; nix-util is an arbitrary pick - (nix.appendPatches [ pkgs.emptyFile ]).libs.nix-util.src; - }; + if pkgs.stdenv.buildPlatform.isDarwin then + lib.warn "packaging-overriding check currently disabled because of a permissions issue on macOS" pkgs.emptyFile + else + # If this fails, something might be wrong with how we've wired the scope, + # or something could be broken in Nixpkgs. 
+ pkgs.testers.testEqualContents { + assertion = "trivial patch does not change source contents"; + expected = "${./.}"; + actual = + # Same for all components; nix-util is an arbitrary pick + (nix.appendPatches [ pkgs.emptyFile ]).libs.nix-util.src; + }; } // (lib.optionalAttrs (builtins.elem system linux64BitSystems)) { dockerImage = self.hydraJobs.dockerImage.${system}; From bfb6f37b374eb71747419afb12de372fe02e51a7 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 18 Feb 2025 19:01:58 +0100 Subject: [PATCH 164/361] packaging: Add patch count to version (cherry picked from commit 0dbe28ad9d5f82d11bc5626310822a404f07eb60) --- packaging/components.nix | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/packaging/components.nix b/packaging/components.nix index de02f052bbe..c26b4b9a800 100644 --- a/packaging/components.nix +++ b/packaging/components.nix @@ -100,6 +100,11 @@ let { sourceRoot = "${finalScope.patchedSrc.name}/" + workDirSubpath; src = finalScope.patchedSrc; + version = + let + n = lib.count (p: p != null) finalScope.patches; + in + if n == 0 then finalAttrs.version else finalAttrs.version + "+${toString n}"; # Clear what `derivation` can't/shouldn't serialize; see prevAttrs.workDir. fileset = null; From dade40573e63528070a510d09a41980993e3724c Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 18 Feb 2025 19:06:36 +0100 Subject: [PATCH 165/361] packaging: Make patch count lazier This makes `nix.version` quicker to evaluate, which should speed up package listing operations. If you want an accurate count, use `lib.optionals` in your override instead of `null` values. (cherry picked from commit d47e3c95762881e35e894ca1ba1f77c00f8b7ba3) --- packaging/components.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packaging/components.nix b/packaging/components.nix index c26b4b9a800..bec4dc86578 100644 --- a/packaging/components.nix +++ b/packaging/components.nix @@ -102,7 +102,7 @@ let src = finalScope.patchedSrc; version = let - n = lib.count (p: p != null) finalScope.patches; + n = lib.length finalScope.patches; in if n == 0 then finalAttrs.version else finalAttrs.version + "+${toString n}"; From 244735270a4a1d5f06edd569012cdb1dd222ec4a Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 19 Feb 2025 00:10:05 +0100 Subject: [PATCH 166/361] packaging: Remove dead code ... from nixpkgs, my bad. 
(cherry picked from commit f0bdb652161f142999134dd7756e41a3942f57b6) --- packaging/components.nix | 2 -- 1 file changed, 2 deletions(-) diff --git a/packaging/components.nix b/packaging/components.nix index bec4dc86578..b1ef38302f5 100644 --- a/packaging/components.nix +++ b/packaging/components.nix @@ -93,8 +93,6 @@ let prevAttrs.workDir; workDirSubpath = resolveRelPath workDirPath; - # sources = assert prevAttrs.fileset._type == "fileset"; prevAttrs.fileset; - # src = lib.fileset.toSource { fileset = sources; inherit root; }; in { From 0339ba582ea095d211bb9cf3713978c9ac805413 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 19 Feb 2025 09:09:57 +0100 Subject: [PATCH 167/361] magic-nix-cache-action -> flakehub-cache-action --- .github/workflows/build.yml | 4 ++-- .github/workflows/ci.yml | 4 ++-- .github/workflows/test.yml | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index ef7174c3090..230d4590dd8 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -17,5 +17,5 @@ jobs: - uses: DeterminateSystems/nix-installer-action@main with: flakehub: true - - uses: DeterminateSystems/magic-nix-cache-action@main - - run: nix build + - uses: DeterminateSystems/flakehub-cache-action@main + - run: nix build -L diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fa5f934ff57..8d3aa5d01ba 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -76,7 +76,7 @@ jobs: - uses: DeterminateSystems/nix-installer-action@main with: flakehub: true - - uses: DeterminateSystems/magic-nix-cache-action@main + - uses: DeterminateSystems/flakehub-cache-action@main - run: | nix build -L \ .#hydraJobs.tests.functional_user \ @@ -104,5 +104,5 @@ jobs: - uses: DeterminateSystems/nix-installer-action@main with: flakehub: true - - uses: DeterminateSystems/magic-nix-cache-action@main + - uses: DeterminateSystems/flakehub-cache-action@main - run: nix build -L --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH MAX_FLAKES=25 flake-regressions/eval-all.sh diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 14e4c5fa58d..e58827a9c06 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -17,5 +17,5 @@ jobs: - uses: DeterminateSystems/nix-installer-action@main with: flakehub: true - - uses: DeterminateSystems/magic-nix-cache-action@main + - uses: DeterminateSystems/flakehub-cache-action@main - run: nix flake check -L From 8028579060d5ddb05ab1e998827341f82438ee18 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 19 Feb 2025 00:36:29 +0100 Subject: [PATCH 168/361] packaging: Restore libgit2 USE_SSH=exec ... when nixpkgs is nixos-unstable or the overlay is used. 
(cherry picked from commit 5488e29d2f0b77c3106fb295a9464ba2dd326d9a) --- packaging/dependencies.nix | 70 ++++++++++++++++++++------------------ 1 file changed, 36 insertions(+), 34 deletions(-) diff --git a/packaging/dependencies.nix b/packaging/dependencies.nix index 2060672f795..535b3ff3739 100644 --- a/packaging/dependencies.nix +++ b/packaging/dependencies.nix @@ -29,8 +29,7 @@ let darwinStdenv = pkgs.overrideSDK prevStdenv { darwinMinVersion = "10.13"; }; in -scope: -{ +scope: { inherit stdenv; aws-sdk-cpp = @@ -66,36 +65,39 @@ scope: installPhase = lib.replaceStrings [ "--without-python" ] [ "" ] old.installPhase; }); -} -# libgit2: Nixpkgs 24.11 has < 1.9.0 -// lib.optionalAttrs (!lib.versionAtLeast pkgs.libgit2.version "1.9.0") { - libgit2 = pkgs.libgit2.overrideAttrs (attrs: { - cmakeFlags = attrs.cmakeFlags or [ ] ++ [ "-DUSE_SSH=exec" ]; - nativeBuildInputs = - attrs.nativeBuildInputs or [ ] - # gitMinimal does not build on Windows. See packbuilder patch. - ++ lib.optionals (!stdenv.hostPlatform.isWindows) [ - # Needed for `git apply`; see `prePatch` - pkgs.buildPackages.gitMinimal - ]; - # Only `git apply` can handle git binary patches - prePatch = - attrs.prePatch or "" - + lib.optionalString (!stdenv.hostPlatform.isWindows) '' - patch() { - git apply - } - ''; - patches = - attrs.patches or [ ] - ++ [ - ./patches/libgit2-mempack-thin-packfile.patch - ] - # gitMinimal does not build on Windows, but fortunately this patch only - # impacts interruptibility - ++ lib.optionals (!stdenv.hostPlatform.isWindows) [ - # binary patch; see `prePatch` - ./patches/libgit2-packbuilder-callback-interruptible.patch - ]; - }); + libgit2 = pkgs.libgit2.overrideAttrs ( + attrs: + { + cmakeFlags = attrs.cmakeFlags or [ ] ++ [ "-DUSE_SSH=exec" ]; + } + # libgit2: Nixpkgs 24.11 has < 1.9.0, which needs our patches + // lib.optionalAttrs (!lib.versionAtLeast pkgs.libgit2.version "1.9.0") { + nativeBuildInputs = + attrs.nativeBuildInputs or [ ] + # gitMinimal does not build on Windows. See packbuilder patch. + ++ lib.optionals (!stdenv.hostPlatform.isWindows) [ + # Needed for `git apply`; see `prePatch` + pkgs.buildPackages.gitMinimal + ]; + # Only `git apply` can handle git binary patches + prePatch = + attrs.prePatch or "" + + lib.optionalString (!stdenv.hostPlatform.isWindows) '' + patch() { + git apply + } + ''; + patches = + attrs.patches or [ ] + ++ [ + ./patches/libgit2-mempack-thin-packfile.patch + ] + # gitMinimal does not build on Windows, but fortunately this patch only + # impacts interruptibility + ++ lib.optionals (!stdenv.hostPlatform.isWindows) [ + # binary patch; see `prePatch` + ./patches/libgit2-packbuilder-callback-interruptible.patch + ]; + } + ); } From cd149b56c76f886bc0b08d1d6da3f4e5f631d591 Mon Sep 17 00:00:00 2001 From: MaxHearnden Date: Sun, 9 Feb 2025 20:53:58 +0000 Subject: [PATCH 169/361] Set FD_CLOEXEC on sockets created by curl Curl creates sockets without setting FD_CLOEXEC/SOCK_CLOEXEC, this can cause connections to remain open forever when using commands like `nix shell` This change sets the FD_CLOEXEC flag using a CURLOPT_SOCKOPTFUNCTION callback. 
(cherry picked from commit 12d25272764bf2f9f828d5d129ec26622baf75eb) --- doc/manual/rl-next/curl-cloexec.md | 10 ++++++++++ src/libstore/filetransfer.cc | 12 ++++++++++++ 2 files changed, 22 insertions(+) create mode 100644 doc/manual/rl-next/curl-cloexec.md diff --git a/doc/manual/rl-next/curl-cloexec.md b/doc/manual/rl-next/curl-cloexec.md new file mode 100644 index 00000000000..2fcdfb0d101 --- /dev/null +++ b/doc/manual/rl-next/curl-cloexec.md @@ -0,0 +1,10 @@ +--- +synopsis: Set FD_CLOEXEC on sockets created by curl +issues: [] +prs: [12439] +--- + + +Curl creates sockets without setting FD_CLOEXEC/SOCK_CLOEXEC, this can cause connections to remain open forever when using commands like `nix shell` + +This change sets the FD_CLOEXEC flag using a CURLOPT_SOCKOPTFUNCTION callback. diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index 8439cc39cc8..932e1d75684 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -300,6 +300,14 @@ struct curlFileTransfer : public FileTransfer return ((TransferItem *) userp)->readCallback(buffer, size, nitems); } + #if !defined(_WIN32) && LIBCURL_VERSION_NUM >= 0x071000 + static int cloexec_callback(void *, curl_socket_t curlfd, curlsocktype purpose) { + unix::closeOnExec(curlfd); + vomit("cloexec set for fd %i", curlfd); + return CURL_SOCKOPT_OK; + } + #endif + void init() { if (!req) req = curl_easy_init(); @@ -359,6 +367,10 @@ struct curlFileTransfer : public FileTransfer curl_easy_setopt(req, CURLOPT_SSL_VERIFYHOST, 0); } + #if !defined(_WIN32) && LIBCURL_VERSION_NUM >= 0x071000 + curl_easy_setopt(req, CURLOPT_SOCKOPTFUNCTION, cloexec_callback); + #endif + curl_easy_setopt(req, CURLOPT_CONNECTTIMEOUT, fileTransferSettings.connectTimeout.get()); curl_easy_setopt(req, CURLOPT_LOW_SPEED_LIMIT, 1L); From a691dcf48f161d47922487170c94ad3105901a8a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 20 Feb 2025 15:36:36 +0100 Subject: [PATCH 170/361] Run all of hydraJobs.tests.* --- .github/workflows/ci.yml | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8d3aa5d01ba..7834c0ea104 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -68,6 +68,7 @@ jobs: with: os: macos-latest + # Build hydraJobs.tests.*. vm_tests: needs: build_x86_64-linux runs-on: UbuntuLatest32Cores128G @@ -78,12 +79,13 @@ jobs: flakehub: true - uses: DeterminateSystems/flakehub-cache-action@main - run: | - nix build -L \ - .#hydraJobs.tests.functional_user \ - .#hydraJobs.tests.githubFlakes \ - .#hydraJobs.tests.nix-docker \ - .#hydraJobs.tests.tarballFlakes \ - ; + nix build -L --keep-going \ + $(nix flake show --json \ + | jq -r ' + .hydraJobs.tests + | with_entries(select(.value.type == "derivation")) + | keys[] + | ".#hydraJobs.tests." 
+ .') flake_regressions: needs: build_x86_64-linux From 65583ca79b9945ef588c03886209243b1d6cc1cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sandro=20J=C3=A4ckel?= Date: Sat, 1 Feb 2025 16:49:31 +0100 Subject: [PATCH 171/361] Only try to chmod /nix/var/nix/profiles/per-user when necessary Co-authored-by: Eelco Dolstra (cherry picked from commit dcbf4dcc09805ea3d1f22a7f8a55f313473338ed) --- src/libstore/local-store.cc | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index f708bd1b008..9a7a941b65a 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -136,7 +136,12 @@ LocalStore::LocalStore( for (auto & perUserDir : {profilesDir + "/per-user", gcRootsDir + "/per-user"}) { createDirs(perUserDir); if (!readOnly) { - if (chmod(perUserDir.c_str(), 0755) == -1) + auto st = lstat(perUserDir); + + // Skip chmod call if the directory already has the correct permissions (0755). + // This is to avoid failing when the executing user lacks permissions to change the directory's permissions + // even if it would be no-op. + if ((st.st_mode & (S_IRWXU | S_IRWXG | S_IRWXO)) != 0755 && chmod(perUserDir.c_str(), 0755) == -1) throw SysError("could not set permissions on '%s' to 755", perUserDir); } } From 856afa27c2f1c352034ec965722510ffebe01b5b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 20 Feb 2025 17:22:42 +0100 Subject: [PATCH 172/361] Build the binary tarball --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 230d4590dd8..7e3c9872d54 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -18,4 +18,4 @@ jobs: with: flakehub: true - uses: DeterminateSystems/flakehub-cache-action@main - - run: nix build -L + - run: nix build . .#binaryTarball -L From d9f742302e9d44ef3a5dd658779c923eae4a0811 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 20 Feb 2025 20:24:51 +0100 Subject: [PATCH 173/361] Add merge queue config --- .github/workflows/ci.yml | 25 ++++++++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7834c0ea104..9a7c8bbaa48 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,6 +7,7 @@ on: - detsys-main - main - master + merge_group: permissions: id-token: "write" @@ -68,8 +69,25 @@ jobs: with: os: macos-latest - # Build hydraJobs.tests.*. - vm_tests: + vm_tests_smoke: + needs: build_x86_64-linux + runs-on: UbuntuLatest32Cores128G + steps: + - uses: actions/checkout@v4 + - uses: DeterminateSystems/nix-installer-action@main + with: + flakehub: true + - uses: DeterminateSystems/flakehub-cache-action@main + - run: | + nix build -L \ + .#hydraJobs.tests.functional_user \ + .#hydraJobs.tests.githubFlakes \ + .#hydraJobs.tests.nix-docker \ + .#hydraJobs.tests.tarballFlakes \ + ; + + vm_tests_all: + if: github.event_name == 'merge_group' needs: build_x86_64-linux runs-on: UbuntuLatest32Cores128G steps: @@ -85,7 +103,8 @@ jobs: .hydraJobs.tests | with_entries(select(.value.type == "derivation")) | keys[] - | ".#hydraJobs.tests." + .') + | ".#hydraJobs.tests." + .' 
+ | head -n5) # FIXME: for testing the merge queue flake_regressions: needs: build_x86_64-linux From 4c39f29a4a8a8aa02c2296b0a9986b7e760e77be Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 20 Feb 2025 20:33:01 +0100 Subject: [PATCH 174/361] Move more stuff to the merge queue --- .github/workflows/ci.yml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9a7c8bbaa48..6485288e87b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -31,11 +31,13 @@ jobs: os: UbuntuLatest32Cores128G build_aarch64-linux: + if: github.event_name == 'merge_group' uses: ./.github/workflows/build.yml with: os: UbuntuLatest32Cores128GArm build_x86_64-darwin: + if: github.event_name == 'merge_group' uses: ./.github/workflows/build.yml with: os: macos-13 @@ -52,18 +54,21 @@ jobs: os: UbuntuLatest32Cores128G test_aarch64-linux: + if: github.event_name == 'merge_group' uses: ./.github/workflows/test.yml needs: build_aarch64-linux with: os: UbuntuLatest32Cores128GArm test_x86_64-darwin: + if: github.event_name == 'merge_group' uses: ./.github/workflows/test.yml needs: build_aarch64-darwin with: os: macos-13 test_aarch64-darwin: + if: github.event_name == 'merge_group' uses: ./.github/workflows/test.yml needs: build_aarch64-darwin with: @@ -107,6 +112,7 @@ jobs: | head -n5) # FIXME: for testing the merge queue flake_regressions: + if: github.event_name == 'merge_group' needs: build_x86_64-linux runs-on: UbuntuLatest32Cores128G steps: @@ -126,4 +132,4 @@ jobs: with: flakehub: true - uses: DeterminateSystems/flakehub-cache-action@main - - run: nix build -L --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH MAX_FLAKES=25 flake-regressions/eval-all.sh + - run: nix build -L --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH MAX_FLAKES=50 flake-regressions/eval-all.sh From 10977365ff697143d1688513bd4e0dda377381e1 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 20 Feb 2025 20:58:16 +0100 Subject: [PATCH 175/361] Run some jobs on blacksmith --- .github/workflows/ci.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6485288e87b..95d6633fd00 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,7 +15,7 @@ permissions: jobs: eval: - runs-on: UbuntuLatest32Cores128G + runs-on: blacksmith-32vcpu-ubuntu-2204 steps: - uses: actions/checkout@v4 with: @@ -28,13 +28,13 @@ jobs: build_x86_64-linux: uses: ./.github/workflows/build.yml with: - os: UbuntuLatest32Cores128G + os: blacksmith-32vcpu-ubuntu-2204 build_aarch64-linux: if: github.event_name == 'merge_group' uses: ./.github/workflows/build.yml with: - os: UbuntuLatest32Cores128GArm + os: blacksmith-32vcpu-ubuntu-2204-arm build_x86_64-darwin: if: github.event_name == 'merge_group' @@ -51,14 +51,14 @@ jobs: uses: ./.github/workflows/test.yml needs: build_x86_64-linux with: - os: UbuntuLatest32Cores128G + os: blacksmith-32vcpu-ubuntu-2204 test_aarch64-linux: if: github.event_name == 'merge_group' uses: ./.github/workflows/test.yml needs: build_aarch64-linux with: - os: UbuntuLatest32Cores128GArm + os: blacksmith-32vcpu-ubuntu-2204-arm test_x86_64-darwin: if: github.event_name == 'merge_group' @@ -76,7 +76,7 @@ jobs: vm_tests_smoke: needs: build_x86_64-linux - runs-on: UbuntuLatest32Cores128G + runs-on: blacksmith-32vcpu-ubuntu-2204 steps: - uses: actions/checkout@v4 - uses: DeterminateSystems/nix-installer-action@main @@ -94,7 +94,7 @@ jobs: 
vm_tests_all: if: github.event_name == 'merge_group' needs: build_x86_64-linux - runs-on: UbuntuLatest32Cores128G + runs-on: blacksmith-32vcpu-ubuntu-2204 steps: - uses: actions/checkout@v4 - uses: DeterminateSystems/nix-installer-action@main @@ -114,7 +114,7 @@ jobs: flake_regressions: if: github.event_name == 'merge_group' needs: build_x86_64-linux - runs-on: UbuntuLatest32Cores128G + runs-on: blacksmith-32vcpu-ubuntu-2204 steps: - name: Checkout nix uses: actions/checkout@v4 From 3e39ac4fc9b26cee32d743fafd8895ea0f642887 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 20 Feb 2025 21:06:17 +0100 Subject: [PATCH 176/361] Don't run vm_tests_smoke in the merge queue --- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 95d6633fd00..98efc7a579d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -75,6 +75,7 @@ jobs: os: macos-latest vm_tests_smoke: + if: github.event_name != 'merge_group' needs: build_x86_64-linux runs-on: blacksmith-32vcpu-ubuntu-2204 steps: From 835b3b4efe714ea0457ad627a32533e480192959 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 20 Feb 2025 21:39:01 +0100 Subject: [PATCH 177/361] Fix vm_tests_all --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 98efc7a579d..60b75a439b2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -109,7 +109,7 @@ jobs: .hydraJobs.tests | with_entries(select(.value.type == "derivation")) | keys[] - | ".#hydraJobs.tests." + .' + | ".#hydraJobs.tests." + .' \ | head -n5) # FIXME: for testing the merge queue flake_regressions: From 013c09948ebff7b887c5ae9c444db8c17cf09c3e Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Thu, 20 Feb 2025 17:13:19 -0500 Subject: [PATCH 178/361] Create an initial propose-release workflow --- .github/workflows/propose-release.yml | 29 +++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 .github/workflows/propose-release.yml diff --git a/.github/workflows/propose-release.yml b/.github/workflows/propose-release.yml new file mode 100644 index 00000000000..1ba7f43e7db --- /dev/null +++ b/.github/workflows/propose-release.yml @@ -0,0 +1,29 @@ +on: + workflow_dispatch: + inputs: + reference-id: + type: string + required: true + version: + type: string + required: true + +concurrency: + group: ${{ github.workflow }} + cancel-in-progress: true + +jobs: + propose-release: + uses: DeterminateSystems/propose-release/.github/workflows/workflow.yml@main + permissions: + id-token: write + contents: write + pull-requests: write + with: + update-flake: false + reference-id: ${{ inputs.reference-id }} + version: ${{ inputs.version }} + extra-commands-early: | + echo ${{ inputs.version }} > .version-determinate + git add .version-determinate + git commit -m "Set .version-determinate to ${{ inputs.version }}" From 592994d2e1a1f796454a21a05d18495489335e8e Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Thu, 20 Feb 2025 17:15:51 -0500 Subject: [PATCH 179/361] Fixup the release workflow --- .github/workflows/publish.yml | 37 +++++++++++++++++++---------------- 1 file changed, 20 insertions(+), 17 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 839ace59492..00ca3ec534b 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -1,20 +1,23 @@ -name: Publish on FlakeHub +name: Release on: - push: - 
tags: - - "v*.*.*" + release: + types: + - released -publish: - runs-on: ubuntu-latest - permissions: - contents: read - id-token: write - steps: - - uses: actions/checkout@v4 - - uses: DeterminateSystems/nix-installer-action@main - - uses: "DeterminateSystems/flakehub-push@main" - with: - visibility: "private" - name: "DeterminateSystems/nix-priv" - tag: "${{ github.ref_name }}" +jobs: + publish: + if: (!github.repository.fork && (github.ref == format('refs/heads/{0}', github.event.repository.default_branch) || startsWith(github.ref, 'refs/tags/'))) + environment: ${{ github.event_name == 'release' && 'production' || '' }} + runs-on: ubuntu-latest + permissions: + contents: read + id-token: write + steps: + - uses: actions/checkout@v4 + - uses: DeterminateSystems/nix-installer-action@main + - uses: "DeterminateSystems/flakehub-push@main" + with: + rolling: ${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) }} + visibility: "private" + tag: "${{ github.ref_name }}" From 0dc5b249ff05bbfbd659805aee4261065d5826c3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 5 Nov 2024 16:38:52 +0100 Subject: [PATCH 180/361] Build the Nix manual in CI and deploy to Netlify --- .github/workflows/build.yml | 4 ++++ .github/workflows/ci.yml | 27 +++++++++++++++++++++++++++ packaging/hydra.nix | 9 +++++++++ 3 files changed, 40 insertions(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 7e3c9872d54..441f23c5adb 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -4,6 +4,10 @@ on: os: required: true type: string + manual: + required: false + type: boolean + default: false jobs: diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 60b75a439b2..4397d374721 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -134,3 +134,30 @@ jobs: flakehub: true - uses: DeterminateSystems/flakehub-cache-action@main - run: nix build -L --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH MAX_FLAKES=50 flake-regressions/eval-all.sh + + manual: + if: github.event_name != 'merge_group' + needs: build_x86_64-linux + runs-on: blacksmith + steps: + - name: Checkout nix + uses: actions/checkout@v4 + - uses: DeterminateSystems/nix-installer-action@main + with: + flakehub: true + - uses: DeterminateSystems/flakehub-cache-action@main + - name: Build manual + run: nix build .#hydraJobs.manual + - uses: nwtgck/actions-netlify@v3.0 + with: + publish-dir: './result/share/doc/nix/manual' + production-branch: detsys-main + github-token: ${{ secrets.GITHUB_TOKEN }} + deploy-message: "Deploy from GitHub Actions" + enable-pull-request-comment: true + enable-commit-comment: true + enable-commit-status: true + overwrites-pull-request-comment: true + env: + NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} + NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }} diff --git a/packaging/hydra.nix b/packaging/hydra.nix index debd98cf2aa..4f9039cd377 100644 --- a/packaging/hydra.nix +++ b/packaging/hydra.nix @@ -181,6 +181,15 @@ in # Nix's manual manual = nixpkgsFor.x86_64-linux.native.nixComponents.nix-manual; + manualTarball = + with nixpkgsFor.x86_64-linux.native; + runCommand "determinate-nix-manual-${self.hydraJobs.manual.version}" + { } + '' + mkdir -p $out/tarballs + tar cvfz $out/tarballs/$name.tar.gz -C ${self.hydraJobs.manual}/share/doc/nix/manual . --transform "s/^./$name/" + ''; + # API docs for Nix's unstable internal C++ interfaces. 
internal-api-docs = nixpkgsFor.x86_64-linux.native.nixComponents.nix-internal-api-docs; From c23a2cdc4a541a787fc8f3f76dbeddb42b849b02 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 21 Feb 2025 00:50:04 +0100 Subject: [PATCH 181/361] Hack --- .github/workflows/ci.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4397d374721..483b787dcf4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -12,6 +12,9 @@ on: permissions: id-token: "write" contents: "read" + pull-requests: "write" + statuses: "write" + deployments: "write" jobs: eval: From 53c03a0161478ce94874110abf34229cb0de1bbd Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 21 Feb 2025 01:50:29 +0100 Subject: [PATCH 182/361] Cleanup --- .github/workflows/build.yml | 4 ---- packaging/hydra.nix | 9 --------- 2 files changed, 13 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 441f23c5adb..7e3c9872d54 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -4,10 +4,6 @@ on: os: required: true type: string - manual: - required: false - type: boolean - default: false jobs: diff --git a/packaging/hydra.nix b/packaging/hydra.nix index 4f9039cd377..debd98cf2aa 100644 --- a/packaging/hydra.nix +++ b/packaging/hydra.nix @@ -181,15 +181,6 @@ in # Nix's manual manual = nixpkgsFor.x86_64-linux.native.nixComponents.nix-manual; - manualTarball = - with nixpkgsFor.x86_64-linux.native; - runCommand "determinate-nix-manual-${self.hydraJobs.manual.version}" - { } - '' - mkdir -p $out/tarballs - tar cvfz $out/tarballs/$name.tar.gz -C ${self.hydraJobs.manual}/share/doc/nix/manual . --transform "s/^./$name/" - ''; - # API docs for Nix's unstable internal C++ interfaces. 
internal-api-docs = nixpkgsFor.x86_64-linux.native.nixComponents.nix-internal-api-docs; From dece94fe2598e82b094d1b761631bb7b9eb2e49c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 21 Feb 2025 02:07:16 +0100 Subject: [PATCH 183/361] Restrict permissions --- .github/workflows/ci.yml | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 483b787dcf4..c3a96704f77 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -12,9 +12,6 @@ on: permissions: id-token: "write" contents: "read" - pull-requests: "write" - statuses: "write" - deployments: "write" jobs: eval: @@ -142,6 +139,12 @@ jobs: if: github.event_name != 'merge_group' needs: build_x86_64-linux runs-on: blacksmith + permissions: + id-token: "write" + contents: "read" + pull-requests: "write" + statuses: "write" + deployments: "write" steps: - name: Checkout nix uses: actions/checkout@v4 From 3f59f80e6c3246abd7bd85cb59603a596fa448b7 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 21 Feb 2025 13:07:37 +0100 Subject: [PATCH 184/361] Fix location of release-notes-determinate --- doc/manual/{src => source}/release-notes-determinate/changes.md | 0 doc/manual/{src => source}/release-notes-determinate/index.md | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename doc/manual/{src => source}/release-notes-determinate/changes.md (100%) rename doc/manual/{src => source}/release-notes-determinate/index.md (100%) diff --git a/doc/manual/src/release-notes-determinate/changes.md b/doc/manual/source/release-notes-determinate/changes.md similarity index 100% rename from doc/manual/src/release-notes-determinate/changes.md rename to doc/manual/source/release-notes-determinate/changes.md diff --git a/doc/manual/src/release-notes-determinate/index.md b/doc/manual/source/release-notes-determinate/index.md similarity index 100% rename from doc/manual/src/release-notes-determinate/index.md rename to doc/manual/source/release-notes-determinate/index.md From 237c9bda798e40eb348637e5e29e0e0518c65759 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 21 Feb 2025 13:20:05 +0100 Subject: [PATCH 185/361] Add release notes for 1.0.0 --- doc/manual/source/SUMMARY.md.in | 2 +- doc/manual/source/release-notes-determinate/rl-1.0.0.md | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 doc/manual/source/release-notes-determinate/rl-1.0.0.md diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index f5d19cc6532..3dd4e0977a4 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -130,7 +130,7 @@ - [Contributing](development/contributing.md) - [Determinate Nix Releases Notes](release-notes-determinate/index.md) - [Changes between Nix and Determinate Nix](release-notes-determinate/changes.md) - - [Release 1.0 (2024-11-??)](release-notes-determinate/rl-1.0.md) + - [Release 1.0.0 (2025-??-??)](release-notes-determinate/rl-1.0.0.md) - [Nix Releases Notes](release-notes/index.md) {{#include ./SUMMARY-rl-next.md}} - [Release 2.26 (2025-01-22)](release-notes/rl-2.26.md) diff --git a/doc/manual/source/release-notes-determinate/rl-1.0.0.md b/doc/manual/source/release-notes-determinate/rl-1.0.0.md new file mode 100644 index 00000000000..16dcc9d3e9f --- /dev/null +++ b/doc/manual/source/release-notes-determinate/rl-1.0.0.md @@ -0,0 +1,5 @@ +# Release 1.0.0 (2025-??-??) + +* Initial release of Determinate Nix. + +* Based on [upstream Nix 2.26.2](../release-notes/rl-2.26.md). 
From cd1935468d7e6a38c9dbb7212c87a5122afc47f7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20M=C3=B6ller?= Date: Fri, 21 Feb 2025 11:49:00 +0100 Subject: [PATCH 186/361] Fix perl store bindings When #9863 converted the `Nix::Store` free functions into member functions, the implicit `this` argument was not accounted for when iterating over the variable number of arguments in some functions. (cherry picked from commit 5cf9e18167b86f39864e39e5fe129e5f6c1a15e0) --- src/perl/lib/Nix/Store.xs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/perl/lib/Nix/Store.xs b/src/perl/lib/Nix/Store.xs index 172c3500de0..cfc3ac034a3 100644 --- a/src/perl/lib/Nix/Store.xs +++ b/src/perl/lib/Nix/Store.xs @@ -194,7 +194,7 @@ StoreWrapper::computeFSClosure(int flipDirection, int includeOutputs, ...) PPCODE: try { StorePathSet paths; - for (int n = 2; n < items; ++n) + for (int n = 3; n < items; ++n) THIS->store->computeFSClosure(THIS->store->parseStorePath(SvPV_nolen(ST(n))), paths, flipDirection, includeOutputs); for (auto & i : paths) XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0))); @@ -208,7 +208,7 @@ StoreWrapper::topoSortPaths(...) PPCODE: try { StorePathSet paths; - for (int n = 0; n < items; ++n) paths.insert(THIS->store->parseStorePath(SvPV_nolen(ST(n)))); + for (int n = 1; n < items; ++n) paths.insert(THIS->store->parseStorePath(SvPV_nolen(ST(n)))); auto sorted = THIS->store->topoSortPaths(paths); for (auto & i : sorted) XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0))); @@ -234,7 +234,7 @@ StoreWrapper::exportPaths(int fd, ...) PPCODE: try { StorePathSet paths; - for (int n = 1; n < items; ++n) paths.insert(THIS->store->parseStorePath(SvPV_nolen(ST(n)))); + for (int n = 2; n < items; ++n) paths.insert(THIS->store->parseStorePath(SvPV_nolen(ST(n)))); FdSink sink(fd); THIS->store->exportPaths(paths, sink); } catch (Error & e) { From d712540206fb40d3c26809bdcdd0479a37072df9 Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Fri, 21 Feb 2025 09:09:05 -0800 Subject: [PATCH 187/361] wip: delete unnecessary CI for now --- .github/workflows/ci.yml | 169 ---------------------------------- .github/workflows/labels.yml | 24 ----- .github/workflows/publish.yml | 23 ----- .github/workflows/test.yml | 21 ----- 4 files changed, 237 deletions(-) delete mode 100644 .github/workflows/ci.yml delete mode 100644 .github/workflows/labels.yml delete mode 100644 .github/workflows/publish.yml delete mode 100644 .github/workflows/test.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index c3a96704f77..00000000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,169 +0,0 @@ -name: "CI" - -on: - pull_request: - push: - branches: - - detsys-main - - main - - master - merge_group: - -permissions: - id-token: "write" - contents: "read" - -jobs: - eval: - runs-on: blacksmith-32vcpu-ubuntu-2204 - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - uses: DeterminateSystems/nix-installer-action@main - with: - flakehub: true - - run: nix flake show --all-systems --json - - build_x86_64-linux: - uses: ./.github/workflows/build.yml - with: - os: blacksmith-32vcpu-ubuntu-2204 - - build_aarch64-linux: - if: github.event_name == 'merge_group' - uses: ./.github/workflows/build.yml - with: - os: blacksmith-32vcpu-ubuntu-2204-arm - - build_x86_64-darwin: - if: github.event_name == 'merge_group' - uses: ./.github/workflows/build.yml - with: - os: macos-13 - - build_aarch64-darwin: - uses: 
./.github/workflows/build.yml - with: - os: macos-latest - - test_x86_64-linux: - uses: ./.github/workflows/test.yml - needs: build_x86_64-linux - with: - os: blacksmith-32vcpu-ubuntu-2204 - - test_aarch64-linux: - if: github.event_name == 'merge_group' - uses: ./.github/workflows/test.yml - needs: build_aarch64-linux - with: - os: blacksmith-32vcpu-ubuntu-2204-arm - - test_x86_64-darwin: - if: github.event_name == 'merge_group' - uses: ./.github/workflows/test.yml - needs: build_aarch64-darwin - with: - os: macos-13 - - test_aarch64-darwin: - if: github.event_name == 'merge_group' - uses: ./.github/workflows/test.yml - needs: build_aarch64-darwin - with: - os: macos-latest - - vm_tests_smoke: - if: github.event_name != 'merge_group' - needs: build_x86_64-linux - runs-on: blacksmith-32vcpu-ubuntu-2204 - steps: - - uses: actions/checkout@v4 - - uses: DeterminateSystems/nix-installer-action@main - with: - flakehub: true - - uses: DeterminateSystems/flakehub-cache-action@main - - run: | - nix build -L \ - .#hydraJobs.tests.functional_user \ - .#hydraJobs.tests.githubFlakes \ - .#hydraJobs.tests.nix-docker \ - .#hydraJobs.tests.tarballFlakes \ - ; - - vm_tests_all: - if: github.event_name == 'merge_group' - needs: build_x86_64-linux - runs-on: blacksmith-32vcpu-ubuntu-2204 - steps: - - uses: actions/checkout@v4 - - uses: DeterminateSystems/nix-installer-action@main - with: - flakehub: true - - uses: DeterminateSystems/flakehub-cache-action@main - - run: | - nix build -L --keep-going \ - $(nix flake show --json \ - | jq -r ' - .hydraJobs.tests - | with_entries(select(.value.type == "derivation")) - | keys[] - | ".#hydraJobs.tests." + .' \ - | head -n5) # FIXME: for testing the merge queue - - flake_regressions: - if: github.event_name == 'merge_group' - needs: build_x86_64-linux - runs-on: blacksmith-32vcpu-ubuntu-2204 - steps: - - name: Checkout nix - uses: actions/checkout@v4 - - name: Checkout flake-regressions - uses: actions/checkout@v4 - with: - repository: DeterminateSystems/flake-regressions - path: flake-regressions - - name: Checkout flake-regressions-data - uses: actions/checkout@v4 - with: - repository: DeterminateSystems/flake-regressions-data - path: flake-regressions/tests - - uses: DeterminateSystems/nix-installer-action@main - with: - flakehub: true - - uses: DeterminateSystems/flakehub-cache-action@main - - run: nix build -L --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH MAX_FLAKES=50 flake-regressions/eval-all.sh - - manual: - if: github.event_name != 'merge_group' - needs: build_x86_64-linux - runs-on: blacksmith - permissions: - id-token: "write" - contents: "read" - pull-requests: "write" - statuses: "write" - deployments: "write" - steps: - - name: Checkout nix - uses: actions/checkout@v4 - - uses: DeterminateSystems/nix-installer-action@main - with: - flakehub: true - - uses: DeterminateSystems/flakehub-cache-action@main - - name: Build manual - run: nix build .#hydraJobs.manual - - uses: nwtgck/actions-netlify@v3.0 - with: - publish-dir: './result/share/doc/nix/manual' - production-branch: detsys-main - github-token: ${{ secrets.GITHUB_TOKEN }} - deploy-message: "Deploy from GitHub Actions" - enable-pull-request-comment: true - enable-commit-comment: true - enable-commit-status: true - overwrites-pull-request-comment: true - env: - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }} diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml deleted file mode 100644 index 23a5d9e51fc..00000000000 
--- a/.github/workflows/labels.yml +++ /dev/null @@ -1,24 +0,0 @@ -name: "Label PR" - -on: - pull_request_target: - types: [edited, opened, synchronize, reopened] - -# WARNING: -# When extending this action, be aware that $GITHUB_TOKEN allows some write -# access to the GitHub API. This means that it should not evaluate user input in -# a way that allows code injection. - -permissions: - contents: read - pull-requests: write - -jobs: - labels: - runs-on: ubuntu-24.04 - if: github.repository_owner == 'NixOS' - steps: - - uses: actions/labeler@v5 - with: - repo-token: ${{ secrets.GITHUB_TOKEN }} - sync-labels: false diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml deleted file mode 100644 index 00ca3ec534b..00000000000 --- a/.github/workflows/publish.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: Release - -on: - release: - types: - - released - -jobs: - publish: - if: (!github.repository.fork && (github.ref == format('refs/heads/{0}', github.event.repository.default_branch) || startsWith(github.ref, 'refs/tags/'))) - environment: ${{ github.event_name == 'release' && 'production' || '' }} - runs-on: ubuntu-latest - permissions: - contents: read - id-token: write - steps: - - uses: actions/checkout@v4 - - uses: DeterminateSystems/nix-installer-action@main - - uses: "DeterminateSystems/flakehub-push@main" - with: - rolling: ${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) }} - visibility: "private" - tag: "${{ github.ref_name }}" diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml deleted file mode 100644 index e58827a9c06..00000000000 --- a/.github/workflows/test.yml +++ /dev/null @@ -1,21 +0,0 @@ -on: - workflow_call: - inputs: - os: - required: true - type: string - -jobs: - - tests: - strategy: - fail-fast: false - runs-on: ${{ inputs.os }} - timeout-minutes: 60 - steps: - - uses: actions/checkout@v4 - - uses: DeterminateSystems/nix-installer-action@main - with: - flakehub: true - - uses: DeterminateSystems/flakehub-cache-action@main - - run: nix flake check -L From b9e654819ab30dec579d2860c94d092695ca259e Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Fri, 21 Feb 2025 12:26:28 -0500 Subject: [PATCH 188/361] Include only 2.26 in the sidebar release notes --- doc/manual/source/SUMMARY.md.in | 59 +-------------------------------- 1 file changed, 1 insertion(+), 58 deletions(-) diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index f5d19cc6532..066bc04c39d 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -131,63 +131,6 @@ - [Determinate Nix Releases Notes](release-notes-determinate/index.md) - [Changes between Nix and Determinate Nix](release-notes-determinate/changes.md) - [Release 1.0 (2024-11-??)](release-notes-determinate/rl-1.0.md) -- [Nix Releases Notes](release-notes/index.md) +- [Nix Release Notes](release-notes/index.md) {{#include ./SUMMARY-rl-next.md}} - [Release 2.26 (2025-01-22)](release-notes/rl-2.26.md) - - [Release 2.25 (2024-11-07)](release-notes/rl-2.25.md) - - [Release 2.24 (2024-07-31)](release-notes/rl-2.24.md) - - [Release 2.23 (2024-06-03)](release-notes/rl-2.23.md) - - [Release 2.22 (2024-04-23)](release-notes/rl-2.22.md) - - [Release 2.21 (2024-03-11)](release-notes/rl-2.21.md) - - [Release 2.20 (2024-01-29)](release-notes/rl-2.20.md) - - [Release 2.19 (2023-11-17)](release-notes/rl-2.19.md) - - [Release 2.18 (2023-09-20)](release-notes/rl-2.18.md) - - [Release 2.17 (2023-07-24)](release-notes/rl-2.17.md) - - [Release 2.16 
(2023-05-31)](release-notes/rl-2.16.md) - - [Release 2.15 (2023-04-11)](release-notes/rl-2.15.md) - - [Release 2.14 (2023-02-28)](release-notes/rl-2.14.md) - - [Release 2.13 (2023-01-17)](release-notes/rl-2.13.md) - - [Release 2.12 (2022-12-06)](release-notes/rl-2.12.md) - - [Release 2.11 (2022-08-25)](release-notes/rl-2.11.md) - - [Release 2.10 (2022-07-11)](release-notes/rl-2.10.md) - - [Release 2.9 (2022-05-30)](release-notes/rl-2.9.md) - - [Release 2.8 (2022-04-19)](release-notes/rl-2.8.md) - - [Release 2.7 (2022-03-07)](release-notes/rl-2.7.md) - - [Release 2.6 (2022-01-24)](release-notes/rl-2.6.md) - - [Release 2.5 (2021-12-13)](release-notes/rl-2.5.md) - - [Release 2.4 (2021-11-01)](release-notes/rl-2.4.md) - - [Release 2.3 (2019-09-04)](release-notes/rl-2.3.md) - - [Release 2.2 (2019-01-11)](release-notes/rl-2.2.md) - - [Release 2.1 (2018-09-02)](release-notes/rl-2.1.md) - - [Release 2.0 (2018-02-22)](release-notes/rl-2.0.md) - - [Release 1.11.10 (2017-06-12)](release-notes/rl-1.11.10.md) - - [Release 1.11 (2016-01-19)](release-notes/rl-1.11.md) - - [Release 1.10 (2015-09-03)](release-notes/rl-1.10.md) - - [Release 1.9 (2015-06-12)](release-notes/rl-1.9.md) - - [Release 1.8 (2014-12-14)](release-notes/rl-1.8.md) - - [Release 1.7 (2014-04-11)](release-notes/rl-1.7.md) - - [Release 1.6.1 (2013-10-28)](release-notes/rl-1.6.1.md) - - [Release 1.6 (2013-09-10)](release-notes/rl-1.6.md) - - [Release 1.5.2 (2013-05-13)](release-notes/rl-1.5.2.md) - - [Release 1.5 (2013-02-27)](release-notes/rl-1.5.md) - - [Release 1.4 (2013-02-26)](release-notes/rl-1.4.md) - - [Release 1.3 (2013-01-04)](release-notes/rl-1.3.md) - - [Release 1.2 (2012-12-06)](release-notes/rl-1.2.md) - - [Release 1.1 (2012-07-18)](release-notes/rl-1.1.md) - - [Release 1.0 (2012-05-11)](release-notes/rl-1.0.md) - - [Release 0.16 (2010-08-17)](release-notes/rl-0.16.md) - - [Release 0.15 (2010-03-17)](release-notes/rl-0.15.md) - - [Release 0.14 (2010-02-04)](release-notes/rl-0.14.md) - - [Release 0.13 (2009-11-05)](release-notes/rl-0.13.md) - - [Release 0.12 (2008-11-20)](release-notes/rl-0.12.md) - - [Release 0.11 (2007-12-31)](release-notes/rl-0.11.md) - - [Release 0.10.1 (2006-10-11)](release-notes/rl-0.10.1.md) - - [Release 0.10 (2006-10-06)](release-notes/rl-0.10.md) - - [Release 0.9.2 (2005-09-21)](release-notes/rl-0.9.2.md) - - [Release 0.9.1 (2005-09-20)](release-notes/rl-0.9.1.md) - - [Release 0.9 (2005-09-16)](release-notes/rl-0.9.md) - - [Release 0.8.1 (2005-04-13)](release-notes/rl-0.8.1.md) - - [Release 0.8 (2005-04-11)](release-notes/rl-0.8.md) - - [Release 0.7 (2005-01-12)](release-notes/rl-0.7.md) - - [Release 0.6 (2004-11-14)](release-notes/rl-0.6.md) - - [Release 0.5 and earlier](release-notes/rl-0.5.md) From 2f64b0ff101c9dbecb2d3f0822ceb5bcbfd81964 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Fri, 21 Feb 2025 12:30:42 -0500 Subject: [PATCH 189/361] Provide external link instead of internal release notes link --- doc/manual/source/development/experimental-features.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/source/development/experimental-features.md b/doc/manual/source/development/experimental-features.md index ad5cffa91ee..ffcd9f1a80f 100644 --- a/doc/manual/source/development/experimental-features.md +++ b/doc/manual/source/development/experimental-features.md @@ -6,7 +6,7 @@ Experimental features are considered unstable, which means that they can be chan Users must explicitly enable them by toggling the associated [experimental feature 
flags](@docroot@/command-ref/conf-file.md#conf-experimental-features). This allows accessing unstable functionality without unwittingly relying on it. -Experimental feature flags were first introduced in [Nix 2.4](@docroot@/release-notes/rl-2.4.md). +Experimental feature flags were first introduced in [Nix 2.4](https://nix.dev/manual/nix/2.24/release-notes/rl-2.4). Before that, Nix did have experimental features, but they were not guarded by flags and were merely documented as unstable. This was a source of confusion and controversy. From 2f70d15f7f5886a1e3a60124823d2e560070c488 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Fri, 21 Feb 2025 12:36:08 -0500 Subject: [PATCH 190/361] Use /latest URL rather than version specific --- doc/manual/source/development/experimental-features.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/source/development/experimental-features.md b/doc/manual/source/development/experimental-features.md index ffcd9f1a80f..56a45b23890 100644 --- a/doc/manual/source/development/experimental-features.md +++ b/doc/manual/source/development/experimental-features.md @@ -6,7 +6,7 @@ Experimental features are considered unstable, which means that they can be chan Users must explicitly enable them by toggling the associated [experimental feature flags](@docroot@/command-ref/conf-file.md#conf-experimental-features). This allows accessing unstable functionality without unwittingly relying on it. -Experimental feature flags were first introduced in [Nix 2.4](https://nix.dev/manual/nix/2.24/release-notes/rl-2.4). +Experimental feature flags were first introduced in [Nix 2.4](https://nix.dev/manual/nix/latest/release-notes/rl-2.4). Before that, Nix did have experimental features, but they were not guarded by flags and were merely documented as unstable. This was a source of confusion and controversy. 
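The documentation hunk above points readers at the `experimental-features` setting for opting in to unstable functionality. As an illustrative aside (not part of this patch series), a minimal sketch of the two usual ways to toggle such a flag — the feature names used here are only examples:

  # One-off, for a single invocation:
  nix --extra-experimental-features 'nix-command flakes' flake show

  # Persistently, in the user's nix.conf (or /etc/nix/nix.conf system-wide):
  mkdir -p ~/.config/nix
  echo 'extra-experimental-features = nix-command flakes' >> ~/.config/nix/nix.conf

Either form unlocks the guarded functionality explicitly, which is the opt-in behaviour the patched paragraph describes.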
From e77d1a760eb75dc91a9288f322ba7e30d9de4888 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Fri, 21 Feb 2025 12:38:05 -0500 Subject: [PATCH 191/361] Fix release notes version list --- doc/manual/source/SUMMARY.md.in | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index 36bc18fde92..a6f55853e19 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -130,8 +130,7 @@ - [Contributing](development/contributing.md) - [Determinate Nix Releases Notes](release-notes-determinate/index.md) - [Changes between Nix and Determinate Nix](release-notes-determinate/changes.md) - - [Release 1.0 (2024-11-??)](release-notes-determinate/rl-1.0.md) + - [Release 1.0.0 (2025-??-??)](release-notes-determinate/rl-1.0.0.md) - [Nix Release Notes](release-notes/index.md) {{#include ./SUMMARY-rl-next.md}} - - [Release 1.0.0 (2025-??-??)](release-notes-determinate/rl-1.0.0.md) - [Release 2.26 (2025-01-22)](release-notes/rl-2.26.md) From d6bd787e5e4081767a2ee13d9a0f52213ccdaaa8 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Fri, 21 Feb 2025 12:45:39 -0500 Subject: [PATCH 192/361] s/releases notes/release notes --- doc/manual/source/SUMMARY.md.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index a6f55853e19..64447e61146 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -128,7 +128,7 @@ - [C++ style guide](development/cxx.md) - [Experimental Features](development/experimental-features.md) - [Contributing](development/contributing.md) -- [Determinate Nix Releases Notes](release-notes-determinate/index.md) +- [Determinate Nix Release Notes](release-notes-determinate/index.md) - [Changes between Nix and Determinate Nix](release-notes-determinate/changes.md) - [Release 1.0.0 (2025-??-??)](release-notes-determinate/rl-1.0.0.md) - [Nix Release Notes](release-notes/index.md) From 69553dfc36b650405cf02675873d51f654d23b06 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 21 Feb 2025 18:50:06 +0100 Subject: [PATCH 193/361] Mark the nix CLI as *the* interface in the manual, deprecate nix-* --- doc/manual/source/SUMMARY.md.in | 6 +++--- doc/manual/source/command-ref/experimental-commands.md | 8 -------- doc/manual/source/command-ref/subcommands.md | 3 +++ 3 files changed, 6 insertions(+), 11 deletions(-) delete mode 100644 doc/manual/source/command-ref/experimental-commands.md create mode 100644 doc/manual/source/command-ref/subcommands.md diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index 64447e61146..228bbc88206 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -54,7 +54,9 @@ - [Command Reference](command-ref/index.md) - [Common Options](command-ref/opt-common.md) - [Common Environment Variables](command-ref/env-common.md) - - [Main Commands](command-ref/main-commands.md) + - [Subcommands](command-ref/subcommands.md) +{{#include ./command-ref/new-cli/SUMMARY.md}} + - [Deprecated Commands](command-ref/main-commands.md) - [nix-build](command-ref/nix-build.md) - [nix-shell](command-ref/nix-shell.md) - [nix-store](command-ref/nix-store.md) @@ -98,8 +100,6 @@ - [nix-hash](command-ref/nix-hash.md) - [nix-instantiate](command-ref/nix-instantiate.md) - [nix-prefetch-url](command-ref/nix-prefetch-url.md) - - [Experimental Commands](command-ref/experimental-commands.md) -{{#include ./command-ref/new-cli/SUMMARY.md}} - 
[Files](command-ref/files.md) - [nix.conf](command-ref/conf-file.md) - [Profiles](command-ref/files/profiles.md) diff --git a/doc/manual/source/command-ref/experimental-commands.md b/doc/manual/source/command-ref/experimental-commands.md deleted file mode 100644 index 1190729a230..00000000000 --- a/doc/manual/source/command-ref/experimental-commands.md +++ /dev/null @@ -1,8 +0,0 @@ -# Experimental Commands - -This section lists [experimental commands](@docroot@/development/experimental-features.md#xp-feature-nix-command). - -> **Warning** -> -> These commands may be removed in the future, or their syntax may -> change in incompatible ways. diff --git a/doc/manual/source/command-ref/subcommands.md b/doc/manual/source/command-ref/subcommands.md new file mode 100644 index 00000000000..6a26732338d --- /dev/null +++ b/doc/manual/source/command-ref/subcommands.md @@ -0,0 +1,3 @@ +# Subcommands + +This section lists all the subcommands of the `nix` CLI. From a1d27ff6d21ffbb07411d3f2a2ca3034b7c320a2 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 21 Feb 2025 19:13:51 +0100 Subject: [PATCH 194/361] Include Determinate Nix version number in the manual --- doc/manual/{book.toml => book.toml.in} | 2 +- doc/manual/meson.build | 6 +++++- doc/manual/package.nix | 1 + 3 files changed, 7 insertions(+), 2 deletions(-) rename doc/manual/{book.toml => book.toml.in} (95%) diff --git a/doc/manual/book.toml b/doc/manual/book.toml.in similarity index 95% rename from doc/manual/book.toml rename to doc/manual/book.toml.in index 3b4044fbac5..13c553f015a 100644 --- a/doc/manual/book.toml +++ b/doc/manual/book.toml.in @@ -1,5 +1,5 @@ [book] -title = "Determinate Nix Reference Manual" +title = "Determinate Nix Reference Manual @version@" src = "source" [output.html] diff --git a/doc/manual/meson.build b/doc/manual/meson.build index f0e71458a5d..c251fadb15f 100644 --- a/doc/manual/meson.build +++ b/doc/manual/meson.build @@ -4,6 +4,8 @@ project('nix-manual', license : 'LGPL-2.1-or-later', ) +fs = import('fs') + nix = find_program('nix', native : true) mdbook = find_program('mdbook', native : true) @@ -83,6 +85,7 @@ manual = custom_target( ''' @0@ @INPUT0@ @CURRENT_SOURCE_DIR@ > @DEPFILE@ @0@ @INPUT1@ summary @2@ < @CURRENT_SOURCE_DIR@/source/SUMMARY.md.in > @2@/source/SUMMARY.md + sed -e 's|@version@|@3@|g' < @INPUT2@ > @2@/book.toml rsync -r --include='*.md' @CURRENT_SOURCE_DIR@/ @2@/ (cd @2@; RUST_LOG=warn @1@ build -d @2@ 3>&2 2>&1 1>&3) | { grep -Fv "because fragment resolution isn't implemented" || :; } 3>&2 2>&1 1>&3 rm -rf @2@/manual @@ -92,12 +95,13 @@ manual = custom_target( python.full_path(), mdbook.full_path(), meson.current_build_dir(), + fs.read('../../.version-determinate').strip(), ), ], input : [ generate_manual_deps, 'substitute.py', - 'book.toml', + 'book.toml.in', 'anchors.jq', 'custom.css', nix3_cli_files, diff --git a/doc/manual/package.nix b/doc/manual/package.nix index 8f5d0dfe137..6d93e6f1a5d 100644 --- a/doc/manual/package.nix +++ b/doc/manual/package.nix @@ -30,6 +30,7 @@ mkMesonDerivation (finalAttrs: { fileset.difference (fileset.unions [ ../../.version + ../../.version-determinate # Too many different types of files to filter for now ../../doc/manual ./. 
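Patch 194 above threads the Determinate Nix version into the manual by renaming book.toml to book.toml.in and having the meson rule substitute @version@ with sed, the value being read from .version-determinate. A rough stand-alone sketch of what that substitution does, run from the repository root (paths come from the patch; the version value is only an example, since the file still reads 0.0.1 at this point in the series):

  version="$(tr -d '[:space:]' < .version-determinate)"   # e.g. "0.37.0"
  sed -e "s|@version@|${version}|g" doc/manual/book.toml.in > doc/manual/book.toml
  head -n 2 doc/manual/book.toml
  # [book]
  # title = "Determinate Nix Reference Manual 0.37.0"

The whitespace trim mirrors meson's fs.read(...).strip(), so a trailing newline in the version file never leaks into the rendered title. (A later patch in the series reorders the title string, but the substitution mechanism stays the same.)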
From 247ec94041baf5d959ce9b08897819ad4ee85d8a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 21 Feb 2025 19:19:02 +0100 Subject: [PATCH 195/361] Remove unnecessary ./.version-determinate symlink --- src/libstore/.version-determinate | 1 - src/libstore/meson.build | 2 +- src/libstore/package.nix | 1 - 3 files changed, 1 insertion(+), 3 deletions(-) delete mode 120000 src/libstore/.version-determinate diff --git a/src/libstore/.version-determinate b/src/libstore/.version-determinate deleted file mode 120000 index c4121e0c32d..00000000000 --- a/src/libstore/.version-determinate +++ /dev/null @@ -1 +0,0 @@ -../../.version-determinate \ No newline at end of file diff --git a/src/libstore/meson.build b/src/libstore/meson.build index aaaa5956d24..85192c2990f 100644 --- a/src/libstore/meson.build +++ b/src/libstore/meson.build @@ -22,7 +22,7 @@ configdata = configuration_data() # TODO rename, because it will conflict with downstream projects configdata.set_quoted('PACKAGE_VERSION', meson.project_version()) -configdata.set_quoted('DETERMINATE_NIX_VERSION', fs.read('.version-determinate').strip()) +configdata.set_quoted('DETERMINATE_NIX_VERSION', fs.read('../../.version-determinate').strip()) configdata.set_quoted('SYSTEM', host_machine.cpu_family() + '-' + host_machine.system()) diff --git a/src/libstore/package.nix b/src/libstore/package.nix index fc68f100b38..543694438fc 100644 --- a/src/libstore/package.nix +++ b/src/libstore/package.nix @@ -39,7 +39,6 @@ mkMesonLibrary (finalAttrs: { ../../.version ./.version ../../.version-determinate - ./.version-determinate ./meson.build ./meson.options ./linux/meson.build From f7aaa319781e708471b751d541953003b6548917 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 21 Feb 2025 19:23:03 +0100 Subject: [PATCH 196/361] Tweak title --- doc/manual/book.toml.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/book.toml.in b/doc/manual/book.toml.in index 13c553f015a..7ecbaab0326 100644 --- a/doc/manual/book.toml.in +++ b/doc/manual/book.toml.in @@ -1,5 +1,5 @@ [book] -title = "Determinate Nix Reference Manual @version@" +title = "Determinate Nix @version@ Reference Manual" src = "source" [output.html] From 86f6902e739295018d933c20fea84b1520463eb7 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 21 Feb 2025 19:09:53 +0000 Subject: [PATCH 197/361] Prepare release v0.37.0 From 2616e857c5ccc2ca02317b5a7b5e18d0dbbb288b Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 21 Feb 2025 19:09:56 +0000 Subject: [PATCH 198/361] Set .version-determinate to 0.37.0 --- .version-determinate | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version-determinate b/.version-determinate index 8acdd82b765..0f1a7dfc7c4 100644 --- a/.version-determinate +++ b/.version-determinate @@ -1 +1 @@ -0.0.1 +0.37.0 From c69d5af1053ed36b3d20c4b2cd84c843ed6f49d2 Mon Sep 17 00:00:00 2001 From: Ivan Trubach Date: Tue, 18 Feb 2025 22:09:05 +0300 Subject: [PATCH 199/361] libstore: fix expected bytes in progress bar (cherry picked from commit eb73bfcf73bae4d6e4d37a4882231cd9cb7fbddd) --- src/libstore/store-api.cc | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 236622eae37..fc3fbcc0fbe 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -230,18 +230,22 @@ void Store::addMultipleToStore( { std::atomic 
nrDone{0}; std::atomic nrFailed{0}; - std::atomic bytesExpected{0}; std::atomic nrRunning{0}; using PathWithInfo = std::pair>; + uint64_t bytesExpected = 0; + std::map infosMap; StorePathSet storePathsToAdd; for (auto & thingToAdd : pathsToCopy) { + bytesExpected += thingToAdd.first.narSize; infosMap.insert_or_assign(thingToAdd.first.path, &thingToAdd); storePathsToAdd.insert(thingToAdd.first.path); } + act.setExpected(actCopyPath, bytesExpected); + auto showProgress = [&, nrTotal = pathsToCopy.size()]() { act.progress(nrDone, nrTotal, nrRunning, nrFailed); }; @@ -259,9 +263,6 @@ void Store::addMultipleToStore( return StorePathSet(); } - bytesExpected += info.narSize; - act.setExpected(actCopyPath, bytesExpected); - return info.references; }, From 8bf0408d3ca2ff4778afbfdfb878d900a918ef0c Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Fri, 21 Feb 2025 14:20:35 -0500 Subject: [PATCH 200/361] Use DetSys logo --- doc/manual/source/favicon.png | Bin 1205 -> 0 bytes doc/manual/source/favicon.svg | 30 +++++++++++++++++++++++++++++- 2 files changed, 29 insertions(+), 1 deletion(-) delete mode 100644 doc/manual/source/favicon.png diff --git a/doc/manual/source/favicon.png b/doc/manual/source/favicon.png deleted file mode 100644 index 1ed2b5fe0fdf7a6144adc5cdfa31b5f553df4610..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1205 zcmeAS@N?(olHy`uVBq!ia0vp^0wB!63?wyl`GbL!Lb6AYF9SoB8UsT^3j@P1pisjL z28L1t28LG&3=CE?7#PG0=IjczVPIf#2=EDU1u6(CKb+Wiwr=O+;uW{c*4&A1Jrz{8 z)4ynQ?dJPcYwy%;xnI8WmUr$t-~6qfnQMIVwv;WuQL*Yq-L5CatL}tUABt^18CZ5O zZ{c;IIY350$$p>|3flP?CbiEsc~jS!_w~D@-8-UT(J^iW{g;gG5$d2=oUQB>1m z@0`tUsjG?>Uj}-le#`xk$|De!wc8%H9(?})|Nq7v4}s!!o9``u`G4xw?{%B*m9M-R zQVF#5P*BAYs3W&O0$NnJ^+EmC2f#3iZ95)VdI0Dcpiyv-WY0P0p1#T_Z>w*?cE6%s z0mXX*O7{cRz=NWC>%+j(ZC*JW9phIsFhqD|Z2-E>D|<5oL#Aiu_JGoT5S2AM9(m_% z^2y&CR-T^|Z`YHa^B;WkcK|~ywEAG>n%i}o z?*m;33^cfn5C+gCK-VVsovq)Bq973HJg^IaA?BI2x8vB0x*d-|{@wK?2;{Xx@L&gd z6Br#}DWLg41A!_7O7;R1&jO{3!1TqiyCldDl>V>-|M%V{f8t%=1*kvR${~LK!l5U+ zE;&bk_oXlWQ^n1wc;Wk}FW#{_g-;-%DvtP?D4mF z3v@eUlDE4HLkFv@2av;A;1O92%udl*pgo2QFoh{WaOgaa%+lY%b!7;P&0 z#MO01K~H({q)lC`X638PfeR-pwsWptDH(9!&Y?@EZXLUJ?%u(R zCvUb+Y&?7Q?A^ncPv1U%{k*+`LB~Wz3l$R`8>LBem9?zY%+A@&Q&cq6v{aosxqG&{ zZmqvtnwWE$-@AKzYU1qb8k-I+dNe7hZPTZ&Q>$J{IWD}l>sQ;cWoc{=r(N6jt?%5r zck}M;`^PElD$%0uJI|=}))z_R^t1Df!;|M3G=KQRx`_L`Zq%NXmz(B1)oy8II+Uax zzII#0=CZrLc%@q#nRs{}%{I%wxhr?~k-oXRTQ>fXWq5hY \ No newline at end of file + + + + + + + + + + + + + + + + + + + + + + + + + + + + + From 6b8a7514983103d326da5ca5a6110e07b747550d Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Fri, 21 Feb 2025 14:26:12 -0500 Subject: [PATCH 201/361] Make image smaller --- doc/manual/custom.css | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/doc/manual/custom.css b/doc/manual/custom.css index 7af150be391..119c6d12543 100644 --- a/doc/manual/custom.css +++ b/doc/manual/custom.css @@ -1,5 +1,5 @@ :root { - --sidebar-width: 23em; + --sidebar-width: 23em; } h1.menu-title::before { @@ -7,11 +7,10 @@ h1.menu-title::before { background-image: url("./favicon.svg"); padding: 1.25em; background-position: center center; - background-size: 2em; + background-size: 1.5em; background-repeat: no-repeat; } - .menu-bar { padding: 0.5em 0em; } @@ -21,13 +20,13 @@ h1.menu-title::before { } h1:not(:first-of-type) { - margin-top: 1.3em; + margin-top: 1.3em; } h2 { - margin-top: 1em; + margin-top: 1em; } .hljs-meta { - user-select: none; + user-select: none; } From 8bc379cad2f6f6807ad8a6f28c1ea865f7cec4b4 Mon Sep 17 
00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 21 Feb 2025 20:13:11 +0000 Subject: [PATCH 202/361] Prepare release v0.37.1 From 0c1e1e65d6975c32862db3bf133312e212542eda Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 21 Feb 2025 20:13:14 +0000 Subject: [PATCH 203/361] Set .version-determinate to 0.37.1 --- .version-determinate | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version-determinate b/.version-determinate index 0f1a7dfc7c4..9b1bb851239 100644 --- a/.version-determinate +++ b/.version-determinate @@ -1 +1 @@ -0.37.0 +0.37.1 From 90581c9d66173ab1e1b92626a4177620a97f6cf2 Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Thu, 20 Feb 2025 14:00:48 -0800 Subject: [PATCH 204/361] Setup uploading PRs, tags, and branches to IDS --- .github/workflows/build.yml | 9 +++- .github/workflows/release-branches.yml | 20 ++++++++ .github/workflows/release-prs.yml | 30 +++++++++++ .github/workflows/release-tags.yml | 18 +++++++ .github/workflows/upload-release.yml | 71 ++++++++++++++++++++++++++ flake.nix | 31 +++++++++++ 6 files changed, 178 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/release-branches.yml create mode 100644 .github/workflows/release-prs.yml create mode 100644 .github/workflows/release-tags.yml create mode 100644 .github/workflows/upload-release.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 7e3c9872d54..f041267474c 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -18,4 +18,11 @@ jobs: with: flakehub: true - uses: DeterminateSystems/flakehub-cache-action@main - - run: nix build . .#binaryTarball -L + - run: echo "system=$(nix eval --impure --raw --expr 'builtins.currentSystem')" >> "$GITHUB_OUTPUT" + id: system + - run: nix build .# .#binaryTarball --no-link -L + - run: nix build .#binaryTarball --out-link tarball + - uses: actions/upload-artifact@v4 + with: + name: ${{ steps.system.outputs.system }} + path: ./tarball/*.xz diff --git a/.github/workflows/release-branches.yml b/.github/workflows/release-branches.yml new file mode 100644 index 00000000000..38e4044edad --- /dev/null +++ b/.github/workflows/release-branches.yml @@ -0,0 +1,20 @@ +name: Release Branch + +concurrency: + group: release + +on: + push: + branches: + # NOTE: make sure any branches here are also valid directory names, + # otherwise creating the directory and uploading to s3 will fail + - "main" + +permissions: + id-token: "write" + contents: "read" + +jobs: + release-branch: + uses: ./.github/workflows/upload-release.yml + secrets: inherit diff --git a/.github/workflows/release-prs.yml b/.github/workflows/release-prs.yml new file mode 100644 index 00000000000..818083c6835 --- /dev/null +++ b/.github/workflows/release-prs.yml @@ -0,0 +1,30 @@ +name: Release PR + +concurrency: + group: release + +on: + pull_request: + types: + - opened + - reopened + - synchronize + - labeled + +permissions: + id-token: "write" + contents: "read" + +jobs: + release-pr: + # Only intra-repo PRs are allowed to have PR artifacts uploaded + # We only want to trigger once the upload once in the case the upload label is added, not when any label is added + if: | + always() && !failure() && !cancelled() + && github.event.pull_request.head.repo.full_name == 'DeterminateSystems/nix-priv' + && ( + (github.event.action == 'labeled' && github.event.label.name == 'upload to s3') + || (github.event.action != 
'labeled' && contains(github.event.pull_request.labels.*.name, 'upload to s3')) + ) + uses: ./.github/workflows/upload-release.yml + secrets: inherit diff --git a/.github/workflows/release-tags.yml b/.github/workflows/release-tags.yml new file mode 100644 index 00000000000..709fbb92a44 --- /dev/null +++ b/.github/workflows/release-tags.yml @@ -0,0 +1,18 @@ +name: Release Tags + +concurrency: + group: release + +on: + push: + tags: + - "v*.*.*" + +permissions: + contents: write # In order to upload artifacts to GitHub releases + id-token: write # In order to request a JWT for AWS auth + +jobs: + release-tag: + uses: ./.github/workflows/upload-release.yml + secrets: inherit diff --git a/.github/workflows/upload-release.yml b/.github/workflows/upload-release.yml new file mode 100644 index 00000000000..bec5816be61 --- /dev/null +++ b/.github/workflows/upload-release.yml @@ -0,0 +1,71 @@ +name: Upload release + +concurrency: + group: upload-release + +on: + workflow_call: + +permissions: + id-token: "write" + contents: "read" + +jobs: + build-x86_64-linux: + uses: ./.github/workflows/build.yml + with: + os: blacksmith-32vcpu-ubuntu-2204 + build-aarch64-linux: + uses: ./.github/workflows/build.yml + with: + os: blacksmith-32vcpu-ubuntu-2204-arm + build-x86_64-darwin: + uses: ./.github/workflows/build.yml + with: + os: macos-13 + build-aarch64-darwin: + uses: ./.github/workflows/build.yml + with: + os: macos-latest + + release: + runs-on: ubuntu-latest + needs: + - build-x86_64-linux + - build-aarch64-linux + - build-x86_64-darwin + - build-aarch64-darwin + steps: + - name: Checkout + uses: actions/checkout@v4 + - uses: "DeterminateSystems/nix-installer-action@main" + with: + determinate: true + + - name: Create artifacts directory + run: mkdir -p ./artifacts + + - name: Fetch artifacts + uses: actions/download-artifact@v4 + with: + path: downloaded + - name: Move downloaded artifacts to artifacts directory + run: | + for dir in ./downloaded/*; do + arch="$(basename "$dir")" + mv "$dir"/*.xz ./artifacts/"${arch}" + done + + - name: Build fallback-paths.nix + run: | + nix build .#fallbackPathsNix --out-link fallback + cat fallback > ./artifacts/fallback-paths.nix + + - uses: DeterminateSystems/push-artifact-ids@main + with: + s3_upload_role: ${{ secrets.AWS_S3_UPLOAD_ROLE_ARN }} + bucket: ${{ secrets.AWS_S3_UPLOAD_BUCKET_NAME }} + directory: ./artifacts + ids_project_name: determinate-nix + ids_binary_prefix: determinate-nix + skip_acl: true diff --git a/flake.nix b/flake.nix index 29111b45382..a499c0dcb07 100644 --- a/flake.nix +++ b/flake.nix @@ -294,6 +294,37 @@ nix-manual = nixpkgsFor.${system}.native.nixComponents.nix-manual; nix-internal-api-docs = nixpkgsFor.${system}.native.nixComponents.nix-internal-api-docs; nix-external-api-docs = nixpkgsFor.${system}.native.nixComponents.nix-external-api-docs; + + fallbackPathsNix = + let + pkgs = nixpkgsFor.${system}.native; + + # NOTE(cole-h): discard string context so that it doesn't try to build, we just care about the outPaths + closures = forAllSystems (system: builtins.unsafeDiscardStringContext self.packages.${system}.default.outPath); + + closures_json = pkgs.runCommand "versions.json" + { + buildInputs = [ pkgs.jq ]; + passAsFile = [ "json" ]; + json = builtins.toJSON closures; + } '' + cat "$jsonPath" | jq . 
> $out + ''; + + closures_nix = pkgs.runCommand "versions.nix" + { + buildInputs = [ pkgs.jq ]; + passAsFile = [ "template" ]; + jsonPath = closures_json; + template = '' + builtins.fromJSON('''@closures@''') + ''; + } '' + export closures=$(cat "$jsonPath"); + substituteAll "$templatePath" "$out" + ''; + in + closures_nix; } # We need to flatten recursive attribute sets of derivations to pass `flake check`. // From 702bde8bf0577ebb4df9037d213225eae60155cb Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Fri, 21 Feb 2025 13:02:04 -0800 Subject: [PATCH 205/361] Revert "wip: delete unnecessary CI for now" This reverts commit d712540206fb40d3c26809bdcdd0479a37072df9. --- .github/workflows/ci.yml | 169 ++++++++++++++++++++++++++++++++++ .github/workflows/labels.yml | 24 +++++ .github/workflows/publish.yml | 23 +++++ .github/workflows/test.yml | 21 +++++ 4 files changed, 237 insertions(+) create mode 100644 .github/workflows/ci.yml create mode 100644 .github/workflows/labels.yml create mode 100644 .github/workflows/publish.yml create mode 100644 .github/workflows/test.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000000..c3a96704f77 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,169 @@ +name: "CI" + +on: + pull_request: + push: + branches: + - detsys-main + - main + - master + merge_group: + +permissions: + id-token: "write" + contents: "read" + +jobs: + eval: + runs-on: blacksmith-32vcpu-ubuntu-2204 + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: DeterminateSystems/nix-installer-action@main + with: + flakehub: true + - run: nix flake show --all-systems --json + + build_x86_64-linux: + uses: ./.github/workflows/build.yml + with: + os: blacksmith-32vcpu-ubuntu-2204 + + build_aarch64-linux: + if: github.event_name == 'merge_group' + uses: ./.github/workflows/build.yml + with: + os: blacksmith-32vcpu-ubuntu-2204-arm + + build_x86_64-darwin: + if: github.event_name == 'merge_group' + uses: ./.github/workflows/build.yml + with: + os: macos-13 + + build_aarch64-darwin: + uses: ./.github/workflows/build.yml + with: + os: macos-latest + + test_x86_64-linux: + uses: ./.github/workflows/test.yml + needs: build_x86_64-linux + with: + os: blacksmith-32vcpu-ubuntu-2204 + + test_aarch64-linux: + if: github.event_name == 'merge_group' + uses: ./.github/workflows/test.yml + needs: build_aarch64-linux + with: + os: blacksmith-32vcpu-ubuntu-2204-arm + + test_x86_64-darwin: + if: github.event_name == 'merge_group' + uses: ./.github/workflows/test.yml + needs: build_aarch64-darwin + with: + os: macos-13 + + test_aarch64-darwin: + if: github.event_name == 'merge_group' + uses: ./.github/workflows/test.yml + needs: build_aarch64-darwin + with: + os: macos-latest + + vm_tests_smoke: + if: github.event_name != 'merge_group' + needs: build_x86_64-linux + runs-on: blacksmith-32vcpu-ubuntu-2204 + steps: + - uses: actions/checkout@v4 + - uses: DeterminateSystems/nix-installer-action@main + with: + flakehub: true + - uses: DeterminateSystems/flakehub-cache-action@main + - run: | + nix build -L \ + .#hydraJobs.tests.functional_user \ + .#hydraJobs.tests.githubFlakes \ + .#hydraJobs.tests.nix-docker \ + .#hydraJobs.tests.tarballFlakes \ + ; + + vm_tests_all: + if: github.event_name == 'merge_group' + needs: build_x86_64-linux + runs-on: blacksmith-32vcpu-ubuntu-2204 + steps: + - uses: actions/checkout@v4 + - uses: DeterminateSystems/nix-installer-action@main + with: + flakehub: true + - uses: 
DeterminateSystems/flakehub-cache-action@main + - run: | + nix build -L --keep-going \ + $(nix flake show --json \ + | jq -r ' + .hydraJobs.tests + | with_entries(select(.value.type == "derivation")) + | keys[] + | ".#hydraJobs.tests." + .' \ + | head -n5) # FIXME: for testing the merge queue + + flake_regressions: + if: github.event_name == 'merge_group' + needs: build_x86_64-linux + runs-on: blacksmith-32vcpu-ubuntu-2204 + steps: + - name: Checkout nix + uses: actions/checkout@v4 + - name: Checkout flake-regressions + uses: actions/checkout@v4 + with: + repository: DeterminateSystems/flake-regressions + path: flake-regressions + - name: Checkout flake-regressions-data + uses: actions/checkout@v4 + with: + repository: DeterminateSystems/flake-regressions-data + path: flake-regressions/tests + - uses: DeterminateSystems/nix-installer-action@main + with: + flakehub: true + - uses: DeterminateSystems/flakehub-cache-action@main + - run: nix build -L --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH MAX_FLAKES=50 flake-regressions/eval-all.sh + + manual: + if: github.event_name != 'merge_group' + needs: build_x86_64-linux + runs-on: blacksmith + permissions: + id-token: "write" + contents: "read" + pull-requests: "write" + statuses: "write" + deployments: "write" + steps: + - name: Checkout nix + uses: actions/checkout@v4 + - uses: DeterminateSystems/nix-installer-action@main + with: + flakehub: true + - uses: DeterminateSystems/flakehub-cache-action@main + - name: Build manual + run: nix build .#hydraJobs.manual + - uses: nwtgck/actions-netlify@v3.0 + with: + publish-dir: './result/share/doc/nix/manual' + production-branch: detsys-main + github-token: ${{ secrets.GITHUB_TOKEN }} + deploy-message: "Deploy from GitHub Actions" + enable-pull-request-comment: true + enable-commit-comment: true + enable-commit-status: true + overwrites-pull-request-comment: true + env: + NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} + NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }} diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml new file mode 100644 index 00000000000..23a5d9e51fc --- /dev/null +++ b/.github/workflows/labels.yml @@ -0,0 +1,24 @@ +name: "Label PR" + +on: + pull_request_target: + types: [edited, opened, synchronize, reopened] + +# WARNING: +# When extending this action, be aware that $GITHUB_TOKEN allows some write +# access to the GitHub API. This means that it should not evaluate user input in +# a way that allows code injection. 
+ +permissions: + contents: read + pull-requests: write + +jobs: + labels: + runs-on: ubuntu-24.04 + if: github.repository_owner == 'NixOS' + steps: + - uses: actions/labeler@v5 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + sync-labels: false diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 00000000000..00ca3ec534b --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,23 @@ +name: Release + +on: + release: + types: + - released + +jobs: + publish: + if: (!github.repository.fork && (github.ref == format('refs/heads/{0}', github.event.repository.default_branch) || startsWith(github.ref, 'refs/tags/'))) + environment: ${{ github.event_name == 'release' && 'production' || '' }} + runs-on: ubuntu-latest + permissions: + contents: read + id-token: write + steps: + - uses: actions/checkout@v4 + - uses: DeterminateSystems/nix-installer-action@main + - uses: "DeterminateSystems/flakehub-push@main" + with: + rolling: ${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) }} + visibility: "private" + tag: "${{ github.ref_name }}" diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 00000000000..e58827a9c06 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,21 @@ +on: + workflow_call: + inputs: + os: + required: true + type: string + +jobs: + + tests: + strategy: + fail-fast: false + runs-on: ${{ inputs.os }} + timeout-minutes: 60 + steps: + - uses: actions/checkout@v4 + - uses: DeterminateSystems/nix-installer-action@main + with: + flakehub: true + - uses: DeterminateSystems/flakehub-cache-action@main + - run: nix flake check -L From dee23a0c1412aa5fb5b1ed35cd7824705c947344 Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Fri, 21 Feb 2025 13:29:34 -0800 Subject: [PATCH 206/361] Fold the release things into one workflow --- .github/workflows/release-branches.yml | 20 ----------------- .github/workflows/release-prs.yml | 30 -------------------------- .github/workflows/release-tags.yml | 18 ---------------- .github/workflows/upload-release.yml | 13 +++++++++++ 4 files changed, 13 insertions(+), 68 deletions(-) delete mode 100644 .github/workflows/release-branches.yml delete mode 100644 .github/workflows/release-prs.yml delete mode 100644 .github/workflows/release-tags.yml diff --git a/.github/workflows/release-branches.yml b/.github/workflows/release-branches.yml deleted file mode 100644 index 38e4044edad..00000000000 --- a/.github/workflows/release-branches.yml +++ /dev/null @@ -1,20 +0,0 @@ -name: Release Branch - -concurrency: - group: release - -on: - push: - branches: - # NOTE: make sure any branches here are also valid directory names, - # otherwise creating the directory and uploading to s3 will fail - - "main" - -permissions: - id-token: "write" - contents: "read" - -jobs: - release-branch: - uses: ./.github/workflows/upload-release.yml - secrets: inherit diff --git a/.github/workflows/release-prs.yml b/.github/workflows/release-prs.yml deleted file mode 100644 index 818083c6835..00000000000 --- a/.github/workflows/release-prs.yml +++ /dev/null @@ -1,30 +0,0 @@ -name: Release PR - -concurrency: - group: release - -on: - pull_request: - types: - - opened - - reopened - - synchronize - - labeled - -permissions: - id-token: "write" - contents: "read" - -jobs: - release-pr: - # Only intra-repo PRs are allowed to have PR artifacts uploaded - # We only want to trigger once the upload once in the case the upload label is added, not when any label is added - 
if: | - always() && !failure() && !cancelled() - && github.event.pull_request.head.repo.full_name == 'DeterminateSystems/nix-priv' - && ( - (github.event.action == 'labeled' && github.event.label.name == 'upload to s3') - || (github.event.action != 'labeled' && contains(github.event.pull_request.labels.*.name, 'upload to s3')) - ) - uses: ./.github/workflows/upload-release.yml - secrets: inherit diff --git a/.github/workflows/release-tags.yml b/.github/workflows/release-tags.yml deleted file mode 100644 index 709fbb92a44..00000000000 --- a/.github/workflows/release-tags.yml +++ /dev/null @@ -1,18 +0,0 @@ -name: Release Tags - -concurrency: - group: release - -on: - push: - tags: - - "v*.*.*" - -permissions: - contents: write # In order to upload artifacts to GitHub releases - id-token: write # In order to request a JWT for AWS auth - -jobs: - release-tag: - uses: ./.github/workflows/upload-release.yml - secrets: inherit diff --git a/.github/workflows/upload-release.yml b/.github/workflows/upload-release.yml index bec5816be61..cffbb315e10 100644 --- a/.github/workflows/upload-release.yml +++ b/.github/workflows/upload-release.yml @@ -5,6 +5,19 @@ concurrency: on: workflow_call: + push: + branches: + # NOTE: make sure any branches here are also valid directory names, + # otherwise creating the directory and uploading to s3 will fail + - "main" + tags: + - "v*.*.*" + pull_request: + types: + - opened + - reopened + - synchronize + - labeled permissions: id-token: "write" From 14818b0d8817ce50145967768c8b2ade08f9b931 Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Fri, 21 Feb 2025 13:31:58 -0800 Subject: [PATCH 207/361] fixup: use release not tags --- .github/workflows/upload-release.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/upload-release.yml b/.github/workflows/upload-release.yml index cffbb315e10..6fbf334204c 100644 --- a/.github/workflows/upload-release.yml +++ b/.github/workflows/upload-release.yml @@ -10,14 +10,15 @@ on: # NOTE: make sure any branches here are also valid directory names, # otherwise creating the directory and uploading to s3 will fail - "main" - tags: - - "v*.*.*" pull_request: types: - opened - reopened - synchronize - labeled + release: + types: + - released permissions: id-token: "write" From a341be4d9b8ed69322a281613c2ef7135d9d4578 Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Fri, 21 Feb 2025 13:33:56 -0800 Subject: [PATCH 208/361] fixup: fold publish.yml into upload-release.yml --- .github/workflows/publish.yml | 23 ----------------------- .github/workflows/upload-release.yml | 18 ++++++++++++++++++ 2 files changed, 18 insertions(+), 23 deletions(-) delete mode 100644 .github/workflows/publish.yml diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml deleted file mode 100644 index 00ca3ec534b..00000000000 --- a/.github/workflows/publish.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: Release - -on: - release: - types: - - released - -jobs: - publish: - if: (!github.repository.fork && (github.ref == format('refs/heads/{0}', github.event.repository.default_branch) || startsWith(github.ref, 'refs/tags/'))) - environment: ${{ github.event_name == 'release' && 'production' || '' }} - runs-on: ubuntu-latest - permissions: - contents: read - id-token: write - steps: - - uses: actions/checkout@v4 - - uses: DeterminateSystems/nix-installer-action@main - - uses: "DeterminateSystems/flakehub-push@main" - with: - rolling: ${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) 
}} - visibility: "private" - tag: "${{ github.ref_name }}" diff --git a/.github/workflows/upload-release.yml b/.github/workflows/upload-release.yml index 6fbf334204c..5e09c010ce7 100644 --- a/.github/workflows/upload-release.yml +++ b/.github/workflows/upload-release.yml @@ -83,3 +83,21 @@ jobs: ids_project_name: determinate-nix ids_binary_prefix: determinate-nix skip_acl: true + + publish: + needs: + - release + if: (!github.repository.fork && (github.ref == format('refs/heads/{0}', github.event.repository.default_branch) || startsWith(github.ref, 'refs/tags/'))) + environment: ${{ github.event_name == 'release' && 'production' || '' }} + runs-on: ubuntu-latest + permissions: + contents: read + id-token: write + steps: + - uses: actions/checkout@v4 + - uses: DeterminateSystems/nix-installer-action@main + - uses: "DeterminateSystems/flakehub-push@main" + with: + rolling: ${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) }} + visibility: "private" + tag: "${{ github.ref_name }}" From a4e9b65c3a065941167bb5567203e4d406d076fb Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Fri, 21 Feb 2025 13:50:29 -0800 Subject: [PATCH 209/361] fixup: remove unsafeDiscardStringContext? --- flake.nix | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/flake.nix b/flake.nix index a499c0dcb07..04f3e4d87c9 100644 --- a/flake.nix +++ b/flake.nix @@ -299,8 +299,7 @@ let pkgs = nixpkgsFor.${system}.native; - # NOTE(cole-h): discard string context so that it doesn't try to build, we just care about the outPaths - closures = forAllSystems (system: builtins.unsafeDiscardStringContext self.packages.${system}.default.outPath); + closures = forAllSystems (system: self.packages.${system}.default.outPath); closures_json = pkgs.runCommand "versions.json" { From ec42d3a0777cd5d38d2ea5550a1fc44fc999fd73 Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Fri, 21 Feb 2025 14:16:36 -0800 Subject: [PATCH 210/361] fixup: default branch name --- .github/workflows/upload-release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/upload-release.yml b/.github/workflows/upload-release.yml index 5e09c010ce7..0db501ef5ad 100644 --- a/.github/workflows/upload-release.yml +++ b/.github/workflows/upload-release.yml @@ -9,7 +9,7 @@ on: branches: # NOTE: make sure any branches here are also valid directory names, # otherwise creating the directory and uploading to s3 will fail - - "main" + - "detsys-main" pull_request: types: - opened From 158d79ddb5c705f62f0dd716a138ddc884bb1349 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 22 Feb 2025 16:45:16 +0000 Subject: [PATCH 211/361] Prepare release v0.37.2 From 84fb833d5badaa287b0f02d258c080b816748948 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 22 Feb 2025 16:45:19 +0000 Subject: [PATCH 212/361] Set .version-determinate to 0.37.2 --- .version-determinate | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version-determinate b/.version-determinate index 9b1bb851239..8570a3aeb97 100644 --- a/.version-determinate +++ b/.version-determinate @@ -1 +1 @@ -0.37.1 +0.37.2 From d670380bd9f63d83655a0bde71b285103735b072 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 24 Feb 2025 15:30:30 +0100 Subject: [PATCH 213/361] nix flake archive: Skip relative path inputs Fixes #12438. 
(cherry picked from commit b4dfeafed5e2b0d8d6fd90bef4d3bed24caa4734) --- src/nix/flake.cc | 4 +++- tests/functional/flakes/relative-paths.sh | 3 +++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 4d5cad1a8b7..87eaafd1592 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -1088,12 +1088,14 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun nlohmann::json jsonObj2 = json ? json::object() : nlohmann::json(nullptr); for (auto & [inputName, input] : node.inputs) { if (auto inputNode = std::get_if<0>(&input)) { + if ((*inputNode)->lockedRef.input.isRelative()) + continue; auto storePath = dryRun ? (*inputNode)->lockedRef.input.computeStorePath(*store) : (*inputNode)->lockedRef.input.fetchToStore(store).first; if (json) { - auto& jsonObj3 = jsonObj2[inputName]; + auto & jsonObj3 = jsonObj2[inputName]; jsonObj3["path"] = store->printStorePath(storePath); sources.insert(std::move(storePath)); jsonObj3["inputs"] = traverse(**inputNode); diff --git a/tests/functional/flakes/relative-paths.sh b/tests/functional/flakes/relative-paths.sh index 9b93da9c1ca..ac4b07eb274 100644 --- a/tests/functional/flakes/relative-paths.sh +++ b/tests/functional/flakes/relative-paths.sh @@ -76,6 +76,9 @@ if ! isTestOnNixOS; then fi (! grep narHash "$subflake2/flake.lock") +# Test `nix flake archive` with relative path flakes. +nix flake archive --json "$rootFlake" + # Test circular relative path flakes. FIXME: doesn't work at the moment. if false; then From ab493636cd9ae326d8018d11ac7495dca54b7fab Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Mon, 24 Feb 2025 11:19:08 -0800 Subject: [PATCH 214/361] fixup: upload-release needs to configure allowed_branches --- .github/workflows/upload-release.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/upload-release.yml b/.github/workflows/upload-release.yml index 0db501ef5ad..2eaf48d0ece 100644 --- a/.github/workflows/upload-release.yml +++ b/.github/workflows/upload-release.yml @@ -83,6 +83,7 @@ jobs: ids_project_name: determinate-nix ids_binary_prefix: determinate-nix skip_acl: true + allowed_branches: '["detsys-main"]' publish: needs: From 9e87a583142e0dccb04588445d7a807392385903 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 24 Feb 2025 16:44:12 +0100 Subject: [PATCH 215/361] packaging: Use correct stdenv for x86_64-darwin (cherry picked from commit 0772c2e3abc269f5e3aa8dd1fa055fba523d60ee) --- flake.nix | 1 - packaging/components.nix | 10 +++++++++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/flake.nix b/flake.nix index f5c7780d590..0c0ddfa474d 100644 --- a/flake.nix +++ b/flake.nix @@ -165,7 +165,6 @@ f = import ./packaging/components.nix { inherit (final) lib; inherit officialRelease; - inherit stdenv; pkgs = final; src = self; }; diff --git a/packaging/components.nix b/packaging/components.nix index b1ef38302f5..9da864887cc 100644 --- a/packaging/components.nix +++ b/packaging/components.nix @@ -2,7 +2,6 @@ lib, pkgs, src, - stdenv, officialRelease, }: @@ -12,6 +11,15 @@ let inherit (scope) callPackage ; + inherit + (scope.callPackage ( + { stdenv }: + { + inherit stdenv; + } + ) { }) + stdenv + ; inherit (pkgs.buildPackages) meson ninja From 605b2371f96c020516ee3e9596ff6df3db0f0be5 Mon Sep 17 00:00:00 2001 From: "mergify[bot]" <37929162+mergify[bot]@users.noreply.github.com> Date: Mon, 24 Feb 2025 21:30:35 +0000 Subject: [PATCH 216/361] windows: fix compilation after recent changes (backport #12495) (#12561) * windows: fix compilation after recent 
changes Specifically last few week's merges involving legacy SSH options and dynamic derivations. (cherry picked from commit e0617d25453760e2f5817ece317914eee9330768) # Conflicts: # src/libstore/build/derivation-creation-and-realisation-goal.hh * Remove unneeded * Remove unneeded --------- Co-authored-by: Brian McKenna Co-authored-by: Eelco Dolstra --- src/libstore/legacy-ssh-store.cc | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 3849f088dd5..480f4105939 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -367,7 +367,12 @@ unsigned int LegacySSHStore::getProtocol() pid_t LegacySSHStore::getConnectionPid() { auto conn(connections->get()); +#ifndef _WIN32 return conn->sshConn->sshPid; +#else + // TODO: Implement + return 0; +#endif } From 91508de3152b4448b44d9e48b749570077ff473f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 24 Feb 2025 17:46:43 +0100 Subject: [PATCH 217/361] nix flake archive: Recurse into relative path inputs We can't ignore them entirely, since we do want to archive their transitive inputs. Fixes #12438. (cherry picked from commit 14c9755462cc8ee61ba7a34da48fcfc34d3b110c) --- src/nix/flake.cc | 22 +++++++++++----------- tests/functional/flakes/common.sh | 14 +++++++++++--- tests/functional/flakes/relative-paths.sh | 14 ++++++++++++-- 3 files changed, 34 insertions(+), 16 deletions(-) diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 87eaafd1592..9259743f434 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -1088,21 +1088,21 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun nlohmann::json jsonObj2 = json ? json::object() : nlohmann::json(nullptr); for (auto & [inputName, input] : node.inputs) { if (auto inputNode = std::get_if<0>(&input)) { - if ((*inputNode)->lockedRef.input.isRelative()) - continue; - auto storePath = - dryRun - ? (*inputNode)->lockedRef.input.computeStorePath(*store) - : (*inputNode)->lockedRef.input.fetchToStore(store).first; + std::optional storePath; + if (!(*inputNode)->lockedRef.input.isRelative()) { + storePath = + dryRun + ? 
(*inputNode)->lockedRef.input.computeStorePath(*store) + : (*inputNode)->lockedRef.input.fetchToStore(store).first; + sources.insert(*storePath); + } if (json) { auto & jsonObj3 = jsonObj2[inputName]; - jsonObj3["path"] = store->printStorePath(storePath); - sources.insert(std::move(storePath)); + if (storePath) + jsonObj3["path"] = store->printStorePath(*storePath); jsonObj3["inputs"] = traverse(**inputNode); - } else { - sources.insert(std::move(storePath)); + } else traverse(**inputNode); - } } } return jsonObj2; diff --git a/tests/functional/flakes/common.sh b/tests/functional/flakes/common.sh index b1c3988e342..06e414e9d03 100644 --- a/tests/functional/flakes/common.sh +++ b/tests/functional/flakes/common.sh @@ -99,6 +99,16 @@ writeTrivialFlake() { EOF } +initGitRepo() { + local repo="$1" + local extraArgs="${2-}" + + # shellcheck disable=SC2086 # word splitting of extraArgs is intended + git -C "$repo" init $extraArgs + git -C "$repo" config user.email "foobar@example.com" + git -C "$repo" config user.name "Foobar" +} + createGitRepo() { local repo="$1" local extraArgs="${2-}" @@ -107,7 +117,5 @@ createGitRepo() { mkdir -p "$repo" # shellcheck disable=SC2086 # word splitting of extraArgs is intended - git -C "$repo" init $extraArgs - git -C "$repo" config user.email "foobar@example.com" - git -C "$repo" config user.name "Foobar" + initGitRepo "$repo" $extraArgs } diff --git a/tests/functional/flakes/relative-paths.sh b/tests/functional/flakes/relative-paths.sh index ac4b07eb274..3f7ca3f4618 100644 --- a/tests/functional/flakes/relative-paths.sh +++ b/tests/functional/flakes/relative-paths.sh @@ -45,7 +45,7 @@ EOF [[ $(nix eval "$rootFlake?dir=sub1#y") = 6 ]] -git init "$rootFlake" +initGitRepo "$rootFlake" git -C "$rootFlake" add flake.nix sub0/flake.nix sub1/flake.nix [[ $(nix eval "$subflake1#y") = 6 ]] @@ -77,7 +77,17 @@ fi (! grep narHash "$subflake2/flake.lock") # Test `nix flake archive` with relative path flakes. -nix flake archive --json "$rootFlake" +git -C "$rootFlake" add flake.lock +git -C "$rootFlake" commit -a -m Foo + +json=$(nix flake archive --json "$rootFlake" --to "$TEST_ROOT/store2") +[[ $(echo "$json" | jq .inputs.sub0.inputs) = {} ]] +[[ -n $(echo "$json" | jq .path) ]] + +nix flake prefetch --out-link "$TEST_ROOT/result" "$rootFlake" +outPath=$(readlink "$TEST_ROOT/result") + +[ -e "$TEST_ROOT/store2/nix/store/$(basename "$outPath")" ] # Test circular relative path flakes. FIXME: doesn't work at the moment. if false; then From 827f760ad7e12dd006e834045d46645869cd4c74 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 24 Feb 2025 23:00:07 +0100 Subject: [PATCH 218/361] Fix test We didn't backport `nix flake prefetch --out-link`. 
--- tests/functional/flakes/relative-paths.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/functional/flakes/relative-paths.sh b/tests/functional/flakes/relative-paths.sh index 3f7ca3f4618..9c0e6fd4124 100644 --- a/tests/functional/flakes/relative-paths.sh +++ b/tests/functional/flakes/relative-paths.sh @@ -84,10 +84,10 @@ json=$(nix flake archive --json "$rootFlake" --to "$TEST_ROOT/store2") [[ $(echo "$json" | jq .inputs.sub0.inputs) = {} ]] [[ -n $(echo "$json" | jq .path) ]] -nix flake prefetch --out-link "$TEST_ROOT/result" "$rootFlake" -outPath=$(readlink "$TEST_ROOT/result") +#nix flake prefetch --out-link "$TEST_ROOT/result" "$rootFlake" +#outPath=$(readlink "$TEST_ROOT/result") -[ -e "$TEST_ROOT/store2/nix/store/$(basename "$outPath")" ] +#[ -e "$TEST_ROOT/store2/nix/store/$(basename "$outPath")" ] # Test circular relative path flakes. FIXME: doesn't work at the moment. if false; then From 25c6048fa6a658a9be6efb106f57a3049fd4272d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 24 Feb 2025 22:55:44 +0100 Subject: [PATCH 219/361] Bump Determinate Nix version to 3.0.0 --- .version-determinate | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version-determinate b/.version-determinate index 8570a3aeb97..4a36342fcab 100644 --- a/.version-determinate +++ b/.version-determinate @@ -1 +1 @@ -0.37.2 +3.0.0 From 5fc89adf6c6a0a47d054b339d737006f4b2de197 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 24 Feb 2025 21:26:31 +0100 Subject: [PATCH 220/361] Use Determinate version in store path name --- packaging/components.nix | 2 +- packaging/dev-shell.nix | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packaging/components.nix b/packaging/components.nix index 9da864887cc..38634619463 100644 --- a/packaging/components.nix +++ b/packaging/components.nix @@ -26,7 +26,7 @@ let pkg-config ; - baseVersion = lib.fileContents ../.version; + baseVersion = lib.fileContents ../.version-determinate; versionSuffix = lib.optionalString (!officialRelease) "pre"; diff --git a/packaging/dev-shell.nix b/packaging/dev-shell.nix index 1651a86bee1..a5a2426a439 100644 --- a/packaging/dev-shell.nix +++ b/packaging/dev-shell.nix @@ -23,7 +23,7 @@ pkgs.nixComponents.nix-util.overrideAttrs ( pname = "shell-for-" + attrs.pname; # Remove the version suffix to avoid unnecessary attempts to substitute in nix develop - version = lib.fileContents ../.version; + version = lib.fileContents ../.version-determinate; name = attrs.pname; installFlags = "sysconfdir=$(out)/etc"; From e71a498e2571392d18a3107ed235844130f7d462 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 24 Feb 2025 21:58:29 +0100 Subject: [PATCH 221/361] Disable setVersionLayer This sets .version to finalAttrs.version, so we would end up with `nix --version` showing `nix (Determinate Nix 0.37.2) 0.37.2`. 
--- packaging/components.nix | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packaging/components.nix b/packaging/components.nix index 38634619463..a3f816c4d5e 100644 --- a/packaging/components.nix +++ b/packaging/components.nix @@ -50,6 +50,7 @@ let exts: userFn: stdenv.mkDerivation (lib.extends (lib.composeManyExtensions exts) userFn); setVersionLayer = finalAttrs: prevAttrs: { + /* preConfigure = prevAttrs.prevAttrs or "" + @@ -59,6 +60,7 @@ let chmod u+w ./.version echo ${finalAttrs.version} > ./.version ''; + */ }; localSourceLayer = From 94347f4622f54c4ad08ce8c3e35bb230cce08893 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 24 Feb 2025 23:28:09 +0100 Subject: [PATCH 222/361] nix -> determinate-nix --- doc/manual/package.nix | 2 +- packaging/everything.nix | 4 ++-- src/external-api-docs/package.nix | 2 +- src/internal-api-docs/package.nix | 2 +- src/libcmd/package.nix | 2 +- src/libexpr-c/package.nix | 2 +- src/libexpr-test-support/package.nix | 2 +- src/libexpr/package.nix | 2 +- src/libfetchers/package.nix | 2 +- src/libflake-c/package.nix | 2 +- src/libflake/package.nix | 2 +- src/libmain-c/package.nix | 2 +- src/libmain/package.nix | 2 +- src/libstore-c/package.nix | 2 +- src/libstore-test-support/package.nix | 2 +- src/libstore/package.nix | 2 +- src/libutil-c/package.nix | 2 +- src/libutil-test-support/package.nix | 2 +- src/libutil/package.nix | 2 +- src/perl/package.nix | 2 +- 20 files changed, 21 insertions(+), 21 deletions(-) diff --git a/doc/manual/package.nix b/doc/manual/package.nix index 6d93e6f1a5d..778440ac256 100644 --- a/doc/manual/package.nix +++ b/doc/manual/package.nix @@ -22,7 +22,7 @@ let in mkMesonDerivation (finalAttrs: { - pname = "nix-manual"; + pname = "determinate-nix-manual"; inherit version; workDir = ./.; diff --git a/packaging/everything.nix b/packaging/everything.nix index 0974a34df50..3637c4d07d1 100644 --- a/packaging/everything.nix +++ b/packaging/everything.nix @@ -69,7 +69,7 @@ let }; dev = stdenv.mkDerivation (finalAttrs: { - name = "nix-${nix-cli.version}-dev"; + name = "determinate-nix-${nix-cli.version}-dev"; pname = "nix"; version = nix-cli.version; dontUnpack = true; @@ -120,7 +120,7 @@ let in (buildEnv { - name = "nix-${nix-cli.version}"; + name = "determinate-nix-${nix-cli.version}"; paths = [ nix-cli nix-manual.man diff --git a/src/external-api-docs/package.nix b/src/external-api-docs/package.nix index b194e16d460..28cde8c09e6 100644 --- a/src/external-api-docs/package.nix +++ b/src/external-api-docs/package.nix @@ -14,7 +14,7 @@ let in mkMesonDerivation (finalAttrs: { - pname = "nix-external-api-docs"; + pname = "determinate-nix-external-api-docs"; inherit version; workDir = ./.; diff --git a/src/internal-api-docs/package.nix b/src/internal-api-docs/package.nix index 6c4f354aee5..636c19653ea 100644 --- a/src/internal-api-docs/package.nix +++ b/src/internal-api-docs/package.nix @@ -14,7 +14,7 @@ let in mkMesonDerivation (finalAttrs: { - pname = "nix-internal-api-docs"; + pname = "determinate-nix-internal-api-docs"; inherit version; workDir = ./.; diff --git a/src/libcmd/package.nix b/src/libcmd/package.nix index d459d1c20fb..5150de249e8 100644 --- a/src/libcmd/package.nix +++ b/src/libcmd/package.nix @@ -35,7 +35,7 @@ let in mkMesonLibrary (finalAttrs: { - pname = "nix-cmd"; + pname = "determinate-nix-cmd"; inherit version; workDir = ./.; diff --git a/src/libexpr-c/package.nix b/src/libexpr-c/package.nix index 694fbc1fe78..ec92ecce105 100644 --- a/src/libexpr-c/package.nix +++ b/src/libexpr-c/package.nix @@ -15,7 +15,7 @@ 
let in mkMesonLibrary (finalAttrs: { - pname = "nix-expr-c"; + pname = "determinate-nix-expr-c"; inherit version; workDir = ./.; diff --git a/src/libexpr-test-support/package.nix b/src/libexpr-test-support/package.nix index 44b0ff38631..dbf515370f0 100644 --- a/src/libexpr-test-support/package.nix +++ b/src/libexpr-test-support/package.nix @@ -18,7 +18,7 @@ let in mkMesonLibrary (finalAttrs: { - pname = "nix-util-test-support"; + pname = "determinate-nix-util-test-support"; inherit version; workDir = ./.; diff --git a/src/libexpr/package.nix b/src/libexpr/package.nix index 533dae9f253..309d57f9b1a 100644 --- a/src/libexpr/package.nix +++ b/src/libexpr/package.nix @@ -36,7 +36,7 @@ let in mkMesonLibrary (finalAttrs: { - pname = "nix-expr"; + pname = "determinate-nix-expr"; inherit version; workDir = ./.; diff --git a/src/libfetchers/package.nix b/src/libfetchers/package.nix index 3f52e987800..5aa096082ed 100644 --- a/src/libfetchers/package.nix +++ b/src/libfetchers/package.nix @@ -17,7 +17,7 @@ let in mkMesonLibrary (finalAttrs: { - pname = "nix-fetchers"; + pname = "determinate-nix-fetchers"; inherit version; workDir = ./.; diff --git a/src/libflake-c/package.nix b/src/libflake-c/package.nix index 1149508523e..958cf233e0a 100644 --- a/src/libflake-c/package.nix +++ b/src/libflake-c/package.nix @@ -16,7 +16,7 @@ let in mkMesonLibrary (finalAttrs: { - pname = "nix-flake-c"; + pname = "determinate-nix-flake-c"; inherit version; workDir = ./.; diff --git a/src/libflake/package.nix b/src/libflake/package.nix index 5240ce5e396..2c28235f1bd 100644 --- a/src/libflake/package.nix +++ b/src/libflake/package.nix @@ -18,7 +18,7 @@ let in mkMesonLibrary (finalAttrs: { - pname = "nix-flake"; + pname = "determinate-nix-flake"; inherit version; workDir = ./.; diff --git a/src/libmain-c/package.nix b/src/libmain-c/package.nix index f019a917d36..17858d56f2e 100644 --- a/src/libmain-c/package.nix +++ b/src/libmain-c/package.nix @@ -17,7 +17,7 @@ let in mkMesonLibrary (finalAttrs: { - pname = "nix-main-c"; + pname = "determinate-nix-main-c"; inherit version; workDir = ./.; diff --git a/src/libmain/package.nix b/src/libmain/package.nix index c03697c48da..5ee2e61e41d 100644 --- a/src/libmain/package.nix +++ b/src/libmain/package.nix @@ -17,7 +17,7 @@ let in mkMesonLibrary (finalAttrs: { - pname = "nix-main"; + pname = "determinate-nix-main"; inherit version; workDir = ./.; diff --git a/src/libstore-c/package.nix b/src/libstore-c/package.nix index fde17c78e01..0ce37e44c01 100644 --- a/src/libstore-c/package.nix +++ b/src/libstore-c/package.nix @@ -15,7 +15,7 @@ let in mkMesonLibrary (finalAttrs: { - pname = "nix-store-c"; + pname = "determinate-nix-store-c"; inherit version; workDir = ./.; diff --git a/src/libstore-test-support/package.nix b/src/libstore-test-support/package.nix index ccac25ee16a..8a4658ae700 100644 --- a/src/libstore-test-support/package.nix +++ b/src/libstore-test-support/package.nix @@ -18,7 +18,7 @@ let in mkMesonLibrary (finalAttrs: { - pname = "nix-store-test-support"; + pname = "determinate-nix-store-test-support"; inherit version; workDir = ./.; diff --git a/src/libstore/package.nix b/src/libstore/package.nix index a7d9a0ca110..847e61d09a9 100644 --- a/src/libstore/package.nix +++ b/src/libstore/package.nix @@ -28,7 +28,7 @@ let in mkMesonLibrary (finalAttrs: { - pname = "nix-store"; + pname = "determinate-nix-store"; inherit version; workDir = ./.; diff --git a/src/libutil-c/package.nix b/src/libutil-c/package.nix index f26f57775d4..a1605bf5bb8 100644 --- a/src/libutil-c/package.nix 
+++ b/src/libutil-c/package.nix @@ -14,7 +14,7 @@ let in mkMesonLibrary (finalAttrs: { - pname = "nix-util-c"; + pname = "determinate-nix-util-c"; inherit version; workDir = ./.; diff --git a/src/libutil-test-support/package.nix b/src/libutil-test-support/package.nix index fafd47c86c5..3b094ac29bd 100644 --- a/src/libutil-test-support/package.nix +++ b/src/libutil-test-support/package.nix @@ -17,7 +17,7 @@ let in mkMesonLibrary (finalAttrs: { - pname = "nix-util-test-support"; + pname = "determinate-nix-util-test-support"; inherit version; workDir = ./.; diff --git a/src/libutil/package.nix b/src/libutil/package.nix index 47dcb54a26f..fcc74c247e1 100644 --- a/src/libutil/package.nix +++ b/src/libutil/package.nix @@ -21,7 +21,7 @@ let in mkMesonLibrary (finalAttrs: { - pname = "nix-util"; + pname = "determinate-nix-util"; inherit version; workDir = ./.; diff --git a/src/perl/package.nix b/src/perl/package.nix index d95d13aa921..d948cbcdcce 100644 --- a/src/perl/package.nix +++ b/src/perl/package.nix @@ -18,7 +18,7 @@ in perl.pkgs.toPerlModule ( mkMesonDerivation (finalAttrs: { - pname = "nix-perl"; + pname = "determinate-nix-perl"; inherit version; workDir = ./.; From ff8da340ae93f053350872e5d7ac301fd7c814ee Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Tue, 25 Feb 2025 14:33:57 -0300 Subject: [PATCH 223/361] Remove references to single-user mode --- doc/manual/redirects.js | 3 --- doc/manual/source/SUMMARY.md.in | 4 +-- doc/manual/source/installation/index.md | 13 ---------- .../source/installation/installing-binary.md | 25 +------------------ .../source/installation/installing-docker.md | 18 ------------- .../source/installation/nix-security.md | 15 ----------- doc/manual/source/installation/single-user.md | 9 ------- doc/manual/source/installation/uninstall.md | 9 ------- doc/manual/source/installation/upgrading.md | 6 ----- 9 files changed, 2 insertions(+), 100 deletions(-) delete mode 100644 doc/manual/source/installation/nix-security.md delete mode 100644 doc/manual/source/installation/single-user.md diff --git a/doc/manual/redirects.js b/doc/manual/redirects.js index dea141391df..36f53cbc82c 100644 --- a/doc/manual/redirects.js +++ b/doc/manual/redirects.js @@ -271,13 +271,10 @@ const redirects = { "sect-multi-user-installation": "installation/installing-binary.html#multi-user-installation", "sect-nix-install-binary-tarball": "installation/installing-binary.html#installing-from-a-binary-tarball", "sect-nix-install-pinned-version-url": "installation/installing-binary.html#installing-a-pinned-nix-version-from-a-url", - "sect-single-user-installation": "installation/installing-binary.html#single-user-installation", "ch-installing-source": "installation/installing-source.html", "ssec-multi-user": "installation/multi-user.html", - "ch-nix-security": "installation/nix-security.html", "sec-obtaining-source": "installation/obtaining-source.html", "sec-prerequisites-source": "installation/prerequisites-source.html", - "sec-single-user": "installation/single-user.html", "ch-supported-platforms": "installation/supported-platforms.html", "ch-upgrading-nix": "installation/upgrading.html", "ch-about-nix": "introduction.html", diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index 228bbc88206..9d465e4bb49 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -10,9 +10,7 @@ - [Obtaining a Source Distribution](installation/obtaining-source.md) - [Building Nix from Source](installation/building-source.md) - [Using Nix within 
Docker](installation/installing-docker.md) - - [Security](installation/nix-security.md) - - [Single-User Mode](installation/single-user.md) - - [Multi-User Mode](installation/multi-user.md) + - [Multi-User Mode](installation/multi-user.md) - [Environment Variables](installation/env-variables.md) - [Upgrading Nix](installation/upgrading.md) - [Uninstalling Nix](installation/uninstall.md) diff --git a/doc/manual/source/installation/index.md b/doc/manual/source/installation/index.md index 48725c1ba74..d71634946d6 100644 --- a/doc/manual/source/installation/index.md +++ b/doc/manual/source/installation/index.md @@ -26,19 +26,6 @@ This option requires either: $ curl -L https://nixos.org/nix/install | sh -s -- --daemon ``` -## Single-user - -> Single-user is not supported on Mac. - -This installation has less requirements than the multi-user install, however it -cannot offer equivalent sharing, isolation, or security. - -This option is suitable for systems without systemd. - -```console -$ curl -L https://nixos.org/nix/install | sh -s -- --no-daemon -``` - ## Distributions The Nix community maintains installers for several distributions. diff --git a/doc/manual/source/installation/installing-binary.md b/doc/manual/source/installation/installing-binary.md index 6a1a5ddcaff..0a2d650a97b 100644 --- a/doc/manual/source/installation/installing-binary.md +++ b/doc/manual/source/installation/installing-binary.md @@ -19,11 +19,6 @@ This performs the default type of installation for your platform: - [Multi-user](#multi-user-installation): - Linux with systemd and without SELinux - macOS -- [Single-user](#single-user-installation): - - Linux without systemd - - Linux with SELinux - -We recommend the multi-user installation if it supports your platform and you can authenticate with `sudo`. The installer can configured with various command line arguments and environment variables. To show available command line flags: @@ -42,7 +37,7 @@ The directory for each version contains the corresponding SHA-256 hash. All installation scripts are invoked the same way: ```console -$ export VERSION=2.19.2 +$ export VERSION=2.19.2 $ curl -L https://releases.nixos.org/nix/nix-$VERSION/install | sh ``` @@ -64,24 +59,6 @@ $ bash <(curl -L https://nixos.org/nix/install) --daemon You can run this under your usual user account or `root`. The script will invoke `sudo` as needed. -# Single User Installation - -To explicitly select a single-user installation on your system: - -```console -$ bash <(curl -L https://nixos.org/nix/install) --no-daemon -``` - -In a single-user installation, `/nix` is owned by the invoking user. -The script will invoke `sudo` to create `/nix` if it doesn’t already exist. 
-If you don’t have `sudo`, manually create `/nix` as `root`: - -```console -$ su root -# mkdir /nix -# chown alice /nix -``` - # Installing from a binary tarball You can also download a binary tarball that contains Nix and all its dependencies: diff --git a/doc/manual/source/installation/installing-docker.md b/doc/manual/source/installation/installing-docker.md index 9354c1a7228..6f77d6a5708 100644 --- a/doc/manual/source/installation/installing-docker.md +++ b/doc/manual/source/installation/installing-docker.md @@ -57,21 +57,3 @@ $ nix build ./\#hydraJobs.dockerImage.x86_64-linux $ docker load -i ./result/image.tar.gz $ docker run -ti nix:2.5pre20211105 ``` - -# Docker image with non-root Nix - -If you would like to run Nix in a container under a user other than `root`, -you can build an image with a non-root single-user installation of Nix -by specifying the `uid`, `gid`, `uname`, and `gname` arguments to `docker.nix`: - -```console -$ nix build --file docker.nix \ - --arg uid 1000 \ - --arg gid 1000 \ - --argstr uname user \ - --argstr gname user \ - --argstr name nix-user \ - --out-link nix-user.tar.gz -$ docker load -i nix-user.tar.gz -$ docker run -ti nix-user -``` diff --git a/doc/manual/source/installation/nix-security.md b/doc/manual/source/installation/nix-security.md deleted file mode 100644 index 1e9036b68b2..00000000000 --- a/doc/manual/source/installation/nix-security.md +++ /dev/null @@ -1,15 +0,0 @@ -# Security - -Nix has two basic security models. First, it can be used in “single-user -mode”, which is similar to what most other package management tools do: -there is a single user (typically root) who performs all package -management operations. All other users can then use the installed -packages, but they cannot perform package management operations -themselves. - -Alternatively, you can configure Nix in “multi-user mode”. In this -model, all users can perform package management operations — for -instance, every user can install software without requiring root -privileges. Nix ensures that this is secure. For instance, it’s not -possible for one user to overwrite a package used by another user with a -Trojan horse. diff --git a/doc/manual/source/installation/single-user.md b/doc/manual/source/installation/single-user.md deleted file mode 100644 index f9a3b26edf4..00000000000 --- a/doc/manual/source/installation/single-user.md +++ /dev/null @@ -1,9 +0,0 @@ -# Single-User Mode - -In single-user mode, all Nix operations that access the database in -`prefix/var/nix/db` or modify the Nix store in `prefix/store` must be -performed under the user ID that owns those directories. This is -typically root. (If you install from RPM packages, that’s in fact the -default ownership.) However, on single-user machines, it is often -convenient to `chown` those directories to your normal user account so -that you don’t have to `su` to root all the time. diff --git a/doc/manual/source/installation/uninstall.md b/doc/manual/source/installation/uninstall.md index 8d45da6bba0..2762edbf43c 100644 --- a/doc/manual/source/installation/uninstall.md +++ b/doc/manual/source/installation/uninstall.md @@ -154,12 +154,3 @@ which you may remove. > You do not have to reboot to finish uninstalling Nix. > The uninstall is complete. > macOS (Catalina+) directly controls root directories, and its read-only root will prevent you from manually deleting the empty `/nix` mountpoint. 
- -## Single User - -To remove a [single-user installation](./installing-binary.md#single-user-installation) of Nix, run: - -```console -rm -rf /nix ~/.nix-channels ~/.nix-defexpr ~/.nix-profile -``` -You might also want to manually remove references to Nix from your `~/.profile`. diff --git a/doc/manual/source/installation/upgrading.md b/doc/manual/source/installation/upgrading.md index a433f1d30e6..f0992671d03 100644 --- a/doc/manual/source/installation/upgrading.md +++ b/doc/manual/source/installation/upgrading.md @@ -32,9 +32,3 @@ $ sudo nix-env --install --file '' --attr nix cacert -I nixpkgs=channel $ sudo launchctl remove org.nixos.nix-daemon $ sudo launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist ``` - -## Single-user all platforms - -```console -$ nix-env --install --file '' --attr nix cacert -I nixpkgs=channel:nixpkgs-unstable -``` From d0b6f2f26fd06258a6cf10ee9ddf85c7accf4c01 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Tue, 25 Feb 2025 14:40:53 -0300 Subject: [PATCH 224/361] Remove one more reference to single-user mode --- doc/manual/source/installation/index.md | 19 ++++--------------- 1 file changed, 4 insertions(+), 15 deletions(-) diff --git a/doc/manual/source/installation/index.md b/doc/manual/source/installation/index.md index d71634946d6..b2c908053d5 100644 --- a/doc/manual/source/installation/index.md +++ b/doc/manual/source/installation/index.md @@ -1,18 +1,11 @@ # Installation This section describes how to install and configure Nix for first-time use. +Nix follows a [multi-user](./multi-user.md) model on both Linux and macOS. -The current recommended option on Linux and MacOS is [multi-user](#multi-user). - -## Multi-user - -This installation offers better sharing, improved isolation, and more security -over a single user installation. - -This option requires either: - -* Linux running systemd, with SELinux disabled -* MacOS +```console +$ curl -L https://nixos.org/nix/install | sh -s -- --daemon +``` > **Updating to macOS 15 Sequoia** > @@ -22,10 +15,6 @@ This option requires either: > ``` > when running Nix commands, refer to GitHub issue [NixOS/nix#10892](https://github.com/NixOS/nix/issues/10892) for instructions to fix your installation without reinstalling. -```console -$ curl -L https://nixos.org/nix/install | sh -s -- --daemon -``` - ## Distributions The Nix community maintains installers for several distributions. 
From 4248d5c9a2ce9f5b5cd8dcbae53c5735dff737c1 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Tue, 25 Feb 2025 14:51:05 -0300 Subject: [PATCH 225/361] Restore section about non-root Nix in Docker --- .../source/installation/installing-docker.md | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/doc/manual/source/installation/installing-docker.md b/doc/manual/source/installation/installing-docker.md index 6f77d6a5708..9354c1a7228 100644 --- a/doc/manual/source/installation/installing-docker.md +++ b/doc/manual/source/installation/installing-docker.md @@ -57,3 +57,21 @@ $ nix build ./\#hydraJobs.dockerImage.x86_64-linux $ docker load -i ./result/image.tar.gz $ docker run -ti nix:2.5pre20211105 ``` + +# Docker image with non-root Nix + +If you would like to run Nix in a container under a user other than `root`, +you can build an image with a non-root single-user installation of Nix +by specifying the `uid`, `gid`, `uname`, and `gname` arguments to `docker.nix`: + +```console +$ nix build --file docker.nix \ + --arg uid 1000 \ + --arg gid 1000 \ + --argstr uname user \ + --argstr gname user \ + --argstr name nix-user \ + --out-link nix-user.tar.gz +$ docker load -i nix-user.tar.gz +$ docker run -ti nix-user +``` From daa7f274f54772473e975519111b296c165e9566 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Tue, 25 Feb 2025 14:56:05 -0300 Subject: [PATCH 226/361] Restore the Nix security doc --- doc/manual/source/installation/nix-security.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 doc/manual/source/installation/nix-security.md diff --git a/doc/manual/source/installation/nix-security.md b/doc/manual/source/installation/nix-security.md new file mode 100644 index 00000000000..1e9036b68b2 --- /dev/null +++ b/doc/manual/source/installation/nix-security.md @@ -0,0 +1,15 @@ +# Security + +Nix has two basic security models. First, it can be used in “single-user +mode”, which is similar to what most other package management tools do: +there is a single user (typically root) who performs all package +management operations. All other users can then use the installed +packages, but they cannot perform package management operations +themselves. + +Alternatively, you can configure Nix in “multi-user mode”. In this +model, all users can perform package management operations — for +instance, every user can install software without requiring root +privileges. Nix ensures that this is secure. For instance, it’s not +possible for one user to overwrite a package used by another user with a +Trojan horse. 
From 2b7214197e5385e5eec5a64536beb2439c7b96d8 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Tue, 25 Feb 2025 15:18:25 -0300 Subject: [PATCH 227/361] Consolidate docs --- doc/manual/source/SUMMARY.md.in | 2 +- .../source/installation/nix-security.md | 96 ++++++++++++++++--- 2 files changed, 84 insertions(+), 14 deletions(-) diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index 9d465e4bb49..d5f8b94df6f 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -10,7 +10,7 @@ - [Obtaining a Source Distribution](installation/obtaining-source.md) - [Building Nix from Source](installation/building-source.md) - [Using Nix within Docker](installation/installing-docker.md) - - [Multi-User Mode](installation/multi-user.md) + - [Security](installation/nix-security.md) - [Environment Variables](installation/env-variables.md) - [Upgrading Nix](installation/upgrading.md) - [Uninstalling Nix](installation/uninstall.md) diff --git a/doc/manual/source/installation/nix-security.md b/doc/manual/source/installation/nix-security.md index 1e9036b68b2..61cad24c2b3 100644 --- a/doc/manual/source/installation/nix-security.md +++ b/doc/manual/source/installation/nix-security.md @@ -1,15 +1,85 @@ # Security -Nix has two basic security models. First, it can be used in “single-user -mode”, which is similar to what most other package management tools do: -there is a single user (typically root) who performs all package -management operations. All other users can then use the installed -packages, but they cannot perform package management operations -themselves. - -Alternatively, you can configure Nix in “multi-user mode”. In this -model, all users can perform package management operations — for -instance, every user can install software without requiring root -privileges. Nix ensures that this is secure. For instance, it’s not -possible for one user to overwrite a package used by another user with a -Trojan horse. +Nix follows a [**multi-user**](#multi-user-model) security model in which all +users can perform package management operations. Every user can, for example, +install software without requiring root privileges, and Nix ensures that this +is secure. It's *not* possible for one user to, for example, overwrite a +package used by another user with a Trojan horse. + +## Multi-User model + +To allow a Nix store to be shared safely among multiple users, it is +important that users are not able to run builders that modify the Nix +store or database in arbitrary ways, or that interfere with builds +started by other users. If they could do so, they could install a Trojan +horse in some package and compromise the accounts of other users. + +To prevent this, the Nix store and database are owned by some privileged +user (usually `root`) and builders are executed under special user +accounts (usually named `nixbld1`, `nixbld2`, etc.). When a unprivileged +user runs a Nix command, actions that operate on the Nix store (such as +builds) are forwarded to a *Nix daemon* running under the owner of the +Nix store/database that performs the operation. + +> **Note** +> +> Multi-user mode has one important limitation: only root and a set of +> trusted users specified in `nix.conf` can specify arbitrary binary +> caches. So while unprivileged users may install packages from +> arbitrary Nix expressions, they may not get pre-built binaries. + +### Setting up the build users + +The *build users* are the special UIDs under which builds are performed. 
+They should all be members of the *build users group* `nixbld`. This +group should have no other members. The build users should not be +members of any other group. On Linux, you can create the group and users +as follows: + +```console +$ groupadd -r nixbld +$ for n in $(seq 1 10); do useradd -c "Nix build user $n" \ + -d /var/empty -g nixbld -G nixbld -M -N -r -s "$(which nologin)" \ + nixbld$n; done +``` + +This creates 10 build users. There can never be more concurrent builds +than the number of build users, so you may want to increase this if you +expect to do many builds at the same time. + +### Running the daemon + +The [Nix daemon](../command-ref/nix-daemon.md) should be started as +follows (as `root`): + +```console +$ nix-daemon +``` + +You’ll want to put that line somewhere in your system’s boot scripts. + +To let unprivileged users use the daemon, they should set the +[`NIX_REMOTE` environment variable](../command-ref/env-common.md) to +`daemon`. So you should put a line like + +```console +export NIX_REMOTE=daemon +``` + +into the users’ login scripts. + +### Restricting access + +To limit which users can perform Nix operations, you can use the +permissions on the directory `/nix/var/nix/daemon-socket`. For instance, +if you want to restrict the use of Nix to the members of a group called +`nix-users`, do + +```console +$ chgrp nix-users /nix/var/nix/daemon-socket +$ chmod ug=rwx,o= /nix/var/nix/daemon-socket +``` + +This way, users who are not in the `nix-users` group cannot connect to +the Unix domain socket `/nix/var/nix/daemon-socket/socket`, so they +cannot perform Nix operations. From 705a7b9fd809612c88a978a28501e7ef225d633b Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Tue, 25 Feb 2025 15:24:15 -0300 Subject: [PATCH 228/361] Fix broken links --- doc/manual/source/command-ref/env-common.md | 2 +- doc/manual/source/installation/index.md | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/doc/manual/source/command-ref/env-common.md b/doc/manual/source/command-ref/env-common.md index ee3995111e9..9f7f3442343 100644 --- a/doc/manual/source/command-ref/env-common.md +++ b/doc/manual/source/command-ref/env-common.md @@ -102,7 +102,7 @@ Most Nix commands interpret the following environment variables: This variable should be set to `daemon` if you want to use the Nix daemon to execute Nix operations. This is necessary in [multi-user - Nix installations](@docroot@/installation/multi-user.md). If the Nix + Nix installations](@docroot@/installation/security.md#multi-user-model). If the Nix daemon's Unix socket is at some non-standard path, this variable should be set to `unix://path/to/socket`. Otherwise, it should be left unset. diff --git a/doc/manual/source/installation/index.md b/doc/manual/source/installation/index.md index b2c908053d5..f5ad817dfdc 100644 --- a/doc/manual/source/installation/index.md +++ b/doc/manual/source/installation/index.md @@ -1,7 +1,8 @@ # Installation This section describes how to install and configure Nix for first-time use. -Nix follows a [multi-user](./multi-user.md) model on both Linux and macOS. +Nix follows a [multi-user](./security.md#multi-user-model) model on both Linux +and macOS. 
```console $ curl -L https://nixos.org/nix/install | sh -s -- --daemon From feb60c54a92efe017bdc388a381c2c682a887b33 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Tue, 25 Feb 2025 15:27:06 -0300 Subject: [PATCH 229/361] Fix links again --- doc/manual/source/command-ref/env-common.md | 2 +- doc/manual/source/installation/index.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual/source/command-ref/env-common.md b/doc/manual/source/command-ref/env-common.md index 9f7f3442343..bd428a232eb 100644 --- a/doc/manual/source/command-ref/env-common.md +++ b/doc/manual/source/command-ref/env-common.md @@ -102,7 +102,7 @@ Most Nix commands interpret the following environment variables: This variable should be set to `daemon` if you want to use the Nix daemon to execute Nix operations. This is necessary in [multi-user - Nix installations](@docroot@/installation/security.md#multi-user-model). If the Nix + Nix installations](@docroot@/installation/nix-security.md#multi-user-model). If the Nix daemon's Unix socket is at some non-standard path, this variable should be set to `unix://path/to/socket`. Otherwise, it should be left unset. diff --git a/doc/manual/source/installation/index.md b/doc/manual/source/installation/index.md index f5ad817dfdc..a4e2c5af07f 100644 --- a/doc/manual/source/installation/index.md +++ b/doc/manual/source/installation/index.md @@ -1,7 +1,7 @@ # Installation This section describes how to install and configure Nix for first-time use. -Nix follows a [multi-user](./security.md#multi-user-model) model on both Linux +Nix follows a [multi-user](./nix-security.md#multi-user-model) model on both Linux and macOS. ```console From 2e5d4de3e2d149991a1ac3da479f968a50ddde89 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 25 Feb 2025 21:33:05 +0000 Subject: [PATCH 230/361] Prepare release v0.38.0 From bd097de3a587a9224a9a4985722d7956e7c9c3a1 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 25 Feb 2025 21:33:08 +0000 Subject: [PATCH 231/361] Set .version-determinate to 0.38.0 --- .version-determinate | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version-determinate b/.version-determinate index 4a36342fcab..ca75280b09b 100644 --- a/.version-determinate +++ b/.version-determinate @@ -1 +1 @@ -3.0.0 +0.38.0 From 2da52b19289601437f289fe5fef375f8b714c3e6 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Tue, 25 Feb 2025 16:34:19 -0500 Subject: [PATCH 232/361] Update .version-determinate --- .version-determinate | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version-determinate b/.version-determinate index ca75280b09b..4a36342fcab 100644 --- a/.version-determinate +++ b/.version-determinate @@ -1 +1 @@ -0.38.0 +3.0.0 From 09d1eb3f8747c591c68f37991eac0fe0f6639cbc Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 26 Feb 2025 17:22:43 +0100 Subject: [PATCH 233/361] Run all VM tests --- .github/workflows/ci.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c3a96704f77..443664e496c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -109,8 +109,7 @@ jobs: .hydraJobs.tests | with_entries(select(.value.type == "derivation")) | keys[] - | ".#hydraJobs.tests." + .' \ - | head -n5) # FIXME: for testing the merge queue + | ".#hydraJobs.tests." 
+ .') flake_regressions: if: github.event_name == 'merge_group' From 53ec907bb145f2df645341615a20e761c981530a Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 27 Feb 2025 01:20:50 +0000 Subject: [PATCH 234/361] Prepare release v0.38.1 From 01ee9695817dedf252d097422db6832a8a5a0893 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 27 Feb 2025 01:20:53 +0000 Subject: [PATCH 235/361] Set .version-determinate to 0.38.1 --- .version-determinate | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version-determinate b/.version-determinate index 4a36342fcab..bb22182d4f7 100644 --- a/.version-determinate +++ b/.version-determinate @@ -1 +1 @@ -3.0.0 +0.38.1 From 8e44b48c4f82ef245aee5c3e72fda14a87246222 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Wed, 26 Feb 2025 20:21:27 -0500 Subject: [PATCH 236/361] Apply suggestions from code review --- .version-determinate | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version-determinate b/.version-determinate index bb22182d4f7..4a36342fcab 100644 --- a/.version-determinate +++ b/.version-determinate @@ -1 +1 @@ -0.38.1 +3.0.0 From 288c5b0abd217b1d247e1c8787ea19da0a511251 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 28 Feb 2025 16:16:53 +0100 Subject: [PATCH 237/361] Include DeterminateNix in the User-Agent header The User-Agent now shows `curl/8.11.0 Nix/2.26.3 DeterminateNix/3.0.0`. This is useful for distinguishing Determinate Nix from upstream Nix in binary cache logs. --- src/libstore/filetransfer.cc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index 932e1d75684..28a437e5641 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -324,7 +324,9 @@ struct curlFileTransfer : public FileTransfer curl_easy_setopt(req, CURLOPT_MAXREDIRS, 10); curl_easy_setopt(req, CURLOPT_NOSIGNAL, 1); curl_easy_setopt(req, CURLOPT_USERAGENT, - ("curl/" LIBCURL_VERSION " Nix/" + nixVersion + + ("curl/" LIBCURL_VERSION + " Nix/" + nixVersion + + " DeterminateNix/" + determinateNixVersion + (fileTransferSettings.userAgentSuffix != "" ? " " + fileTransferSettings.userAgentSuffix.get() : "")).c_str()); #if LIBCURL_VERSION_NUM >= 0x072b00 curl_easy_setopt(req, CURLOPT_PIPEWAIT, 1); From d8606f96eebc18947c5e5318162726c1ba225cc5 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 28 Feb 2025 17:40:32 +0100 Subject: [PATCH 238/361] packaging/everything.nix: Use a multi-output derivation This should fix a few packaging regressions. `dev` also includes a merged `includes/`, which may be helpful until inter-component includes are fixed properly. 
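As a rough sketch (not part of this patch; it assumes the combined package is exposed as `nix` in a package set, with output names as listed in `outputs` below), downstream expressions would address the split outputs roughly like this:

```nix
# Sketch only — illustrative consumer of the multi-output package.
{ pkgs }:
{
  cli      = pkgs.nix;      # default output: the CLI, merged from nix-cli
  headers  = pkgs.nix.dev;  # merged dev outputs of the libraries, incl. a combined include/
  manual   = pkgs.nix.doc;  # forwarded from nix-manual
  manpages = pkgs.nix.man;  # forwarded from nix-manual.man
}
```
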
(cherry picked from commit 41085295ab3717b5ec8d348307dd4c9c1d378846) --- packaging/everything.nix | 216 +++++++++++++++++++++++---------------- 1 file changed, 130 insertions(+), 86 deletions(-) diff --git a/packaging/everything.nix b/packaging/everything.nix index 0974a34df50..c9ad26823b8 100644 --- a/packaging/everything.nix +++ b/packaging/everything.nix @@ -1,6 +1,7 @@ { lib, stdenv, + lndir, buildEnv, nix-util, @@ -38,7 +39,6 @@ nix-perl-bindings, testers, - runCommand, }: let @@ -119,92 +119,136 @@ let }; in -(buildEnv { - name = "nix-${nix-cli.version}"; - paths = [ - nix-cli - nix-manual.man +stdenv.mkDerivation (finalAttrs: { + pname = "nix"; + version = nix-cli.version; + + /** + This package uses a multi-output derivation, even though some outputs could + have been provided directly by the constituent component that provides it. + + This is because not all tooling handles packages composed of arbitrary + outputs yet. This includes nix itself, https://github.com/NixOS/nix/issues/6507. + + `devdoc` is also available, but not listed here, because this attribute is + not an output of the same derivation that provides `out`, `dev`, etc. + */ + outputs = [ + "out" + "dev" + "doc" + "man" ]; - meta.mainProgram = "nix"; -}).overrideAttrs - ( - finalAttrs: prevAttrs: { - doCheck = true; - doInstallCheck = true; - - checkInputs = - [ - # Make sure the unit tests have passed - nix-util-tests.tests.run - nix-store-tests.tests.run - nix-expr-tests.tests.run - nix-fetchers-tests.tests.run - nix-flake-tests.tests.run - - # Make sure the functional tests have passed - nix-functional-tests - - # dev bundle is ok - # (checkInputs must be empty paths??) - (runCommand "check-pkg-config" { checked = dev.tests.pkg-config; } "mkdir $out") - ] - ++ lib.optionals - (!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform) - [ - # Perl currently fails in static build - # TODO: Split out tests into a separate derivation? - nix-perl-bindings - ]; - passthru = prevAttrs.passthru // { - inherit (nix-cli) version; - - /** - These are the libraries that are part of the Nix project. They are used - by the Nix CLI and other tools. - - If you need to use these libraries in your project, we recommend to use - the `-c` C API libraries exclusively, if possible. - - We also recommend that you build the complete package to ensure that the unit tests pass. - You could do this in CI, or by passing it in an unused environment variable. e.g in a `mkDerivation` call: - - ```nix - buildInputs = [ nix.libs.nix-util-c nix.libs.nix-store-c ]; - # Make sure the nix libs we use are ok - unusedInputsForTests = [ nix ]; - disallowedReferences = nix.all; - ``` - */ - inherit libs; - - tests = prevAttrs.passthru.tests or { } // { - # TODO: create a proper fixpoint and: - # pkg-config = - # testers.hasPkgConfigModules { - # package = finalPackage; - # }; - }; + /** + Unpacking is handled in this package's constituent components + */ + dontUnpack = true; + /** + Building is handled in this package's constituent components + */ + dontBuild = true; + + /** + `doCheck` controles whether tests are added as build gate for the combined package. + This includes both the unit tests and the functional tests, but not the + integration tests that run in CI (the flake's `hydraJobs` and some of the `checks`). + */ + doCheck = true; + + /** + `fixupPhase` currently doesn't understand that a symlink output isn't writable. + + We don't compile or link anything in this derivation, so fixups aren't needed. 
+ */ + dontFixup = true; + + checkInputs = + [ + # Make sure the unit tests have passed + nix-util-tests.tests.run + nix-store-tests.tests.run + nix-expr-tests.tests.run + nix-fetchers-tests.tests.run + nix-flake-tests.tests.run + + # Make sure the functional tests have passed + nix-functional-tests + ] + ++ lib.optionals + (!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform) + [ + # Perl currently fails in static build + # TODO: Split out tests into a separate derivation? + nix-perl-bindings + ]; - /** - A derivation referencing the `dev` outputs of the Nix libraries. - */ - inherit dev; - inherit devdoc; - doc = nix-manual; - outputs = [ - "out" - "dev" - "devdoc" - "doc" - ]; - all = lib.attrValues ( - lib.genAttrs finalAttrs.passthru.outputs (outName: finalAttrs.finalPackage.${outName}) - ); - }; - meta = prevAttrs.meta // { - description = "The Nix package manager"; - pkgConfigModules = dev.meta.pkgConfigModules; + nativeBuildInputs = [ + lndir + ]; + + installPhase = + let + devPaths = lib.mapAttrsToList (_k: lib.getDev) finalAttrs.finalPackage.libs; + in + '' + mkdir -p $out $dev $doc $man + + # Merged outputs + lndir ${nix-cli} $out + for lib in ${lib.escapeShellArgs devPaths}; do + lndir $lib $dev + done + + # Forwarded outputs + ln -s ${nix-manual} $doc + ln -s ${nix-manual.man} $man + ''; + + passthru = { + inherit (nix-cli) version; + + /** + These are the libraries that are part of the Nix project. They are used + by the Nix CLI and other tools. + + If you need to use these libraries in your project, we recommend to use + the `-c` C API libraries exclusively, if possible. + + We also recommend that you build the complete package to ensure that the unit tests pass. + You could do this in CI, or by passing it in an unused environment variable. e.g in a `mkDerivation` call: + + ```nix + buildInputs = [ nix.libs.nix-util-c nix.libs.nix-store-c ]; + # Make sure the nix libs we use are ok + unusedInputsForTests = [ nix ]; + disallowedReferences = nix.all; + ``` + */ + inherit libs; + + /** + Developer documentation for `nix`, in `share/doc/nix/{internal,external}-api/`. + + This is not a proper output; see `outputs` for context. + */ + inherit devdoc; + + /** + Extra tests that test this package, but do not run as part of the build. + See + */ + tests = { + pkg-config = testers.hasPkgConfigModules { + package = finalAttrs.finalPackage; }; - } - ) + }; + }; + + meta = { + mainProgram = "nix"; + description = "The Nix package manager"; + pkgConfigModules = dev.meta.pkgConfigModules; + }; + +}) From 87bf338612376fa15f6fa0f60ac7d4e79b612901 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 4 Mar 2025 17:21:56 +0100 Subject: [PATCH 239/361] packaging: Typo in setVersionLayer / preConfigure Apparently dead code in our use case, but good to keep nonetheless. Credit: ztzg in https://github.com/NixOS/nix/pull/12498#pullrequestreview-2658031853 (cherry picked from commit dcaea8cb1c6482f0c64649fb8dc99a020351b53a) --- packaging/components.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packaging/components.nix b/packaging/components.nix index 9da864887cc..991d54241f0 100644 --- a/packaging/components.nix +++ b/packaging/components.nix @@ -51,7 +51,7 @@ let setVersionLayer = finalAttrs: prevAttrs: { preConfigure = - prevAttrs.prevAttrs or "" + prevAttrs.preConfigure or "" + # Update the repo-global .version file. # Symlink ./.version points there, but by default only workDir is writable. 
From 07d8bf7a6607502b443fedd9d4a034785b5c9474 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Wed, 5 Mar 2025 10:09:06 -0800 Subject: [PATCH 240/361] Fixup propose-release: don't puke if the version number didn't change --- .github/workflows/propose-release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/propose-release.yml b/.github/workflows/propose-release.yml index 1ba7f43e7db..8b897072cc7 100644 --- a/.github/workflows/propose-release.yml +++ b/.github/workflows/propose-release.yml @@ -26,4 +26,4 @@ jobs: extra-commands-early: | echo ${{ inputs.version }} > .version-determinate git add .version-determinate - git commit -m "Set .version-determinate to ${{ inputs.version }}" + git commit -m "Set .version-determinate to ${{ inputs.version }}" || true From 8b7b413e8bb75542fb6975190eaf35de5095fed8 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 5 Mar 2025 18:44:02 +0000 Subject: [PATCH 241/361] Prepare release v3.0.0 From af2384343408d8bc2722056217e7a4a3319c4e1e Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Wed, 5 Mar 2025 16:31:18 -0300 Subject: [PATCH 242/361] Remove macOS sequoia recommandations --- doc/manual/source/installation/index.md | 8 -------- doc/manual/source/installation/installing-binary.md | 8 -------- doc/manual/source/installation/uninstall.md | 8 -------- 3 files changed, 24 deletions(-) diff --git a/doc/manual/source/installation/index.md b/doc/manual/source/installation/index.md index a4e2c5af07f..96b2a1c2456 100644 --- a/doc/manual/source/installation/index.md +++ b/doc/manual/source/installation/index.md @@ -8,14 +8,6 @@ and macOS. $ curl -L https://nixos.org/nix/install | sh -s -- --daemon ``` -> **Updating to macOS 15 Sequoia** -> -> If you recently updated to macOS 15 Sequoia and are getting -> ```console -> error: the user '_nixbld1' in the group 'nixbld' does not exist -> ``` -> when running Nix commands, refer to GitHub issue [NixOS/nix#10892](https://github.com/NixOS/nix/issues/10892) for instructions to fix your installation without reinstalling. - ## Distributions The Nix community maintains installers for several distributions. diff --git a/doc/manual/source/installation/installing-binary.md b/doc/manual/source/installation/installing-binary.md index 0a2d650a97b..6d0aa9bf880 100644 --- a/doc/manual/source/installation/installing-binary.md +++ b/doc/manual/source/installation/installing-binary.md @@ -1,13 +1,5 @@ # Installing a Binary Distribution -> **Updating to macOS 15 Sequoia** -> -> If you recently updated to macOS 15 Sequoia and are getting -> ```console -> error: the user '_nixbld1' in the group 'nixbld' does not exist -> ``` -> when running Nix commands, refer to GitHub issue [NixOS/nix#10892](https://github.com/NixOS/nix/issues/10892) for instructions to fix your installation without reinstalling. - To install the latest version Nix, run the following command: ```console diff --git a/doc/manual/source/installation/uninstall.md b/doc/manual/source/installation/uninstall.md index 2762edbf43c..cf8f419b656 100644 --- a/doc/manual/source/installation/uninstall.md +++ b/doc/manual/source/installation/uninstall.md @@ -43,14 +43,6 @@ which you may remove. 
### macOS -> **Updating to macOS 15 Sequoia** -> -> If you recently updated to macOS 15 Sequoia and are getting -> ```console -> error: the user '_nixbld1' in the group 'nixbld' does not exist -> ``` -> when running Nix commands, refer to GitHub issue [NixOS/nix#10892](https://github.com/NixOS/nix/issues/10892) for instructions to fix your installation without reinstalling. - 1. If system-wide shell initialisation files haven't been altered since installing Nix, use the backups made by the installer: ```console From be9fbb04bb34f4c209be8bf81679c028b2a6b110 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Wed, 5 Mar 2025 16:35:55 -0300 Subject: [PATCH 243/361] Remove environment variables doc --- doc/manual/source/SUMMARY.md.in | 1 - .../source/installation/env-variables.md | 62 ------------------- doc/manual/source/quick-start.md | 9 ++- 3 files changed, 7 insertions(+), 65 deletions(-) delete mode 100644 doc/manual/source/installation/env-variables.md diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index d5f8b94df6f..e97e072972d 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -11,7 +11,6 @@ - [Building Nix from Source](installation/building-source.md) - [Using Nix within Docker](installation/installing-docker.md) - [Security](installation/nix-security.md) - - [Environment Variables](installation/env-variables.md) - [Upgrading Nix](installation/upgrading.md) - [Uninstalling Nix](installation/uninstall.md) - [Nix Store](store/index.md) diff --git a/doc/manual/source/installation/env-variables.md b/doc/manual/source/installation/env-variables.md deleted file mode 100644 index 0350904211a..00000000000 --- a/doc/manual/source/installation/env-variables.md +++ /dev/null @@ -1,62 +0,0 @@ -# Environment Variables - -To use Nix, some environment variables should be set. In particular, -`PATH` should contain the directories `prefix/bin` and -`~/.nix-profile/bin`. The first directory contains the Nix tools -themselves, while `~/.nix-profile` is a symbolic link to the current -*user environment* (an automatically generated package consisting of -symlinks to installed packages). The simplest way to set the required -environment variables is to include the file -`prefix/etc/profile.d/nix.sh` in your `~/.profile` (or similar), like -this: - -```bash -source prefix/etc/profile.d/nix.sh -``` - -# `NIX_SSL_CERT_FILE` - -If you need to specify a custom certificate bundle to account for an -HTTPS-intercepting man in the middle proxy, you must specify the path to -the certificate bundle in the environment variable `NIX_SSL_CERT_FILE`. - -If you don't specify a `NIX_SSL_CERT_FILE` manually, Nix will install -and use its own certificate bundle. - -Set the environment variable and install Nix - -```console -$ export NIX_SSL_CERT_FILE=/etc/ssl/my-certificate-bundle.crt -$ curl -L https://nixos.org/nix/install | sh -``` - -In the shell profile and rc files (for example, `/etc/bashrc`, -`/etc/zshrc`), add the following line: - -```bash -export NIX_SSL_CERT_FILE=/etc/ssl/my-certificate-bundle.crt -``` - -> **Note** -> -> You must not add the export and then do the install, as the Nix -> installer will detect the presence of Nix configuration, and abort. 
- -If you use the Nix daemon, you should also add the following to -`/etc/nix/nix.conf`: - -``` -ssl-cert-file = /etc/ssl/my-certificate-bundle.crt -``` - -## Proxy Environment Variables - -The Nix installer has special handling for these proxy-related -environment variables: `http_proxy`, `https_proxy`, `ftp_proxy`, -`all_proxy`, `no_proxy`, `HTTP_PROXY`, `HTTPS_PROXY`, `FTP_PROXY`, -`ALL_PROXY`, `NO_PROXY`. - -If any of these variables are set when running the Nix installer, then -the installer will create an override file at -`/etc/systemd/system/nix-daemon.service.d/override.conf` so `nix-daemon` -will use them. diff --git a/doc/manual/source/quick-start.md b/doc/manual/source/quick-start.md index 9eb7a326590..c8be74e129e 100644 --- a/doc/manual/source/quick-start.md +++ b/doc/manual/source/quick-start.md @@ -3,10 +3,13 @@ This chapter is for impatient people who don't like reading documentation. For more in-depth information you are kindly referred to subsequent chapters. -1. Install Nix: +1. Install Nix. + We recommend that macOS users use [Determinate.pkg][pkg]. + For Linux and Windows Subsystem for Linux (WSL) users: ```console - $ curl -L https://nixos.org/nix/install | sh + $ curl --proto '=https' --tlsv1.2 -sSf -L https://install.determinate.systems/nix | \ + sh -s -- install --determinate ``` The install script will use `sudo`, so make sure you have sufficient rights. @@ -41,3 +44,5 @@ For more in-depth information you are kindly referred to subsequent chapters. ```console $ nix-collect-garbage ``` + +[pkg]: https://install.determinate.systems/determinate-pkg/stable/Universal From ff691e761b1991f6492f06045406e0818c66e127 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Wed, 5 Mar 2025 16:39:03 -0300 Subject: [PATCH 244/361] Delete binary installation doc --- doc/manual/source/SUMMARY.md.in | 1 - doc/manual/source/installation/index.md | 10 +- .../source/installation/installing-binary.md | 127 ------------------ doc/manual/source/quick-start.md | 2 +- 4 files changed, 7 insertions(+), 133 deletions(-) delete mode 100644 doc/manual/source/installation/installing-binary.md diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index e97e072972d..612867c2586 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -4,7 +4,6 @@ - [Quick Start](quick-start.md) - [Installation](installation/index.md) - [Supported Platforms](installation/supported-platforms.md) - - [Installing a Binary Distribution](installation/installing-binary.md) - [Installing Nix from Source](installation/installing-source.md) - [Prerequisites](installation/prerequisites-source.md) - [Obtaining a Source Distribution](installation/obtaining-source.md) diff --git a/doc/manual/source/installation/index.md b/doc/manual/source/installation/index.md index 96b2a1c2456..1a1d4efdc98 100644 --- a/doc/manual/source/installation/index.md +++ b/doc/manual/source/installation/index.md @@ -1,11 +1,11 @@ # Installation -This section describes how to install and configure Nix for first-time use. -Nix follows a [multi-user](./nix-security.md#multi-user-model) model on both Linux -and macOS. +We recommend that macOS users install Determinate Nix using [Determinate.pkg][pkg]. 
+For Linux and Windows Subsystem for Linux (WSL) users: ```console -$ curl -L https://nixos.org/nix/install | sh -s -- --daemon +$ curl --proto '=https' --tlsv1.2 -sSf -L https://install.determinate.systems/nix | \ + sh -s -- install --determinate ``` ## Distributions @@ -13,3 +13,5 @@ $ curl -L https://nixos.org/nix/install | sh -s -- --daemon The Nix community maintains installers for several distributions. They can be found in the [`nix-community/nix-installers`](https://github.com/nix-community/nix-installers) repository. + +[pkg]: https://install.determinate.systems/determinate-pkg/stable/Universal diff --git a/doc/manual/source/installation/installing-binary.md b/doc/manual/source/installation/installing-binary.md deleted file mode 100644 index 6d0aa9bf880..00000000000 --- a/doc/manual/source/installation/installing-binary.md +++ /dev/null @@ -1,127 +0,0 @@ -# Installing a Binary Distribution - -To install the latest version Nix, run the following command: - -```console -$ curl -L https://nixos.org/nix/install | sh -``` - -This performs the default type of installation for your platform: - -- [Multi-user](#multi-user-installation): - - Linux with systemd and without SELinux - - macOS - -The installer can configured with various command line arguments and environment variables. -To show available command line flags: - -```console -$ curl -L https://nixos.org/nix/install | sh -s -- --help -``` - -To check what it does and how it can be customised further, [download and edit the second-stage installation script](#installing-from-a-binary-tarball). - -# Installing a pinned Nix version from a URL - -Version-specific installation URLs for all Nix versions since 1.11.16 can be found at [releases.nixos.org](https://releases.nixos.org/?prefix=nix/). -The directory for each version contains the corresponding SHA-256 hash. - -All installation scripts are invoked the same way: - -```console -$ export VERSION=2.19.2 -$ curl -L https://releases.nixos.org/nix/nix-$VERSION/install | sh -``` - -# Multi User Installation - -The multi-user Nix installation creates system users and a system service for the Nix daemon. - -Supported systems: - -- Linux running systemd, with SELinux disabled -- macOS - -To explicitly instruct the installer to perform a multi-user installation on your system: - -```console -$ bash <(curl -L https://nixos.org/nix/install) --daemon -``` - -You can run this under your usual user account or `root`. -The script will invoke `sudo` as needed. - -# Installing from a binary tarball - -You can also download a binary tarball that contains Nix and all its dependencies: -- Choose a [version](https://releases.nixos.org/?prefix=nix/) and [system type](../development/building.md#platforms) -- Download and unpack the tarball -- Run the installer - -> **Example** -> -> ```console -> $ pushd $(mktemp -d) -> $ export VERSION=2.19.2 -> $ export SYSTEM=x86_64-linux -> $ curl -LO https://releases.nixos.org/nix/nix-$VERSION/nix-$VERSION-$SYSTEM.tar.xz -> $ tar xfj nix-$VERSION-$SYSTEM.tar.xz -> $ cd nix-$VERSION-$SYSTEM -> $ ./install -> $ popd -> ``` - -The installer can be customised with the environment variables declared in the file named `install-multi-user`. - -## Native packages for Linux distributions - -The Nix community maintains installers for some Linux distributions in their native packaging format(https://nix-community.github.io/nix-installers/). 
- -# macOS Installation - - -[]{#sect-macos-installation-change-store-prefix}[]{#sect-macos-installation-encrypted-volume}[]{#sect-macos-installation-symlink}[]{#sect-macos-installation-recommended-notes} - -We believe we have ironed out how to cleanly support the read-only root file system -on modern macOS. New installs will do this automatically. - -This section previously detailed the situation, options, and trade-offs, -but it now only outlines what the installer does. You don't need to know -this to run the installer, but it may help if you run into trouble: - -- create a new APFS volume for your Nix store -- update `/etc/synthetic.conf` to direct macOS to create a "synthetic" - empty root directory to mount your volume -- specify mount options for the volume in `/etc/fstab` - - `rw`: read-write - - `noauto`: prevent the system from auto-mounting the volume (so the - LaunchDaemon mentioned below can control mounting it, and to avoid - masking problems with that mounting service). - - `nobrowse`: prevent the Nix Store volume from showing up on your - desktop; also keeps Spotlight from spending resources to index - this volume - -- if you have FileVault enabled - - generate an encryption password - - put it in your system Keychain - - use it to encrypt the volume -- create a system LaunchDaemon to mount this volume early enough in the - boot process to avoid problems loading or restoring any programs that - need access to your Nix store - diff --git a/doc/manual/source/quick-start.md b/doc/manual/source/quick-start.md index c8be74e129e..428063f97cc 100644 --- a/doc/manual/source/quick-start.md +++ b/doc/manual/source/quick-start.md @@ -4,7 +4,7 @@ This chapter is for impatient people who don't like reading documentation. For more in-depth information you are kindly referred to subsequent chapters. 1. Install Nix. - We recommend that macOS users use [Determinate.pkg][pkg]. + We recommend that macOS users install Determinate Nix using [Determinate.pkg][pkg]. For Linux and Windows Subsystem for Linux (WSL) users: ```console From e09c7fe22dda3b8fee28f349b2d9fdd1e7fa17e5 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Wed, 5 Mar 2025 16:44:21 -0300 Subject: [PATCH 245/361] Update GitHub links --- doc/manual/book.toml.in | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual/book.toml.in b/doc/manual/book.toml.in index 7ecbaab0326..f3fd2722f3c 100644 --- a/doc/manual/book.toml.in +++ b/doc/manual/book.toml.in @@ -5,8 +5,8 @@ src = "source" [output.html] additional-css = ["custom.css"] additional-js = ["redirects.js"] -edit-url-template = "https://github.com/NixOS/nix/tree/master/doc/manual/{path}" -git-repository-url = "https://github.com/NixOS/nix" +edit-url-template = "https://github.com/DeterminateSystems/nix-src/tree/master/doc/manual/{path}" +git-repository-url = "https://github.com/DeterminateSystems/nix-src" # Handles replacing @docroot@ with a path to ./source relative to that markdown file, # {{#include handlebars}}, and the @generated@ syntax used within these. 
it mostly From 6381e065378ec5a97597fbfd1f6c784250743c83 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Wed, 5 Mar 2025 16:46:46 -0300 Subject: [PATCH 246/361] Reinstate binary doc --- .../source/command-ref/files/profiles.md | 2 +- .../source/installation/installing-binary.md | 135 ++++++++++++++++++ 2 files changed, 136 insertions(+), 1 deletion(-) create mode 100644 doc/manual/source/installation/installing-binary.md diff --git a/doc/manual/source/command-ref/files/profiles.md b/doc/manual/source/command-ref/files/profiles.md index b5c7378800f..e46e2418b4c 100644 --- a/doc/manual/source/command-ref/files/profiles.md +++ b/doc/manual/source/command-ref/files/profiles.md @@ -67,7 +67,7 @@ By default, this symlink points to: - `$NIX_STATE_DIR/profiles/per-user/root/profile` for `root` The `PATH` environment variable should include `/bin` subdirectory of the profile link (e.g. `~/.nix-profile/bin`) for the user environment to be visible to the user. -The [installer](@docroot@/installation/installing-binary.md) sets this up by default, unless you enable [`use-xdg-base-directories`]. +The installer sets this up by default, unless you enable [`use-xdg-base-directories`]. [`nix-env`]: @docroot@/command-ref/nix-env.md [`nix profile`]: @docroot@/command-ref/new-cli/nix3-profile.md diff --git a/doc/manual/source/installation/installing-binary.md b/doc/manual/source/installation/installing-binary.md new file mode 100644 index 00000000000..0a2d650a97b --- /dev/null +++ b/doc/manual/source/installation/installing-binary.md @@ -0,0 +1,135 @@ +# Installing a Binary Distribution + +> **Updating to macOS 15 Sequoia** +> +> If you recently updated to macOS 15 Sequoia and are getting +> ```console +> error: the user '_nixbld1' in the group 'nixbld' does not exist +> ``` +> when running Nix commands, refer to GitHub issue [NixOS/nix#10892](https://github.com/NixOS/nix/issues/10892) for instructions to fix your installation without reinstalling. + +To install the latest version Nix, run the following command: + +```console +$ curl -L https://nixos.org/nix/install | sh +``` + +This performs the default type of installation for your platform: + +- [Multi-user](#multi-user-installation): + - Linux with systemd and without SELinux + - macOS + +The installer can configured with various command line arguments and environment variables. +To show available command line flags: + +```console +$ curl -L https://nixos.org/nix/install | sh -s -- --help +``` + +To check what it does and how it can be customised further, [download and edit the second-stage installation script](#installing-from-a-binary-tarball). + +# Installing a pinned Nix version from a URL + +Version-specific installation URLs for all Nix versions since 1.11.16 can be found at [releases.nixos.org](https://releases.nixos.org/?prefix=nix/). +The directory for each version contains the corresponding SHA-256 hash. + +All installation scripts are invoked the same way: + +```console +$ export VERSION=2.19.2 +$ curl -L https://releases.nixos.org/nix/nix-$VERSION/install | sh +``` + +# Multi User Installation + +The multi-user Nix installation creates system users and a system service for the Nix daemon. + +Supported systems: + +- Linux running systemd, with SELinux disabled +- macOS + +To explicitly instruct the installer to perform a multi-user installation on your system: + +```console +$ bash <(curl -L https://nixos.org/nix/install) --daemon +``` + +You can run this under your usual user account or `root`. +The script will invoke `sudo` as needed. 
+
+# Installing from a binary tarball
+
+You can also download a binary tarball that contains Nix and all its dependencies:
+- Choose a [version](https://releases.nixos.org/?prefix=nix/) and [system type](../development/building.md#platforms)
+- Download and unpack the tarball
+- Run the installer
+
+> **Example**
+>
+> ```console
+> $ pushd $(mktemp -d)
+> $ export VERSION=2.19.2
+> $ export SYSTEM=x86_64-linux
+> $ curl -LO https://releases.nixos.org/nix/nix-$VERSION/nix-$VERSION-$SYSTEM.tar.xz
+> $ tar xf nix-$VERSION-$SYSTEM.tar.xz
+> $ cd nix-$VERSION-$SYSTEM
+> $ ./install
+> $ popd
+> ```
+
+The installer can be customised with the environment variables declared in the file named `install-multi-user`.
+
+## Native packages for Linux distributions
+
+The Nix community maintains installers for some Linux distributions in [their native packaging format](https://nix-community.github.io/nix-installers/).
+
+# macOS Installation
+
+
+[]{#sect-macos-installation-change-store-prefix}[]{#sect-macos-installation-encrypted-volume}[]{#sect-macos-installation-symlink}[]{#sect-macos-installation-recommended-notes}
+
+We believe we have ironed out how to cleanly support the read-only root file system
+on modern macOS. New installs will do this automatically.
+
+This section previously detailed the situation, options, and trade-offs,
+but it now only outlines what the installer does. You don't need to know
+this to run the installer, but it may help if you run into trouble:
+
+- create a new APFS volume for your Nix store
+- update `/etc/synthetic.conf` to direct macOS to create a "synthetic"
+  empty root directory to mount your volume
+- specify mount options for the volume in `/etc/fstab`
+  - `rw`: read-write
+  - `noauto`: prevent the system from auto-mounting the volume (so the
+    LaunchDaemon mentioned below can control mounting it, and to avoid
+    masking problems with that mounting service).
+  - `nobrowse`: prevent the Nix Store volume from showing up on your
+    desktop; also keeps Spotlight from spending resources to index
+    this volume
+
+- if you have FileVault enabled
+  - generate an encryption password
+  - put it in your system Keychain
+  - use it to encrypt the volume
+- create a system LaunchDaemon to mount this volume early enough in the
+  boot process to avoid problems loading or restoring any programs that
+  need access to your Nix store
+
From c451f60cc7c2e4a7bc1f93b4251196868ccbab95 Mon Sep 17 00:00:00 2001
From: Luc Perkins
Date: Wed, 5 Mar 2025 16:51:55 -0300
Subject: [PATCH 247/361] Revamp uninstallation docs

---
 doc/manual/source/installation/uninstall.md | 151 +-------------------
 1 file changed, 5 insertions(+), 146 deletions(-)

diff --git a/doc/manual/source/installation/uninstall.md b/doc/manual/source/installation/uninstall.md
index 2762edbf43c..e95634c213a 100644
--- a/doc/manual/source/installation/uninstall.md
+++ b/doc/manual/source/installation/uninstall.md
@@ -1,156 +1,15 @@
 # Uninstalling Nix
 
-## Multi User
-
-Removing a [multi-user installation](./installing-binary.md#multi-user-installation) depends on the operating system.
-
-### Linux
-
-If you are on Linux with systemd:
-
-1.
Remove the Nix daemon service: - - ```console - sudo systemctl stop nix-daemon.service - sudo systemctl disable nix-daemon.socket nix-daemon.service - sudo systemctl daemon-reload - ``` - -Remove files created by Nix: +To uninstall Determinate Nix, use the uninstallation utility built into the [Determinate Nix Installer][installer]: ```console -sudo rm -rf /etc/nix /etc/profile.d/nix.sh /etc/tmpfiles.d/nix-daemon.conf /nix ~root/.nix-channels ~root/.nix-defexpr ~root/.nix-profile ~root/.cache/nix +$ /nix/nix-installer uninstall ``` -Remove build users and their group: +If you're certain that you want to uninstall, you can skip the confirmation step: ```console -for i in $(seq 1 32); do - sudo userdel nixbld$i -done -sudo groupdel nixbld +$ /nix/nix-installer uninstall --no-confirm ``` -There may also be references to Nix in - -- `/etc/bash.bashrc` -- `/etc/bashrc` -- `/etc/profile` -- `/etc/zsh/zshrc` -- `/etc/zshrc` - -which you may remove. - -### macOS - -> **Updating to macOS 15 Sequoia** -> -> If you recently updated to macOS 15 Sequoia and are getting -> ```console -> error: the user '_nixbld1' in the group 'nixbld' does not exist -> ``` -> when running Nix commands, refer to GitHub issue [NixOS/nix#10892](https://github.com/NixOS/nix/issues/10892) for instructions to fix your installation without reinstalling. - -1. If system-wide shell initialisation files haven't been altered since installing Nix, use the backups made by the installer: - - ```console - sudo mv /etc/zshrc.backup-before-nix /etc/zshrc - sudo mv /etc/bashrc.backup-before-nix /etc/bashrc - sudo mv /etc/bash.bashrc.backup-before-nix /etc/bash.bashrc - ``` - - Otherwise, edit `/etc/zshrc`, `/etc/bashrc`, and `/etc/bash.bashrc` to remove the lines sourcing `nix-daemon.sh`, which should look like this: - - ```bash - # Nix - if [ -e '/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh' ]; then - . '/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh' - fi - # End Nix - ``` - -2. Stop and remove the Nix daemon services: - - ```console - sudo launchctl unload /Library/LaunchDaemons/org.nixos.nix-daemon.plist - sudo rm /Library/LaunchDaemons/org.nixos.nix-daemon.plist - sudo launchctl unload /Library/LaunchDaemons/org.nixos.darwin-store.plist - sudo rm /Library/LaunchDaemons/org.nixos.darwin-store.plist - ``` - - This stops the Nix daemon and prevents it from being started next time you boot the system. - -3. Remove the `nixbld` group and the `_nixbuildN` users: - - ```console - sudo dscl . -delete /Groups/nixbld - for u in $(sudo dscl . -list /Users | grep _nixbld); do sudo dscl . -delete /Users/$u; done - ``` - - This will remove all the build users that no longer serve a purpose. - -4. Edit fstab using `sudo vifs` to remove the line mounting the Nix Store volume on `/nix`, which looks like - - ``` - UUID= /nix apfs rw,noauto,nobrowse,suid,owners - ``` - or - - ``` - LABEL=Nix\040Store /nix apfs rw,nobrowse - ``` - - by setting the cursor on the respective line using the arrow keys, and pressing `dd`, and then `:wq` to save the file. - - This will prevent automatic mounting of the Nix Store volume. - -5. Edit `/etc/synthetic.conf` to remove the `nix` line. - If this is the only line in the file you can remove it entirely: - - ```bash - if [ -f /etc/synthetic.conf ]; then - if [ "$(cat /etc/synthetic.conf)" = "nix" ]; then - sudo rm /etc/synthetic.conf - else - sudo vi /etc/synthetic.conf - fi - fi - ``` - - This will prevent the creation of the empty `/nix` directory. - -6. 
Remove the files Nix added to your system, except for the store: - - ```console - sudo rm -rf /etc/nix /var/root/.nix-profile /var/root/.nix-defexpr /var/root/.nix-channels ~/.nix-profile ~/.nix-defexpr ~/.nix-channels - ``` - - -7. Remove the Nix Store volume: - - ```console - sudo diskutil apfs deleteVolume /nix - ``` - - This will remove the Nix Store volume and everything that was added to the store. - - If the output indicates that the command couldn't remove the volume, you should make sure you don't have an _unmounted_ Nix Store volume. - Look for a "Nix Store" volume in the output of the following command: - - ```console - diskutil list - ``` - - If you _do_ find a "Nix Store" volume, delete it by running `diskutil apfs deleteVolume` with the store volume's `diskXsY` identifier. - - If you get an error that the volume is in use by the kernel, reboot and immediately delete the volume before starting any other process. - -> **Note** -> -> After you complete the steps here, you will still have an empty `/nix` directory. -> This is an expected sign of a successful uninstall. -> The empty `/nix` directory will disappear the next time you reboot. -> -> You do not have to reboot to finish uninstalling Nix. -> The uninstall is complete. -> macOS (Catalina+) directly controls root directories, and its read-only root will prevent you from manually deleting the empty `/nix` mountpoint. +[installer]: https://github.com/DeterminateSystems/nix-installer From 4323868244d0a771c25c21c0e40429dc043c8550 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Wed, 5 Mar 2025 16:55:45 -0300 Subject: [PATCH 248/361] Remove links to binary doc --- .../source/installation/installing-binary.md | 135 ------------------ doc/manual/source/installation/uninstall.md | 2 +- doc/manual/source/release-notes/rl-2.19.md | 2 +- 3 files changed, 2 insertions(+), 137 deletions(-) delete mode 100644 doc/manual/source/installation/installing-binary.md diff --git a/doc/manual/source/installation/installing-binary.md b/doc/manual/source/installation/installing-binary.md deleted file mode 100644 index 0a2d650a97b..00000000000 --- a/doc/manual/source/installation/installing-binary.md +++ /dev/null @@ -1,135 +0,0 @@ -# Installing a Binary Distribution - -> **Updating to macOS 15 Sequoia** -> -> If you recently updated to macOS 15 Sequoia and are getting -> ```console -> error: the user '_nixbld1' in the group 'nixbld' does not exist -> ``` -> when running Nix commands, refer to GitHub issue [NixOS/nix#10892](https://github.com/NixOS/nix/issues/10892) for instructions to fix your installation without reinstalling. - -To install the latest version Nix, run the following command: - -```console -$ curl -L https://nixos.org/nix/install | sh -``` - -This performs the default type of installation for your platform: - -- [Multi-user](#multi-user-installation): - - Linux with systemd and without SELinux - - macOS - -The installer can configured with various command line arguments and environment variables. -To show available command line flags: - -```console -$ curl -L https://nixos.org/nix/install | sh -s -- --help -``` - -To check what it does and how it can be customised further, [download and edit the second-stage installation script](#installing-from-a-binary-tarball). - -# Installing a pinned Nix version from a URL - -Version-specific installation URLs for all Nix versions since 1.11.16 can be found at [releases.nixos.org](https://releases.nixos.org/?prefix=nix/). -The directory for each version contains the corresponding SHA-256 hash. 
- -All installation scripts are invoked the same way: - -```console -$ export VERSION=2.19.2 -$ curl -L https://releases.nixos.org/nix/nix-$VERSION/install | sh -``` - -# Multi User Installation - -The multi-user Nix installation creates system users and a system service for the Nix daemon. - -Supported systems: - -- Linux running systemd, with SELinux disabled -- macOS - -To explicitly instruct the installer to perform a multi-user installation on your system: - -```console -$ bash <(curl -L https://nixos.org/nix/install) --daemon -``` - -You can run this under your usual user account or `root`. -The script will invoke `sudo` as needed. - -# Installing from a binary tarball - -You can also download a binary tarball that contains Nix and all its dependencies: -- Choose a [version](https://releases.nixos.org/?prefix=nix/) and [system type](../development/building.md#platforms) -- Download and unpack the tarball -- Run the installer - -> **Example** -> -> ```console -> $ pushd $(mktemp -d) -> $ export VERSION=2.19.2 -> $ export SYSTEM=x86_64-linux -> $ curl -LO https://releases.nixos.org/nix/nix-$VERSION/nix-$VERSION-$SYSTEM.tar.xz -> $ tar xfj nix-$VERSION-$SYSTEM.tar.xz -> $ cd nix-$VERSION-$SYSTEM -> $ ./install -> $ popd -> ``` - -The installer can be customised with the environment variables declared in the file named `install-multi-user`. - -## Native packages for Linux distributions - -The Nix community maintains installers for some Linux distributions in their native packaging format(https://nix-community.github.io/nix-installers/). - -# macOS Installation - - -[]{#sect-macos-installation-change-store-prefix}[]{#sect-macos-installation-encrypted-volume}[]{#sect-macos-installation-symlink}[]{#sect-macos-installation-recommended-notes} - -We believe we have ironed out how to cleanly support the read-only root file system -on modern macOS. New installs will do this automatically. - -This section previously detailed the situation, options, and trade-offs, -but it now only outlines what the installer does. You don't need to know -this to run the installer, but it may help if you run into trouble: - -- create a new APFS volume for your Nix store -- update `/etc/synthetic.conf` to direct macOS to create a "synthetic" - empty root directory to mount your volume -- specify mount options for the volume in `/etc/fstab` - - `rw`: read-write - - `noauto`: prevent the system from auto-mounting the volume (so the - LaunchDaemon mentioned below can control mounting it, and to avoid - masking problems with that mounting service). - - `nobrowse`: prevent the Nix Store volume from showing up on your - desktop; also keeps Spotlight from spending resources to index - this volume - -- if you have FileVault enabled - - generate an encryption password - - put it in your system Keychain - - use it to encrypt the volume -- create a system LaunchDaemon to mount this volume early enough in the - boot process to avoid problems loading or restoring any programs that - need access to your Nix store - diff --git a/doc/manual/source/installation/uninstall.md b/doc/manual/source/installation/uninstall.md index cf8f419b656..385ce2d30ae 100644 --- a/doc/manual/source/installation/uninstall.md +++ b/doc/manual/source/installation/uninstall.md @@ -2,7 +2,7 @@ ## Multi User -Removing a [multi-user installation](./installing-binary.md#multi-user-installation) depends on the operating system. +Removing a multi-user installation depends on the operating system. 
### Linux diff --git a/doc/manual/source/release-notes/rl-2.19.md b/doc/manual/source/release-notes/rl-2.19.md index e6a93c7eaae..13e573c1dfc 100644 --- a/doc/manual/source/release-notes/rl-2.19.md +++ b/doc/manual/source/release-notes/rl-2.19.md @@ -69,7 +69,7 @@ This makes it match `nix derivation show`, which also maps store paths to information. -- When Nix is installed using the [binary installer](@docroot@/installation/installing-binary.md), in supported shells (Bash, Zsh, Fish) +- When Nix is installed using the binary installer, in supported shells (Bash, Zsh, Fish) [`XDG_DATA_DIRS`](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html#variables) is now populated with the path to the `/share` subdirectory of the current profile. This means that command completion scripts, `.desktop` files, and similar artifacts installed via [`nix-env`](@docroot@/command-ref/nix-env.md) or [`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) (experimental) can be found by any program that follows the [XDG Base Directory Specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html). From 42606c16ad7df520feeecca12dfe06ce221f4f43 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Wed, 5 Mar 2025 16:59:50 -0300 Subject: [PATCH 249/361] Remove one more reference to binary doc --- src/libexpr/eval-settings.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/eval-settings.hh b/src/libexpr/eval-settings.hh index a8fcce539d7..c61a186c08c 100644 --- a/src/libexpr/eval-settings.hh +++ b/src/libexpr/eval-settings.hh @@ -96,7 +96,7 @@ struct EvalSettings : Config The current state of all channels for the `root` user. - These files are set up by the [Nix installer](@docroot@/installation/installing-binary.md). + These files are set up by the Nix installer. See [`NIX_STATE_DIR`](@docroot@/command-ref/env-common.md#env-NIX_STATE_DIR) for details on the environment variable. 
> **Note** From e6a6bcbb737d0394795c5032d195304950e88a3d Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Wed, 5 Mar 2025 17:30:47 -0300 Subject: [PATCH 250/361] Move nix-channel under deprecated commands --- doc/manual/source/SUMMARY.md.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index 612867c2586..45de9de7c5f 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -54,6 +54,7 @@ {{#include ./command-ref/new-cli/SUMMARY.md}} - [Deprecated Commands](command-ref/main-commands.md) - [nix-build](command-ref/nix-build.md) + - [nix-channel](command-ref/nix-channel.md) - [nix-shell](command-ref/nix-shell.md) - [nix-store](command-ref/nix-store.md) - [nix-store --add-fixed](command-ref/nix-store/add-fixed.md) @@ -89,7 +90,6 @@ - [nix-env --uninstall](command-ref/nix-env/uninstall.md) - [nix-env --upgrade](command-ref/nix-env/upgrade.md) - [Utilities](command-ref/utilities.md) - - [nix-channel](command-ref/nix-channel.md) - [nix-collect-garbage](command-ref/nix-collect-garbage.md) - [nix-copy-closure](command-ref/nix-copy-closure.md) - [nix-daemon](command-ref/nix-daemon.md) From e2bc5e37744a303152935e09fc895ac3469e2e17 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Wed, 5 Mar 2025 17:37:59 -0300 Subject: [PATCH 251/361] Remove default Nix expression doc --- doc/manual/source/SUMMARY.md.in | 1 - 1 file changed, 1 deletion(-) diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index 45de9de7c5f..c43e4e9f6f0 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -101,7 +101,6 @@ - [Profiles](command-ref/files/profiles.md) - [manifest.nix](command-ref/files/manifest.nix.md) - [manifest.json](command-ref/files/manifest.json.md) - - [Channels](command-ref/files/channels.md) - [Default Nix expression](command-ref/files/default-nix-expression.md) - [Architecture and Design](architecture/architecture.md) - [Formats and Protocols](protocols/index.md) From d67db97abb904470a2d4ee026caa689ccce54c2d Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Wed, 5 Mar 2025 17:41:24 -0300 Subject: [PATCH 252/361] Remove channels link --- .../source/command-ref/files/default-nix-expression.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/doc/manual/source/command-ref/files/default-nix-expression.md b/doc/manual/source/command-ref/files/default-nix-expression.md index 2bd45ff5deb..e886e3ff499 100644 --- a/doc/manual/source/command-ref/files/default-nix-expression.md +++ b/doc/manual/source/command-ref/files/default-nix-expression.md @@ -31,12 +31,12 @@ Then, the resulting expression is interpreted like this: The file [`manifest.nix`](@docroot@/command-ref/files/manifest.nix.md) is always ignored. -The command [`nix-channel`] places a symlink to the current user's [channels] in this directory, the [user channel link](#user-channel-link). +The command [`nix-channel`] places a symlink to the current user's channels in this directory, the [user channel link](#user-channel-link). This makes all subscribed channels available as attributes in the default expression. ## User channel link -A symlink that ensures that [`nix-env`] can find the current user's [channels]: +A symlink that ensures that [`nix-env`] can find the current user's channels: - `~/.nix-defexpr/channels` - `$XDG_STATE_HOME/defexpr/channels` if [`use-xdg-base-directories`] is set to `true`. 
@@ -51,4 +51,3 @@ In a multi-user installation, you may also have `~/.nix-defexpr/channels_root`, [`nix-channel`]: @docroot@/command-ref/nix-channel.md [`nix-env`]: @docroot@/command-ref/nix-env.md [`use-xdg-base-directories`]: @docroot@/command-ref/conf-file.md#conf-use-xdg-base-directories -[channels]: @docroot@/command-ref/files/channels.md From 0f04d36c730175efc36756f7e842f8f97d948352 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Wed, 5 Mar 2025 17:45:11 -0300 Subject: [PATCH 253/361] Remove default Nix expression doc from summary --- doc/manual/source/SUMMARY.md.in | 1 - src/libexpr/eval-settings.hh | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index c43e4e9f6f0..b8b6ee763a0 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -101,7 +101,6 @@ - [Profiles](command-ref/files/profiles.md) - [manifest.nix](command-ref/files/manifest.nix.md) - [manifest.json](command-ref/files/manifest.json.md) - - [Default Nix expression](command-ref/files/default-nix-expression.md) - [Architecture and Design](architecture/architecture.md) - [Formats and Protocols](protocols/index.md) - [JSON Formats](protocols/json/index.md) diff --git a/src/libexpr/eval-settings.hh b/src/libexpr/eval-settings.hh index c61a186c08c..4740c298386 100644 --- a/src/libexpr/eval-settings.hh +++ b/src/libexpr/eval-settings.hh @@ -86,7 +86,7 @@ struct EvalSettings : Config - `$HOME/.nix-defexpr/channels` - The [user channel link](@docroot@/command-ref/files/default-nix-expression.md#user-channel-link), pointing to the current state of [channels](@docroot@/command-ref/files/channels.md) for the current user. + The user channel link pointing to the current state of channels for the current user. - `nixpkgs=$NIX_STATE_DIR/profiles/per-user/root/channels/nixpkgs` From aaf1967faaa1fb417aed8ae2fdc7040a97c55cb6 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Wed, 5 Mar 2025 17:49:23 -0300 Subject: [PATCH 254/361] Remove default Nix expression links --- doc/manual/source/command-ref/nix-env.md | 2 +- doc/manual/source/command-ref/nix-env/install.md | 7 +++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/doc/manual/source/command-ref/nix-env.md b/doc/manual/source/command-ref/nix-env.md index bda02149ed0..d01caaf7f78 100644 --- a/doc/manual/source/command-ref/nix-env.md +++ b/doc/manual/source/command-ref/nix-env.md @@ -52,7 +52,7 @@ These pages can be viewed offline: `nix-env` can obtain packages from multiple sources: - An attribute set of derivations from: - - The [default Nix expression](@docroot@/command-ref/files/default-nix-expression.md) (by default) + - The default Nix expression (by default) - A Nix file, specified via `--file` - A [profile](@docroot@/command-ref/files/profiles.md), specified via `--from-profile` - A Nix expression that is a function which takes default expression as argument, specified via `--from-expression` diff --git a/doc/manual/source/command-ref/nix-env/install.md b/doc/manual/source/command-ref/nix-env/install.md index aa5c2fbba83..b6a71e8bdaa 100644 --- a/doc/manual/source/command-ref/nix-env/install.md +++ b/doc/manual/source/command-ref/nix-env/install.md @@ -22,12 +22,11 @@ It is based on the current generation of the active [profile](@docroot@/command- The arguments *args* map to store paths in a number of possible ways: -- By default, *args* is a set of [derivation] names denoting derivations in the [default Nix expression]. 
+- By default, *args* is a set of [derivation] names denoting derivations in the default Nix expression. These are [realised], and the resulting output paths are installed. Currently installed derivations with a name equal to the name of a derivation being added are removed unless the option `--preserve-installed` is specified. [derivation]: @docroot@/glossary.md#gloss-derivation - [default Nix expression]: @docroot@/command-ref/files/default-nix-expression.md [realised]: @docroot@/glossary.md#gloss-realise If there are multiple derivations matching a name in *args* that @@ -45,7 +44,7 @@ The arguments *args* map to store paths in a number of possible ways: gcc-3.3.6 gcc-4.1.1` will install both version of GCC (and will probably cause a user environment conflict\!). -- If [`--attr`](#opt-attr) / `-A` is specified, the arguments are *attribute paths* that select attributes from the [default Nix expression]. +- If [`--attr`](#opt-attr) / `-A` is specified, the arguments are *attribute paths* that select attributes from the default Nix expression. This is faster than using derivation names and unambiguous. Show the attribute paths of available packages with [`nix-env --query`](./query.md): @@ -58,7 +57,7 @@ The arguments *args* map to store paths in a number of possible ways: easy way to copy user environment elements from one profile to another. -- If `--from-expression` is given, *args* are [Nix language functions](@docroot@/language/syntax.md#functions) that are called with the [default Nix expression] as their single argument. +- If `--from-expression` is given, *args* are [Nix language functions](@docroot@/language/syntax.md#functions) that are called with the default Nix expression as their single argument. The derivations returned by those function calls are installed. This allows derivations to be specified in an unambiguous way, which is necessary if there are multiple derivations with the same name. From 4f6d3299a4bb8dd50718ed55638e295bbf537ab9 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Thu, 6 Mar 2025 14:42:58 -0300 Subject: [PATCH 255/361] Change document title --- doc/manual/source/introduction.md | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/doc/manual/source/introduction.md b/doc/manual/source/introduction.md index 76489bc1b2c..89cb61d3c41 100644 --- a/doc/manual/source/introduction.md +++ b/doc/manual/source/introduction.md @@ -1,7 +1,11 @@ -# Introduction +# Determinate Nix -Nix is a _purely functional package manager_. This means that it -treats packages like values in purely functional programming languages +**Determinate Nix** is a downstream distribution of [Nix], a purely +functional language, CLI tool, and package management system. + +## How Nix works + +Nix treats packages like values in purely functional programming languages such as Haskell — they are built by functions that don’t have side-effects, and they never change after they have been built. Nix stores packages in the _Nix store_, usually the directory @@ -184,10 +188,14 @@ to build configuration files in `/etc`). This means, among other things, that it is easy to roll back the entire configuration of the system to an earlier state. Also, users can install software without root privileges. For more information and downloads, see the [NixOS -homepage](https://nixos.org/). +homepage][site]. ## License Nix is released under the terms of the [GNU LGPLv2.1 or (at your option) any later -version](http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html). 
+version][license]. + +[license]: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html +[nix]: https://nixos.org +[site]: https://nixos.org From fd6231e61230b37e0e2408929ba4e20bdfc5c556 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Thu, 6 Mar 2025 15:36:43 -0800 Subject: [PATCH 256/361] Publish the flake as public, every time This exposed a bug in FlakeHub's private toggling, where the public 3.0.0 release followed by an accidentally private 0.1.x release, managed to cause the flake to be shunted closed. This should not be possible, so let's dig into how that came to be and make sure to create a test case against this should-be-impossible transition. --- .github/workflows/upload-release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/upload-release.yml b/.github/workflows/upload-release.yml index 2eaf48d0ece..b600dfba04f 100644 --- a/.github/workflows/upload-release.yml +++ b/.github/workflows/upload-release.yml @@ -100,5 +100,5 @@ jobs: - uses: "DeterminateSystems/flakehub-push@main" with: rolling: ${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) }} - visibility: "private" + visibility: "public" tag: "${{ github.ref_name }}" From 644f79dfd8aca7e2fd5662b8f7411d42c5bd7c43 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Fri, 7 Mar 2025 14:18:04 -0300 Subject: [PATCH 257/361] Add installation instructions to intro --- doc/manual/redirects.js | 1 - doc/manual/source/SUMMARY.md.in | 1 - doc/manual/source/installation/index.md | 4 ++-- .../source/installation/supported-platforms.md | 7 ------- doc/manual/source/introduction.md | 18 ++++++++++++++---- doc/manual/source/quick-start.md | 2 +- 6 files changed, 17 insertions(+), 16 deletions(-) delete mode 100644 doc/manual/source/installation/supported-platforms.md diff --git a/doc/manual/redirects.js b/doc/manual/redirects.js index 36f53cbc82c..3a86ae4075a 100644 --- a/doc/manual/redirects.js +++ b/doc/manual/redirects.js @@ -275,7 +275,6 @@ const redirects = { "ssec-multi-user": "installation/multi-user.html", "sec-obtaining-source": "installation/obtaining-source.html", "sec-prerequisites-source": "installation/prerequisites-source.html", - "ch-supported-platforms": "installation/supported-platforms.html", "ch-upgrading-nix": "installation/upgrading.html", "ch-about-nix": "introduction.html", "chap-introduction": "introduction.html", diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index 612867c2586..9acd7907712 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -3,7 +3,6 @@ - [Introduction](introduction.md) - [Quick Start](quick-start.md) - [Installation](installation/index.md) - - [Supported Platforms](installation/supported-platforms.md) - [Installing Nix from Source](installation/installing-source.md) - [Prerequisites](installation/prerequisites-source.md) - [Obtaining a Source Distribution](installation/obtaining-source.md) diff --git a/doc/manual/source/installation/index.md b/doc/manual/source/installation/index.md index 1a1d4efdc98..21aca146fd2 100644 --- a/doc/manual/source/installation/index.md +++ b/doc/manual/source/installation/index.md @@ -1,10 +1,10 @@ # Installation -We recommend that macOS users install Determinate Nix using [Determinate.pkg][pkg]. +We recommend that macOS users install Determinate Nix using our graphical installer, [Determinate.pkg][pkg]. 
For Linux and Windows Subsystem for Linux (WSL) users: ```console -$ curl --proto '=https' --tlsv1.2 -sSf -L https://install.determinate.systems/nix | \ +curl --proto '=https' --tlsv1.2 -sSf -L https://install.determinate.systems/nix | \ sh -s -- install --determinate ``` diff --git a/doc/manual/source/installation/supported-platforms.md b/doc/manual/source/installation/supported-platforms.md deleted file mode 100644 index 8ca3ce8d445..00000000000 --- a/doc/manual/source/installation/supported-platforms.md +++ /dev/null @@ -1,7 +0,0 @@ -# Supported Platforms - -Nix is currently supported on the following platforms: - - - Linux (i686, x86\_64, aarch64). - - - macOS (x86\_64, aarch64). diff --git a/doc/manual/source/introduction.md b/doc/manual/source/introduction.md index 89cb61d3c41..a95e82740c6 100644 --- a/doc/manual/source/introduction.md +++ b/doc/manual/source/introduction.md @@ -1,7 +1,17 @@ # Determinate Nix -**Determinate Nix** is a downstream distribution of [Nix], a purely -functional language, CLI tool, and package management system. +**Determinate Nix** is a downstream distribution of [Nix], a purely functional language, CLI tool, and package management system. +It's available on Linux, macOS, and Windows Subsystem for Linux (WSL). + +## Installing + +We recommend that macOS users install Determinate Nix using our graphical installer, [Determinate.pkg][pkg]. +For Linux and Windows Subsystem for Linux (WSL) users: + +```console +curl --proto '=https' --tlsv1.2 -sSf -L https://install.determinate.systems/nix | \ + sh -s -- install --determinate +``` ## How Nix works @@ -188,7 +198,7 @@ to build configuration files in `/etc`). This means, among other things, that it is easy to roll back the entire configuration of the system to an earlier state. Also, users can install software without root privileges. For more information and downloads, see the [NixOS -homepage][site]. +homepage][nix]. ## License @@ -197,5 +207,5 @@ option) any later version][license]. [license]: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html -[nix]: https://nixos.org +[pkg]: https://install.determinate.systems/determinate-pkg/stable/Universal [site]: https://nixos.org diff --git a/doc/manual/source/quick-start.md b/doc/manual/source/quick-start.md index 428063f97cc..ffb87aa725f 100644 --- a/doc/manual/source/quick-start.md +++ b/doc/manual/source/quick-start.md @@ -4,7 +4,7 @@ This chapter is for impatient people who don't like reading documentation. For more in-depth information you are kindly referred to subsequent chapters. 1. Install Nix. - We recommend that macOS users install Determinate Nix using [Determinate.pkg][pkg]. + We recommend that macOS users install Determinate Nix using our graphical installer, [Determinate.pkg][pkg]. For Linux and Windows Subsystem for Linux (WSL) users: ```console From b62167a0147b3500db644cb28fd6f9f63840ad44 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Fri, 7 Mar 2025 14:53:22 -0300 Subject: [PATCH 258/361] Update upgrade docs --- doc/manual/source/installation/upgrading.md | 30 +++------------------ 1 file changed, 3 insertions(+), 27 deletions(-) diff --git a/doc/manual/source/installation/upgrading.md b/doc/manual/source/installation/upgrading.md index f0992671d03..8fe342b09b7 100644 --- a/doc/manual/source/installation/upgrading.md +++ b/doc/manual/source/installation/upgrading.md @@ -1,34 +1,10 @@ # Upgrading Nix -> **Note** -> -> These upgrade instructions apply where Nix was installed following the [installation instructions in this manual](./index.md). 
- -Check which Nix version will be installed, for example from one of the [release channels](http://channels.nixos.org/) such as `nixpkgs-unstable`: - -```console -$ nix-shell -p nix -I nixpkgs=channel:nixpkgs-unstable --run "nix --version" -nix (Nix) 2.18.1 -``` - -> **Warning** -> -> Writing to the [local store](@docroot@/store/types/local-store.md) with a newer version of Nix, for example by building derivations with [`nix-build`](@docroot@/command-ref/nix-build.md) or [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md), may change the database schema! -> Reverting to an older version of Nix may therefore require purging the store database before it can be used. - -## Linux multi-user +You can upgrade Determinate Nix using Determinate Nixd: ```console -$ sudo su -# nix-env --install --file '' --attr nix cacert -I nixpkgs=channel:nixpkgs-unstable -# systemctl daemon-reload -# systemctl restart nix-daemon +sudo determinate-nixd upgrade ``` -## macOS multi-user +Note that the `sudo` is necessary here and upgrading fails without it. -```console -$ sudo nix-env --install --file '' --attr nix cacert -I nixpkgs=channel:nixpkgs-unstable -$ sudo launchctl remove org.nixos.nix-daemon -$ sudo launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist -``` From 1212b1fbfeee93ce7a04911a4085d796d6d9c72a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 17 Feb 2025 14:59:07 +0100 Subject: [PATCH 259/361] JSONLogger: Log to a file descriptor instead of another Logger Logging to another Logger was kind of nonsensical - it was really just an easy way to get it to write its output to stderr, but that only works if the underlying logger writes to stderr. This change is needed to make it easy to log JSON output somewhere else (like a file or socket). --- src/build-remote/build-remote.cc | 2 +- src/libmain/loggers.cc | 2 +- src/libstore/unix/build/local-derivation-goal.cc | 2 +- src/libutil/logging.cc | 10 +++++----- src/libutil/logging.hh | 3 ++- 5 files changed, 10 insertions(+), 9 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 82ad7d86212..2c3176724e7 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -51,7 +51,7 @@ static bool allSupportedLocally(Store & store, const std::set& requ static int main_build_remote(int argc, char * * argv) { { - logger = makeJSONLogger(*logger); + logger = makeJSONLogger(STDERR_FILENO); /* Ensure we don't get any SSH passphrase or host key popups. */ unsetenv("DISPLAY"); diff --git a/src/libmain/loggers.cc b/src/libmain/loggers.cc index a4e0530c8f9..ede5ddae332 100644 --- a/src/libmain/loggers.cc +++ b/src/libmain/loggers.cc @@ -27,7 +27,7 @@ Logger * makeDefaultLogger() { case LogFormat::rawWithLogs: return makeSimpleLogger(true); case LogFormat::internalJSON: - return makeJSONLogger(*makeSimpleLogger(true)); + return makeJSONLogger(STDERR_FILENO); case LogFormat::bar: return makeProgressBar(); case LogFormat::barWithLogs: { diff --git a/src/libstore/unix/build/local-derivation-goal.cc b/src/libstore/unix/build/local-derivation-goal.cc index 5b9bc0bb011..805c3bbcaa5 100644 --- a/src/libstore/unix/build/local-derivation-goal.cc +++ b/src/libstore/unix/build/local-derivation-goal.cc @@ -2219,7 +2219,7 @@ void LocalDerivationGoal::runChild() /* Execute the program. This should not return. 
*/ if (drv->isBuiltin()) { try { - logger = makeJSONLogger(*logger); + logger = makeJSONLogger(STDERR_FILENO); std::map outputs; for (auto & e : drv->outputs) diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index a5add5565df..9caa83efebc 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -167,9 +167,9 @@ void to_json(nlohmann::json & json, std::shared_ptr pos) } struct JSONLogger : Logger { - Logger & prevLogger; + Descriptor fd; - JSONLogger(Logger & prevLogger) : prevLogger(prevLogger) { } + JSONLogger(Descriptor fd) : fd(fd) { } bool isVerbose() override { return true; @@ -190,7 +190,7 @@ struct JSONLogger : Logger { void write(const nlohmann::json & json) { - prevLogger.log(lvlError, "@nix " + json.dump(-1, ' ', false, nlohmann::json::error_handler_t::replace)); + writeLine(fd, "@nix " + json.dump(-1, ' ', false, nlohmann::json::error_handler_t::replace)); } void log(Verbosity lvl, std::string_view s) override @@ -262,9 +262,9 @@ struct JSONLogger : Logger { } }; -Logger * makeJSONLogger(Logger & prevLogger) +Logger * makeJSONLogger(Descriptor fd) { - return new JSONLogger(prevLogger); + return new JSONLogger(fd); } static Logger::Fields getFields(nlohmann::json & json) diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh index 11e4033a59d..e8112c6b020 100644 --- a/src/libutil/logging.hh +++ b/src/libutil/logging.hh @@ -3,6 +3,7 @@ #include "error.hh" #include "config.hh" +#include "file-descriptor.hh" #include @@ -183,7 +184,7 @@ extern Logger * logger; Logger * makeSimpleLogger(bool printBuildLogs = true); -Logger * makeJSONLogger(Logger & prevLogger); +Logger * makeJSONLogger(Descriptor fd); /** * @param source A noun phrase describing the source of the message, e.g. "the builder". From 8ef94c111413ce14a7f69dfe643e69dde2e724e3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 17 Feb 2025 14:40:50 +0100 Subject: [PATCH 260/361] Add a structured log message for FOD hash mismatches --- src/libstore/unix/build/local-derivation-goal.cc | 6 ++++++ src/libutil/logging.hh | 1 + 2 files changed, 7 insertions(+) diff --git a/src/libstore/unix/build/local-derivation-goal.cc b/src/libstore/unix/build/local-derivation-goal.cc index 805c3bbcaa5..9ab0da32bdd 100644 --- a/src/libstore/unix/build/local-derivation-goal.cc +++ b/src/libstore/unix/build/local-derivation-goal.cc @@ -2656,6 +2656,12 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() worker.store.printStorePath(drvPath), wanted.to_string(HashFormat::SRI, true), got.to_string(HashFormat::SRI, true))); + // FIXME: put this in BuildResult and log that as JSON. + act->result(resHashMismatch, + {worker.store.printStorePath(drvPath), + wanted.to_string(HashFormat::SRI, true), + got.to_string(HashFormat::SRI, true) + }); } if (!newInfo0.references.empty()) { auto numViolations = newInfo.references.size(); diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh index e8112c6b020..21493b9697c 100644 --- a/src/libutil/logging.hh +++ b/src/libutil/logging.hh @@ -36,6 +36,7 @@ typedef enum { resSetExpected = 106, resPostBuildLogLine = 107, resFetchStatus = 108, + resHashMismatch = 109, } ResultType; typedef uint64_t ActivityId; From 1f702cdb0166a9f3b03f931b27c6bd000c223eb3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 17 Feb 2025 16:36:02 +0100 Subject: [PATCH 261/361] Allow separate JSON logging If the NIX_LOG_FILE environment variable is set, Nix will write JSON log messages to that file in addition to the regular logger (e.g. the progress bar). 
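For illustration, a possible invocation under this change is sketched below; the installable and the log path are arbitrary examples rather than anything prescribed by this patch, and each line written to the file is a JSON record in the same format as `--log-format internal-json`:

```console
# Keep the regular terminal output, but also collect JSON log records in a
# file for later inspection (the path and installable are examples only).
$ NIX_LOG_FILE=/tmp/nix-log.json nix build nixpkgs#hello
$ wc -l /tmp/nix-log.json   # one JSON record per line
```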
--- src/libutil/logging.cc | 18 +++++++ src/libutil/logging.hh | 6 +++ src/libutil/meson.build | 1 + src/libutil/tee-logger.cc | 102 ++++++++++++++++++++++++++++++++++++++ src/nix/main.cc | 4 ++ 5 files changed, 131 insertions(+) create mode 100644 src/libutil/tee-logger.cc diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index 9caa83efebc..0bffe40e347 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -267,6 +267,24 @@ Logger * makeJSONLogger(Descriptor fd) return new JSONLogger(fd); } +Logger * makeJSONLogger(const std::filesystem::path & path) +{ + struct JSONFileLogger : JSONLogger { + AutoCloseFD fd; + + JSONFileLogger(AutoCloseFD && fd) + : JSONLogger(fd.get()) + , fd(std::move(fd)) + { } + }; + + auto fd{toDescriptor(open(path.c_str(), O_CREAT | O_APPEND | O_WRONLY, 0644))}; + if (!fd) + throw SysError("opening log file '%1%'", path); + + return new JSONFileLogger(std::move(fd)); +} + static Logger::Fields getFields(nlohmann::json & json) { Logger::Fields fields; diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh index 21493b9697c..cadeafea4e9 100644 --- a/src/libutil/logging.hh +++ b/src/libutil/logging.hh @@ -5,6 +5,8 @@ #include "config.hh" #include "file-descriptor.hh" +#include + #include namespace nix { @@ -185,8 +187,12 @@ extern Logger * logger; Logger * makeSimpleLogger(bool printBuildLogs = true); +Logger * makeTeeLogger(std::vector loggers); + Logger * makeJSONLogger(Descriptor fd); +Logger * makeJSONLogger(const std::filesystem::path & path); + /** * @param source A noun phrase describing the source of the message, e.g. "the builder". */ diff --git a/src/libutil/meson.build b/src/libutil/meson.build index ac701d8fd3b..d5855442d8a 100644 --- a/src/libutil/meson.build +++ b/src/libutil/meson.build @@ -158,6 +158,7 @@ sources = files( 'strings.cc', 'suggestions.cc', 'tarfile.cc', + 'tee-logger.cc', 'terminal.cc', 'thread-pool.cc', 'unix-domain-socket.cc', diff --git a/src/libutil/tee-logger.cc b/src/libutil/tee-logger.cc new file mode 100644 index 00000000000..7a5115ea795 --- /dev/null +++ b/src/libutil/tee-logger.cc @@ -0,0 +1,102 @@ +#include "logging.hh" + +namespace nix { + +struct TeeLogger : Logger +{ + std::vector loggers; + + TeeLogger(std::vector loggers) + : loggers(std::move(loggers)) + { + } + + void stop() override + { + for (auto & logger : loggers) + logger->stop(); + }; + + void pause() override + { + for (auto & logger : loggers) + logger->pause(); + }; + + void resume() override + { + for (auto & logger : loggers) + logger->resume(); + }; + + void log(Verbosity lvl, std::string_view s) override + { + for (auto & logger : loggers) + logger->log(lvl, s); + } + + void logEI(const ErrorInfo & ei) override + { + for (auto & logger : loggers) + logger->logEI(ei); + } + + void startActivity( + ActivityId act, + Verbosity lvl, + ActivityType type, + const std::string & s, + const Fields & fields, + ActivityId parent) override + { + for (auto & logger : loggers) + logger->startActivity(act, lvl, type, s, fields, parent); + } + + void stopActivity(ActivityId act) override + { + for (auto & logger : loggers) + logger->stopActivity(act); + } + + void result(ActivityId act, ResultType type, const Fields & fields) override + { + for (auto & logger : loggers) + logger->result(act, type, fields); + } + + void writeToStdout(std::string_view s) override + { + for (auto & logger : loggers) { + /* Let only the first logger write to stdout to avoid + duplication. 
This means that the first logger needs to + be the one managing stdout/stderr + (e.g. `ProgressBar`). */ + logger->writeToStdout(s); + break; + } + } + + std::optional ask(std::string_view s) override + { + for (auto & logger : loggers) { + auto c = logger->ask(s); + if (c) + return c; + } + return std::nullopt; + } + + void setPrintBuildLogs(bool printBuildLogs) override + { + for (auto & logger : loggers) + logger->setPrintBuildLogs(printBuildLogs); + } +}; + +Logger * makeTeeLogger(std::vector loggers) +{ + return new TeeLogger(std::move(loggers)); +} + +} diff --git a/src/nix/main.cc b/src/nix/main.cc index f8f9d03a4f6..5f83e997cb2 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -485,6 +485,10 @@ void mainWrapped(int argc, char * * argv) if (!args.helpRequested && !args.completions) throw; } + if (auto logFile = getEnv("NIX_LOG_FILE")) { + logger = makeTeeLogger({logger, makeJSONLogger(*logFile)}); + } + if (args.helpRequested) { std::vector subcommand; MultiCommand * command = &args; From 2972e7394606650ed2ed4669ea79581817294a72 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 13 Mar 2025 13:15:14 +0100 Subject: [PATCH 262/361] Turn NIX_LOG_FILE into a setting --- src/nix/main.cc | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/src/nix/main.cc b/src/nix/main.cc index 5f83e997cb2..10a02fe3f3c 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -5,6 +5,7 @@ #include "eval.hh" #include "eval-settings.hh" #include "globals.hh" +#include "config-global.hh" #include "legacy.hh" #include "shared.hh" #include "store-api.hh" @@ -347,6 +348,20 @@ struct CmdHelpStores : Command static auto rCmdHelpStores = registerCommand("help-stores"); +struct ExtLoggerSettings : Config +{ + Setting jsonLogPath{ + this, "", "json-log-path", + R"( + A path to which JSON records of Nix's log output will be + written, in the same format as `--log-format internal-json`. 
+ )"}; +}; + +static ExtLoggerSettings extLoggerSettings; + +static GlobalConfig::Register rExtLoggerSettings(&extLoggerSettings); + void mainWrapped(int argc, char * * argv) { savedArgv = argv; @@ -485,8 +500,8 @@ void mainWrapped(int argc, char * * argv) if (!args.helpRequested && !args.completions) throw; } - if (auto logFile = getEnv("NIX_LOG_FILE")) { - logger = makeTeeLogger({logger, makeJSONLogger(*logFile)}); + if (!extLoggerSettings.jsonLogPath.get().empty()) { + logger = makeTeeLogger({logger, makeJSONLogger(std::filesystem::path(extLoggerSettings.jsonLogPath.get()))}); } if (args.helpRequested) { From 29a9e638c1bf70eb5f57bf8c6b78de71293cdedf Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 13 Mar 2025 13:37:38 +0100 Subject: [PATCH 263/361] Remove "@nix" prefix from json-log-path output --- src/libutil/logging.cc | 22 ++++++++++++++-------- src/libutil/logging.hh | 4 ++-- src/nix/main.cc | 5 +++-- 3 files changed, 19 insertions(+), 12 deletions(-) diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index 0bffe40e347..fcbc61d5e4d 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -168,8 +168,12 @@ void to_json(nlohmann::json & json, std::shared_ptr pos) struct JSONLogger : Logger { Descriptor fd; + bool includeNixPrefix; - JSONLogger(Descriptor fd) : fd(fd) { } + JSONLogger(Descriptor fd, bool includeNixPrefix) + : fd(fd) + , includeNixPrefix(includeNixPrefix) + { } bool isVerbose() override { return true; @@ -190,7 +194,9 @@ struct JSONLogger : Logger { void write(const nlohmann::json & json) { - writeLine(fd, "@nix " + json.dump(-1, ' ', false, nlohmann::json::error_handler_t::replace)); + writeLine(fd, + (includeNixPrefix ? "@nix " : "") + + json.dump(-1, ' ', false, nlohmann::json::error_handler_t::replace)); } void log(Verbosity lvl, std::string_view s) override @@ -262,18 +268,18 @@ struct JSONLogger : Logger { } }; -Logger * makeJSONLogger(Descriptor fd) +Logger * makeJSONLogger(Descriptor fd, bool includeNixPrefix) { - return new JSONLogger(fd); + return new JSONLogger(fd, includeNixPrefix); } -Logger * makeJSONLogger(const std::filesystem::path & path) +Logger * makeJSONLogger(const std::filesystem::path & path, bool includeNixPrefix) { struct JSONFileLogger : JSONLogger { AutoCloseFD fd; - JSONFileLogger(AutoCloseFD && fd) - : JSONLogger(fd.get()) + JSONFileLogger(AutoCloseFD && fd, bool includeNixPrefix) + : JSONLogger(fd.get(), includeNixPrefix) , fd(std::move(fd)) { } }; @@ -282,7 +288,7 @@ Logger * makeJSONLogger(const std::filesystem::path & path) if (!fd) throw SysError("opening log file '%1%'", path); - return new JSONFileLogger(std::move(fd)); + return new JSONFileLogger(std::move(fd), includeNixPrefix); } static Logger::Fields getFields(nlohmann::json & json) diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh index cadeafea4e9..ef449d03ef8 100644 --- a/src/libutil/logging.hh +++ b/src/libutil/logging.hh @@ -189,9 +189,9 @@ Logger * makeSimpleLogger(bool printBuildLogs = true); Logger * makeTeeLogger(std::vector loggers); -Logger * makeJSONLogger(Descriptor fd); +Logger * makeJSONLogger(Descriptor fd, bool includeNixPrefix = true); -Logger * makeJSONLogger(const std::filesystem::path & path); +Logger * makeJSONLogger(const std::filesystem::path & path, bool includeNixPrefix = true); /** * @param source A noun phrase describing the source of the message, e.g. "the builder". 
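To make the effect of the new `includeNixPrefix` flag concrete, an illustrative pair of records is shown below; the message text is invented and only the framing is the point:

```console
# A record on stderr with `--log-format internal-json` keeps the prefix:
@nix {"action":"msg","level":3,"msg":"example message"}

# The same record written to the file named by `json-log-path` is bare JSON:
{"action":"msg","level":3,"msg":"example message"}
```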
diff --git a/src/nix/main.cc b/src/nix/main.cc index 10a02fe3f3c..68137a216a3 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -354,7 +354,8 @@ struct ExtLoggerSettings : Config this, "", "json-log-path", R"( A path to which JSON records of Nix's log output will be - written, in the same format as `--log-format internal-json`. + written, in the same format as `--log-format internal-json` + (without the `@nix ` prefixes on each line). )"}; }; @@ -501,7 +502,7 @@ void mainWrapped(int argc, char * * argv) } if (!extLoggerSettings.jsonLogPath.get().empty()) { - logger = makeTeeLogger({logger, makeJSONLogger(std::filesystem::path(extLoggerSettings.jsonLogPath.get()))}); + logger = makeTeeLogger({logger, makeJSONLogger(std::filesystem::path(extLoggerSettings.jsonLogPath.get()), false)}); } if (args.helpRequested) { From 1efccf34b12ceaf3565bd70b8c3b3465e65d4a18 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 13 Mar 2025 13:58:35 +0100 Subject: [PATCH 264/361] JSONLogger: Acquire a lock to prevent log messages from clobbering each other --- src/libutil/logging.cc | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index fcbc61d5e4d..c3ccfba42db 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -6,6 +6,7 @@ #include "config-global.hh" #include "source-path.hh" #include "position.hh" +#include "sync.hh" #include #include @@ -192,11 +193,22 @@ struct JSONLogger : Logger { unreachable(); } + struct State + { + }; + + Sync _state; + void write(const nlohmann::json & json) { - writeLine(fd, + auto line = (includeNixPrefix ? "@nix " : "") + - json.dump(-1, ' ', false, nlohmann::json::error_handler_t::replace)); + json.dump(-1, ' ', false, nlohmann::json::error_handler_t::replace); + + /* Acquire a lock to prevent log messages from clobbering each + other. */ + auto state(_state.lock()); + writeLine(fd, line); } void log(Verbosity lvl, std::string_view s) override From d9730fc93b61c864fb73fae887a2d9bd102f0221 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 13 Mar 2025 15:42:17 +0100 Subject: [PATCH 265/361] Fix fd check --- src/libutil/logging.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index c3ccfba42db..8ef7a361274 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -296,7 +296,7 @@ Logger * makeJSONLogger(const std::filesystem::path & path, bool includeNixPrefi { } }; - auto fd{toDescriptor(open(path.c_str(), O_CREAT | O_APPEND | O_WRONLY, 0644))}; + AutoCloseFD fd{toDescriptor(open(path.c_str(), O_CREAT | O_APPEND | O_WRONLY, 0644))}; if (!fd) throw SysError("opening log file '%1%'", path); From 220000dc1aaa1157862ea287542092eeab14111a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 13 Mar 2025 15:48:52 +0100 Subject: [PATCH 266/361] makeJSONLogger(): Support logging to a Unix domain socket --- src/libstore/uds-remote-store.cc | 4 +--- src/libutil/logging.cc | 6 +++++- src/libutil/unix-domain-socket.cc | 7 +++++++ src/libutil/unix-domain-socket.hh | 5 +++++ 4 files changed, 18 insertions(+), 4 deletions(-) diff --git a/src/libstore/uds-remote-store.cc b/src/libstore/uds-remote-store.cc index 3c445eb1318..93c48c0e63d 100644 --- a/src/libstore/uds-remote-store.cc +++ b/src/libstore/uds-remote-store.cc @@ -84,9 +84,7 @@ ref UDSRemoteStore::openConnection() auto conn = make_ref(); /* Connect to a daemon that does the privileged work for us. 
*/ - conn->fd = createUnixDomainSocket(); - - nix::connect(toSocket(conn->fd.get()), path); + conn->fd = nix::connect(path); conn->from.fd = conn->fd.get(); conn->to.fd = conn->fd.get(); diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index 8ef7a361274..94683cca5ba 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -7,6 +7,7 @@ #include "source-path.hh" #include "position.hh" #include "sync.hh" +#include "unix-domain-socket.hh" #include #include @@ -296,7 +297,10 @@ Logger * makeJSONLogger(const std::filesystem::path & path, bool includeNixPrefi { } }; - AutoCloseFD fd{toDescriptor(open(path.c_str(), O_CREAT | O_APPEND | O_WRONLY, 0644))}; + AutoCloseFD fd = + std::filesystem::is_socket(path) + ? connect(path) + : toDescriptor(open(path.c_str(), O_CREAT | O_APPEND | O_WRONLY, 0644)); if (!fd) throw SysError("opening log file '%1%'", path); diff --git a/src/libutil/unix-domain-socket.cc b/src/libutil/unix-domain-socket.cc index 1707fdb75e1..0a7af130868 100644 --- a/src/libutil/unix-domain-socket.cc +++ b/src/libutil/unix-domain-socket.cc @@ -114,4 +114,11 @@ void connect(Socket fd, const std::string & path) bindConnectProcHelper("connect", ::connect, fd, path); } +AutoCloseFD connect(const std::filesystem::path & path) +{ + auto fd = createUnixDomainSocket(); + nix::connect(toSocket(fd.get()), path); + return fd; +} + } diff --git a/src/libutil/unix-domain-socket.hh b/src/libutil/unix-domain-socket.hh index ba2baeb1334..e0d9340115d 100644 --- a/src/libutil/unix-domain-socket.hh +++ b/src/libutil/unix-domain-socket.hh @@ -80,4 +80,9 @@ void bind(Socket fd, const std::string & path); */ void connect(Socket fd, const std::string & path); +/** + * Connect to a Unix domain socket. + */ +AutoCloseFD connect(const std::filesystem::path & path); + } From 2a2af3f72f1841a67d06120d0be5553fddda71d7 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 13 Mar 2025 18:23:00 +0100 Subject: [PATCH 267/361] Logger::result(): Support logging arbitrary JSON objects --- src/libstore/unix/build/local-derivation-goal.cc | 8 ++++---- src/libutil/logging.cc | 10 ++++++++++ src/libutil/logging.hh | 7 +++++++ src/libutil/tee-logger.cc | 6 ++++++ 4 files changed, 27 insertions(+), 4 deletions(-) diff --git a/src/libstore/unix/build/local-derivation-goal.cc b/src/libstore/unix/build/local-derivation-goal.cc index 9ab0da32bdd..ec06c204418 100644 --- a/src/libstore/unix/build/local-derivation-goal.cc +++ b/src/libstore/unix/build/local-derivation-goal.cc @@ -2656,11 +2656,11 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() worker.store.printStorePath(drvPath), wanted.to_string(HashFormat::SRI, true), got.to_string(HashFormat::SRI, true))); - // FIXME: put this in BuildResult and log that as JSON. 
act->result(resHashMismatch, - {worker.store.printStorePath(drvPath), - wanted.to_string(HashFormat::SRI, true), - got.to_string(HashFormat::SRI, true) + { + {"storePath", worker.store.printStorePath(drvPath)}, + {"wanted", wanted.to_string(HashFormat::SRI, true)}, + {"got", got.to_string(HashFormat::SRI, true)}, }); } if (!newInfo0.references.empty()) { diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index 94683cca5ba..c7b859bd536 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -279,6 +279,16 @@ struct JSONLogger : Logger { addFields(json, fields); write(json); } + + void result(ActivityId act, ResultType type, const nlohmann::json & j) override + { + nlohmann::json json; + json["action"] = "result"; + json["id"] = act; + json["type"] = type; + json["payload"] = j; + write(json); + } }; Logger * makeJSONLogger(Descriptor fd, bool includeNixPrefix) diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh index ef449d03ef8..9d655f73592 100644 --- a/src/libutil/logging.hh +++ b/src/libutil/logging.hh @@ -108,6 +108,8 @@ public: virtual void result(ActivityId act, ResultType type, const Fields & fields) { }; + virtual void result(ActivityId act, ResultType type, const nlohmann::json & json) { }; + virtual void writeToStdout(std::string_view s); template @@ -160,6 +162,11 @@ struct Activity void setExpected(ActivityType type2, uint64_t expected) const { result(resSetExpected, type2, expected); } + void result(ResultType type, const nlohmann::json & json) const + { + logger.result(id, type, json); + } + template void result(ResultType type, const Args & ... args) const { diff --git a/src/libutil/tee-logger.cc b/src/libutil/tee-logger.cc index 7a5115ea795..c9873a53a97 100644 --- a/src/libutil/tee-logger.cc +++ b/src/libutil/tee-logger.cc @@ -65,6 +65,12 @@ struct TeeLogger : Logger logger->result(act, type, fields); } + void result(ActivityId act, ResultType type, const nlohmann::json & json) override + { + for (auto & logger : loggers) + logger->result(act, type, json); + } + void writeToStdout(std::string_view s) override { for (auto & logger : loggers) { From c515bc66f1d8941290ef448eea4661b741a8fcc7 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 13 Mar 2025 18:52:29 +0100 Subject: [PATCH 268/361] Provide a structured JSON serialisation of hashes --- src/libstore/unix/build/local-derivation-goal.cc | 4 ++-- src/libutil/hash.cc | 11 +++++++++++ src/libutil/hash.hh | 6 ++++++ 3 files changed, 19 insertions(+), 2 deletions(-) diff --git a/src/libstore/unix/build/local-derivation-goal.cc b/src/libstore/unix/build/local-derivation-goal.cc index ec06c204418..cb3d4a04f81 100644 --- a/src/libstore/unix/build/local-derivation-goal.cc +++ b/src/libstore/unix/build/local-derivation-goal.cc @@ -2659,8 +2659,8 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() act->result(resHashMismatch, { {"storePath", worker.store.printStorePath(drvPath)}, - {"wanted", wanted.to_string(HashFormat::SRI, true)}, - {"got", got.to_string(HashFormat::SRI, true)}, + {"wanted", wanted}, + {"got", got}, }); } if (!newInfo0.references.empty()) { diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index b69dec685f5..9668800af2c 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -14,6 +14,8 @@ #include #include +#include + #include namespace nix { @@ -456,4 +458,13 @@ std::string_view printHashAlgo(HashAlgorithm ha) } } +void to_json(nlohmann::json & json, const Hash & hash) +{ + json = nlohmann::json::object( + { + {"algo", printHashAlgo(hash.algo)}, + {"base16", 
hash.to_string(HashFormat::Base16, false)}, + }); +} + } diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh index dc95b9f2f9b..3ef7e8b14b3 100644 --- a/src/libutil/hash.hh +++ b/src/libutil/hash.hh @@ -5,6 +5,8 @@ #include "serialise.hh" #include "file-system.hh" +#include + namespace nix { @@ -209,6 +211,10 @@ std::optional parseHashAlgoOpt(std::string_view s); */ std::string_view printHashAlgo(HashAlgorithm ha); +/** + * Write a JSON serialisation of the format `{"algo":"","base16":""}`. + */ +void to_json(nlohmann::json & json, const Hash & hash); union Ctx; From 762114b7c4d28027cdc7a673035f87664cc0fe68 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 13 Mar 2025 19:42:52 +0100 Subject: [PATCH 269/361] Log BuildResult --- src/libstore/build-result.cc | 25 ++++++++++++ src/libstore/build-result.hh | 52 +++++++++++++++---------- src/libstore/build/derivation-goal.cc | 7 ++++ src/libstore/build/substitution-goal.cc | 11 ++++++ src/libutil/logging.hh | 1 + 5 files changed, 75 insertions(+), 21 deletions(-) diff --git a/src/libstore/build-result.cc b/src/libstore/build-result.cc index 96cbfd62fff..3e316f6791f 100644 --- a/src/libstore/build-result.cc +++ b/src/libstore/build-result.cc @@ -1,8 +1,33 @@ #include "build-result.hh" +#include + namespace nix { bool BuildResult::operator==(const BuildResult &) const noexcept = default; std::strong_ordering BuildResult::operator<=>(const BuildResult &) const noexcept = default; +void to_json(nlohmann::json & json, const BuildResult & buildResult) +{ + json = nlohmann::json::object(); + json["status"] = BuildResult::statusToString(buildResult.status); + if (buildResult.errorMsg != "") + json["errorMsg"] = buildResult.errorMsg; + if (buildResult.timesBuilt) + json["timesBuilt"] = buildResult.timesBuilt; + if (buildResult.isNonDeterministic) + json["isNonDeterministic"] = buildResult.isNonDeterministic; + if (buildResult.startTime) + json["startTime"] = buildResult.startTime; + if (buildResult.stopTime) + json["stopTime"] = buildResult.stopTime; +} + +nlohmann::json KeyedBuildResult::toJSON(Store & store) const +{ + auto json = nlohmann::json((const BuildResult &) *this); + json["path"] = path.toJSON(store); + return json; +} + } diff --git a/src/libstore/build-result.hh b/src/libstore/build-result.hh index 8c66cfeb353..f56817f19c1 100644 --- a/src/libstore/build-result.hh +++ b/src/libstore/build-result.hh @@ -8,6 +8,8 @@ #include #include +#include + namespace nix { struct BuildResult @@ -46,28 +48,32 @@ struct BuildResult */ std::string errorMsg; + static std::string_view statusToString(Status status) + { + switch (status) { + case Built: return "Built"; + case Substituted: return "Substituted"; + case AlreadyValid: return "AlreadyValid"; + case PermanentFailure: return "PermanentFailure"; + case InputRejected: return "InputRejected"; + case OutputRejected: return "OutputRejected"; + case TransientFailure: return "TransientFailure"; + case CachedFailure: return "CachedFailure"; + case TimedOut: return "TimedOut"; + case MiscFailure: return "MiscFailure"; + case DependencyFailed: return "DependencyFailed"; + case LogLimitExceeded: return "LogLimitExceeded"; + case NotDeterministic: return "NotDeterministic"; + case ResolvesToAlreadyValid: return "ResolvesToAlreadyValid"; + case NoSubstituters: return "NoSubstituters"; + default: return "Unknown"; + }; + } + std::string toString() const { - auto strStatus = [&]() { - switch (status) { - case Built: return "Built"; - case Substituted: return "Substituted"; - case AlreadyValid: return 
"AlreadyValid"; - case PermanentFailure: return "PermanentFailure"; - case InputRejected: return "InputRejected"; - case OutputRejected: return "OutputRejected"; - case TransientFailure: return "TransientFailure"; - case CachedFailure: return "CachedFailure"; - case TimedOut: return "TimedOut"; - case MiscFailure: return "MiscFailure"; - case DependencyFailed: return "DependencyFailed"; - case LogLimitExceeded: return "LogLimitExceeded"; - case NotDeterministic: return "NotDeterministic"; - case ResolvesToAlreadyValid: return "ResolvesToAlreadyValid"; - case NoSubstituters: return "NoSubstituters"; - default: return "Unknown"; - }; - }(); - return strStatus + ((errorMsg == "") ? "" : " : " + errorMsg); + return + std::string(statusToString(status)) + + ((errorMsg == "") ? "" : " : " + errorMsg); } /** @@ -128,6 +134,10 @@ struct KeyedBuildResult : BuildResult KeyedBuildResult(BuildResult res, DerivedPath path) : BuildResult(std::move(res)), path(std::move(path)) { } + + nlohmann::json toJSON(Store & store) const; }; +void to_json(nlohmann::json & json, const BuildResult & buildResult); + } diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 714dc87c86c..6c335e17c08 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -1563,6 +1563,13 @@ Goal::Done DerivationGoal::done( fs << worker.store.printStorePath(drvPath) << "\t" << buildResult.toString() << std::endl; } + logger->result( + act ? act->id : getCurActivity(), + resBuildResult, + KeyedBuildResult( + buildResult, + DerivedPath::Built{.drvPath = makeConstantStorePathRef(drvPath), .outputs = wantedOutputs}).toJSON(worker.store)); + return amDone(buildResult.success() ? ecSuccess : ecFailed, std::move(ex)); } diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc index 983c86601d8..625e64781aa 100644 --- a/src/libstore/build/substitution-goal.cc +++ b/src/libstore/build/substitution-goal.cc @@ -3,8 +3,11 @@ #include "nar-info.hh" #include "finally.hh" #include "signals.hh" + #include +#include + namespace nix { PathSubstitutionGoal::PathSubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair, std::optional ca) @@ -35,6 +38,14 @@ Goal::Done PathSubstitutionGoal::done( debug(*errorMsg); buildResult.errorMsg = *errorMsg; } + + logger->result( + getCurActivity(), + resBuildResult, + KeyedBuildResult( + buildResult, + DerivedPath::Opaque{storePath}).toJSON(worker.store)); + return amDone(result); } diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh index 9d655f73592..aeb058526b6 100644 --- a/src/libutil/logging.hh +++ b/src/libutil/logging.hh @@ -39,6 +39,7 @@ typedef enum { resPostBuildLogLine = 107, resFetchStatus = 108, resHashMismatch = 109, + resBuildResult = 110, } ResultType; typedef uint64_t ActivityId; From fd0d824fa5b3ed367903d49efd75c30d886de6a5 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 14 Mar 2025 17:05:38 +0100 Subject: [PATCH 270/361] Don't use DerivedPath::toJSON() It doesn't work on unrealized paths. 
--- src/libstore/build-result.cc | 17 +++++++++++++---- src/libstore/build-result.hh | 3 +-- src/libstore/build/derivation-goal.cc | 7 ++++--- src/libstore/build/substitution-goal.cc | 7 ++++--- 4 files changed, 22 insertions(+), 12 deletions(-) diff --git a/src/libstore/build-result.cc b/src/libstore/build-result.cc index 3e316f6791f..e6469e38f05 100644 --- a/src/libstore/build-result.cc +++ b/src/libstore/build-result.cc @@ -23,11 +23,20 @@ void to_json(nlohmann::json & json, const BuildResult & buildResult) json["stopTime"] = buildResult.stopTime; } -nlohmann::json KeyedBuildResult::toJSON(Store & store) const +void to_json(nlohmann::json & json, const KeyedBuildResult & buildResult) { - auto json = nlohmann::json((const BuildResult &) *this); - json["path"] = path.toJSON(store); - return json; + to_json(json, (const BuildResult &) buildResult); + auto path = nlohmann::json::object(); + std::visit( + overloaded{ + [&](const DerivedPathOpaque & opaque) { path["opaque"] = opaque.path.to_string(); }, + [&](const DerivedPathBuilt & drv) { + path["drvPath"] = drv.drvPath->getBaseStorePath().to_string(); + path["outputs"] = drv.outputs.to_string(); + }, + }, + buildResult.path.raw()); + json["path"] = std::move(path); } } diff --git a/src/libstore/build-result.hh b/src/libstore/build-result.hh index f56817f19c1..44862980de4 100644 --- a/src/libstore/build-result.hh +++ b/src/libstore/build-result.hh @@ -134,10 +134,9 @@ struct KeyedBuildResult : BuildResult KeyedBuildResult(BuildResult res, DerivedPath path) : BuildResult(std::move(res)), path(std::move(path)) { } - - nlohmann::json toJSON(Store & store) const; }; void to_json(nlohmann::json & json, const BuildResult & buildResult); +void to_json(nlohmann::json & json, const KeyedBuildResult & buildResult); } diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 6c335e17c08..a32dc5e53ed 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -1566,9 +1566,10 @@ Goal::Done DerivationGoal::done( logger->result( act ? act->id : getCurActivity(), resBuildResult, - KeyedBuildResult( - buildResult, - DerivedPath::Built{.drvPath = makeConstantStorePathRef(drvPath), .outputs = wantedOutputs}).toJSON(worker.store)); + nlohmann::json( + KeyedBuildResult( + buildResult, + DerivedPath::Built{.drvPath = makeConstantStorePathRef(drvPath), .outputs = wantedOutputs}))); return amDone(buildResult.success() ? ecSuccess : ecFailed, std::move(ex)); } diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc index 625e64781aa..41d8a0c3002 100644 --- a/src/libstore/build/substitution-goal.cc +++ b/src/libstore/build/substitution-goal.cc @@ -42,9 +42,10 @@ Goal::Done PathSubstitutionGoal::done( logger->result( getCurActivity(), resBuildResult, - KeyedBuildResult( - buildResult, - DerivedPath::Opaque{storePath}).toJSON(worker.store)); + nlohmann::json( + KeyedBuildResult( + buildResult, + DerivedPath::Opaque{storePath}))); return amDone(result); } From 8674792eba1ba41dc3d048ab8d88f3cdf2bb2aa2 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 14 Mar 2025 17:33:48 +0100 Subject: [PATCH 271/361] Make the JSON logger more robust We now ignore connection / write errors. 
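For context, the build-result records added in the previous commits are emitted by the
JSON logger as one JSON object per line of the form
{"action":"result","id":...,"type":110,"payload":{...}}, where 110 is the value of
resBuildResult and the payload is the serialised KeyedBuildResult (status, optional
error message and timing fields, plus the derived path). A rough sketch of an external
consumer — not part of this patch; the log file name is made up and jq is assumed to
be available:

    # Extract the build-result payloads from a JSON log written by the logger above.
    # 110 is the numeric value of resBuildResult.
    jq -c 'select(.action == "result" and .type == 110) | .payload' /tmp/nix-log.json

With this change, a log destination that stops accepting writes no longer aborts Nix;
the JSON logger simply disables itself with a warning.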
--- src/libutil/logging.cc | 15 +++++++++++++-- src/nix/main.cc | 6 +++++- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index c7b859bd536..de8df24b016 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -196,6 +196,7 @@ struct JSONLogger : Logger { struct State { + bool enabled = true; }; Sync _state; @@ -208,8 +209,18 @@ struct JSONLogger : Logger { /* Acquire a lock to prevent log messages from clobbering each other. */ - auto state(_state.lock()); - writeLine(fd, line); + try { + auto state(_state.lock()); + if (state->enabled) + writeLine(fd, line); + } catch (...) { + bool enabled = false; + std::swap(_state.lock()->enabled, enabled); + if (enabled) { + ignoreExceptionExceptInterrupt(); + logger->warn("disabling JSON logger due to write errors"); + } + } } void log(Verbosity lvl, std::string_view s) override diff --git a/src/nix/main.cc b/src/nix/main.cc index 68137a216a3..644c65cf041 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -502,7 +502,11 @@ void mainWrapped(int argc, char * * argv) } if (!extLoggerSettings.jsonLogPath.get().empty()) { - logger = makeTeeLogger({logger, makeJSONLogger(std::filesystem::path(extLoggerSettings.jsonLogPath.get()), false)}); + try { + logger = makeTeeLogger({logger, makeJSONLogger(std::filesystem::path(extLoggerSettings.jsonLogPath.get()), false)}); + } catch (...) { + ignoreExceptionExceptInterrupt(); + } } if (args.helpRequested) { From c32441f207194e480f4570df5560a9ffc2d207da Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 20 Mar 2025 20:20:02 +0100 Subject: [PATCH 272/361] Remove redundant quotes --- src/libutil/logging.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index de8df24b016..ddf90d7c53a 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -323,7 +323,7 @@ Logger * makeJSONLogger(const std::filesystem::path & path, bool includeNixPrefi ? 
connect(path) : toDescriptor(open(path.c_str(), O_CREAT | O_APPEND | O_WRONLY, 0644)); if (!fd) - throw SysError("opening log file '%1%'", path); + throw SysError("opening log file %1%", path); return new JSONFileLogger(std::move(fd), includeNixPrefix); } From bc3a847784223978580878fdb8dce141c37d9cbf Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 21 Mar 2025 15:59:05 +0100 Subject: [PATCH 273/361] BuildResult: Serialize builtOutputs --- src/libstore/build-result.cc | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/libstore/build-result.cc b/src/libstore/build-result.cc index e6469e38f05..1f27f68f44a 100644 --- a/src/libstore/build-result.cc +++ b/src/libstore/build-result.cc @@ -32,7 +32,14 @@ void to_json(nlohmann::json & json, const KeyedBuildResult & buildResult) [&](const DerivedPathOpaque & opaque) { path["opaque"] = opaque.path.to_string(); }, [&](const DerivedPathBuilt & drv) { path["drvPath"] = drv.drvPath->getBaseStorePath().to_string(); - path["outputs"] = drv.outputs.to_string(); + path["outputs"] = drv.outputs; + auto outputs = nlohmann::json::object(); + for (auto & [name, output] : buildResult.builtOutputs) + outputs[name] = { + {"path", output.outPath.to_string()}, + {"signatures", output.signatures}, + }; + json["builtOutputs"] = std::move(outputs); }, }, buildResult.path.raw()); From 9c26996e73057485f37165332583de5aa8c6bf3f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 24 Mar 2025 21:34:11 +0100 Subject: [PATCH 274/361] Fix release notes (1.0.0 -> 3.0.0) --- doc/manual/source/SUMMARY.md.in | 2 +- doc/manual/source/release-notes-determinate/changes.md | 2 +- .../release-notes-determinate/{rl-1.0.0.md => rl-3.0.0.md} | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) rename doc/manual/source/release-notes-determinate/{rl-1.0.0.md => rl-3.0.0.md} (78%) diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index 8d6ad9f93be..c218c306bf5 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -128,7 +128,7 @@ - [Contributing](development/contributing.md) - [Determinate Nix Release Notes](release-notes-determinate/index.md) - [Changes between Nix and Determinate Nix](release-notes-determinate/changes.md) - - [Release 1.0.0 (2025-??-??)](release-notes-determinate/rl-1.0.0.md) + - [Release 3.0.0 (2025-03-04)](release-notes-determinate/rl-3.0.0.md) - [Nix Release Notes](release-notes/index.md) {{#include ./SUMMARY-rl-next.md}} - [Release 2.27 (2025-03-03)](release-notes/rl-2.27.md) diff --git a/doc/manual/source/release-notes-determinate/changes.md b/doc/manual/source/release-notes-determinate/changes.md index a71867ea2ec..4f60f139b02 100644 --- a/doc/manual/source/release-notes-determinate/changes.md +++ b/doc/manual/source/release-notes-determinate/changes.md @@ -1,6 +1,6 @@ # Changes between Nix and Determinate Nix -This section lists the differences between upstream Nix 2.24 and Determinate Nix 1.0. +This section lists the differences between upstream Nix 2.24 and Determinate Nix 3.0.0. * In Determinate Nix, flakes are stable. You no longer need to enable the `flakes` experimental feature. 
diff --git a/doc/manual/source/release-notes-determinate/rl-1.0.0.md b/doc/manual/source/release-notes-determinate/rl-3.0.0.md similarity index 78% rename from doc/manual/source/release-notes-determinate/rl-1.0.0.md rename to doc/manual/source/release-notes-determinate/rl-3.0.0.md index 16dcc9d3e9f..d60786e9a72 100644 --- a/doc/manual/source/release-notes-determinate/rl-1.0.0.md +++ b/doc/manual/source/release-notes-determinate/rl-3.0.0.md @@ -1,4 +1,4 @@ -# Release 1.0.0 (2025-??-??) +# Release 3.0.0 (2025-03-04) * Initial release of Determinate Nix. From 117d6719238c079c13858db9014653c542932c46 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 24 Mar 2025 21:28:34 +0100 Subject: [PATCH 275/361] Bump Determinate Nix version --- .version-determinate | 2 +- doc/manual/source/SUMMARY.md.in | 1 + doc/manual/source/release-notes-determinate/changes.md | 2 +- doc/manual/source/release-notes-determinate/rl-3.1.0.md | 3 +++ 4 files changed, 6 insertions(+), 2 deletions(-) create mode 100644 doc/manual/source/release-notes-determinate/rl-3.1.0.md diff --git a/.version-determinate b/.version-determinate index 4a36342fcab..fd2a01863fd 100644 --- a/.version-determinate +++ b/.version-determinate @@ -1 +1 @@ -3.0.0 +3.1.0 diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index c218c306bf5..57edad19915 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -129,6 +129,7 @@ - [Determinate Nix Release Notes](release-notes-determinate/index.md) - [Changes between Nix and Determinate Nix](release-notes-determinate/changes.md) - [Release 3.0.0 (2025-03-04)](release-notes-determinate/rl-3.0.0.md) + - [Release 3.1.0 (2025-??-??)](release-notes-determinate/rl-3.1.0.md) - [Nix Release Notes](release-notes/index.md) {{#include ./SUMMARY-rl-next.md}} - [Release 2.27 (2025-03-03)](release-notes/rl-2.27.md) diff --git a/doc/manual/source/release-notes-determinate/changes.md b/doc/manual/source/release-notes-determinate/changes.md index 4f60f139b02..fa468dee9e5 100644 --- a/doc/manual/source/release-notes-determinate/changes.md +++ b/doc/manual/source/release-notes-determinate/changes.md @@ -1,6 +1,6 @@ # Changes between Nix and Determinate Nix -This section lists the differences between upstream Nix 2.24 and Determinate Nix 3.0.0. +This section lists the differences between upstream Nix 2.24 and Determinate Nix 3.1.0. * In Determinate Nix, flakes are stable. You no longer need to enable the `flakes` experimental feature. diff --git a/doc/manual/source/release-notes-determinate/rl-3.1.0.md b/doc/manual/source/release-notes-determinate/rl-3.1.0.md new file mode 100644 index 00000000000..8d55939da64 --- /dev/null +++ b/doc/manual/source/release-notes-determinate/rl-3.1.0.md @@ -0,0 +1,3 @@ +# Release 3.1.0 (2025-??-??) + +* Based on [upstream Nix 2.27.1](../release-notes/rl-2.27.md). 
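As a side note on the `BuildResult: Serialize builtOutputs` change above: once
builtOutputs is part of the build-result payload, a consumer can map output names to
the paths that were produced. A hedged jq sketch (the log path and output format are
illustrative; the serialised path is the base name produced by StorePath::to_string(),
i.e. without the store directory prefix):

    # For each build-result record, print "output -> path" for its built outputs.
    # Opaque (substituted) paths have no builtOutputs, hence the `// {}` fallback.
    jq -r 'select(.action == "result" and .type == 110)
           | .payload.builtOutputs // {} | to_entries[]
           | "\(.key) -> \(.value.path)"' /tmp/nix-log.json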
From 9d0c3dd6a747b1aeb9312041e17d8d72e9b1b713 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 24 Mar 2025 22:02:49 +0100 Subject: [PATCH 276/361] Formatting --- flake.nix | 42 ++++++++++++++++++++++------------------ packaging/components.nix | 11 ----------- 2 files changed, 23 insertions(+), 30 deletions(-) diff --git a/flake.nix b/flake.nix index a1a7b160c98..54cf1a36df6 100644 --- a/flake.nix +++ b/flake.nix @@ -310,27 +310,31 @@ closures = forAllSystems (system: self.packages.${system}.default.outPath); - closures_json = pkgs.runCommand "versions.json" - { - buildInputs = [ pkgs.jq ]; - passAsFile = [ "json" ]; - json = builtins.toJSON closures; - } '' - cat "$jsonPath" | jq . > $out - ''; + closures_json = + pkgs.runCommand "versions.json" + { + buildInputs = [ pkgs.jq ]; + passAsFile = [ "json" ]; + json = builtins.toJSON closures; + } + '' + cat "$jsonPath" | jq . > $out + ''; - closures_nix = pkgs.runCommand "versions.nix" - { - buildInputs = [ pkgs.jq ]; - passAsFile = [ "template" ]; - jsonPath = closures_json; - template = '' - builtins.fromJSON('''@closures@''') + closures_nix = + pkgs.runCommand "versions.nix" + { + buildInputs = [ pkgs.jq ]; + passAsFile = [ "template" ]; + jsonPath = closures_json; + template = '' + builtins.fromJSON('''@closures@''') + ''; + } + '' + export closures=$(cat "$jsonPath"); + substituteAll "$templatePath" "$out" ''; - } '' - export closures=$(cat "$jsonPath"); - substituteAll "$templatePath" "$out" - ''; in closures_nix; } diff --git a/packaging/components.nix b/packaging/components.nix index 4678e92ca1e..04b143bfe85 100644 --- a/packaging/components.nix +++ b/packaging/components.nix @@ -50,17 +50,6 @@ let exts: userFn: stdenv.mkDerivation (lib.extends (lib.composeManyExtensions exts) userFn); setVersionLayer = finalAttrs: prevAttrs: { - /* - preConfigure = - prevAttrs.preConfigure or "" - + - # Update the repo-global .version file. - # Symlink ./.version points there, but by default only workDir is writable. - '' - chmod u+w ./.version - echo ${finalAttrs.version} > ./.version - ''; - */ }; localSourceLayer = From 502f0273904536d7c162767f33d0dfe3d6612e10 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 26 Mar 2025 22:15:39 +0100 Subject: [PATCH 277/361] nix daemon: Respect json-log-path and re-open for every connection We don't want to inherit the parent's JSON logger since then messages from different daemon processes may clobber each other. --- src/libstore/daemon.cc | 12 +++++++++++- src/libutil/logging.hh | 8 ++++++++ src/nix/main.cc | 19 ++----------------- 3 files changed, 21 insertions(+), 18 deletions(-) diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index b921dbe2de8..13655f6a80b 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -15,6 +15,7 @@ #include "derivations.hh" #include "args.hh" #include "git.hh" +#include "logging.hh" #ifndef _WIN32 // TODO need graceful async exit support on Windows? # include "monitor-fd.hh" @@ -1044,9 +1045,18 @@ void processConnection( auto tunnelLogger = new TunnelLogger(conn.to, protoVersion); auto prevLogger = nix::logger; // FIXME - if (!recursive) + if (!recursive) { logger = tunnelLogger; + if (!loggerSettings.jsonLogPath.get().empty()) { + try { + logger = makeTeeLogger({logger, makeJSONLogger(std::filesystem::path(loggerSettings.jsonLogPath.get()), false)}); + } catch (...) 
{ + ignoreExceptionExceptInterrupt(); + } + } + } + unsigned int opCount = 0; Finally finally([&]() { diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh index aeb058526b6..479459e9f6f 100644 --- a/src/libutil/logging.hh +++ b/src/libutil/logging.hh @@ -52,6 +52,14 @@ struct LoggerSettings : Config Whether Nix should print out a stack trace in case of Nix expression evaluation errors. )"}; + + Setting jsonLogPath{ + this, "", "json-log-path", + R"( + A path to which JSON records of Nix's log output will be + written, in the same format as `--log-format internal-json` + (without the `@nix ` prefixes on each line). + )"}; }; extern LoggerSettings loggerSettings; diff --git a/src/nix/main.cc b/src/nix/main.cc index 644c65cf041..cad561c66db 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -348,21 +348,6 @@ struct CmdHelpStores : Command static auto rCmdHelpStores = registerCommand("help-stores"); -struct ExtLoggerSettings : Config -{ - Setting jsonLogPath{ - this, "", "json-log-path", - R"( - A path to which JSON records of Nix's log output will be - written, in the same format as `--log-format internal-json` - (without the `@nix ` prefixes on each line). - )"}; -}; - -static ExtLoggerSettings extLoggerSettings; - -static GlobalConfig::Register rExtLoggerSettings(&extLoggerSettings); - void mainWrapped(int argc, char * * argv) { savedArgv = argv; @@ -501,9 +486,9 @@ void mainWrapped(int argc, char * * argv) if (!args.helpRequested && !args.completions) throw; } - if (!extLoggerSettings.jsonLogPath.get().empty()) { + if (!loggerSettings.jsonLogPath.get().empty()) { try { - logger = makeTeeLogger({logger, makeJSONLogger(std::filesystem::path(extLoggerSettings.jsonLogPath.get()), false)}); + logger = makeTeeLogger({logger, makeJSONLogger(std::filesystem::path(loggerSettings.jsonLogPath.get()), false)}); } catch (...) { ignoreExceptionExceptInterrupt(); } From 17d0810a7c4d5cd8ae6deff7d15fce6ea100a35b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 26 Mar 2025 23:49:35 +0100 Subject: [PATCH 278/361] Cleanup --- src/libstore/daemon.cc | 12 +----------- src/libutil/logging.cc | 14 ++++++++++++++ src/libutil/logging.hh | 2 ++ src/nix/main.cc | 11 +---------- 4 files changed, 18 insertions(+), 21 deletions(-) diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 1013b23a36f..32c8f4d2dd5 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -1050,17 +1050,7 @@ void processConnection( if (!recursive) { prevLogger_ = std::move(logger); logger = std::move(tunnelLogger_); - - if (!loggerSettings.jsonLogPath.get().empty()) { - try { - std::vector> loggers; - loggers.push_back(std::move(logger)); - loggers.push_back(makeJSONLogger(std::filesystem::path(loggerSettings.jsonLogPath.get()), false)); - logger = makeTeeLogger(std::move(loggers)); - } catch (...) { - ignoreExceptionExceptInterrupt(); - } - } + applyJSONLogger(); } unsigned int opCount = 0; diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index b4bca0b3623..fd54cc580b0 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -341,6 +341,20 @@ std::unique_ptr makeJSONLogger(const std::filesystem::path & path, bool return std::make_unique(std::move(fd), includeNixPrefix); } +void applyJSONLogger() +{ + if (!loggerSettings.jsonLogPath.get().empty()) { + try { + std::vector> loggers; + loggers.push_back(std::move(logger)); + loggers.push_back(makeJSONLogger(std::filesystem::path(loggerSettings.jsonLogPath.get()), false)); + logger = makeTeeLogger(std::move(loggers)); + } catch (...) 
{ + ignoreExceptionExceptInterrupt(); + } + } +} + static Logger::Fields getFields(nlohmann::json & json) { Logger::Fields fields; diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh index 5b69f501c84..290a49bb845 100644 --- a/src/libutil/logging.hh +++ b/src/libutil/logging.hh @@ -221,6 +221,8 @@ std::unique_ptr makeJSONLogger(Descriptor fd, bool includeNixPrefix = tr std::unique_ptr makeJSONLogger(const std::filesystem::path & path, bool includeNixPrefix = true); +void applyJSONLogger(); + /** * @param source A noun phrase describing the source of the message, e.g. "the builder". */ diff --git a/src/nix/main.cc b/src/nix/main.cc index f81a02ce6dc..a2dc371d466 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -487,16 +487,7 @@ void mainWrapped(int argc, char * * argv) if (!args.helpRequested && !args.completions) throw; } - if (!loggerSettings.jsonLogPath.get().empty()) { - try { - std::vector> loggers; - loggers.push_back(std::move(logger)); - loggers.push_back(makeJSONLogger(std::filesystem::path(loggerSettings.jsonLogPath.get()), false)); - logger = makeTeeLogger(std::move(loggers)); - } catch (...) { - ignoreExceptionExceptInterrupt(); - } - } + applyJSONLogger(); if (args.helpRequested) { std::vector subcommand; From eca002ddc40c92ee714445a28e6155b9c235a801 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 27 Mar 2025 13:54:07 +0100 Subject: [PATCH 279/361] Don't segfault if we can't create the JSON logger --- src/libutil/logging.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index fd54cc580b0..16ff1c5f4a6 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -344,12 +344,13 @@ std::unique_ptr makeJSONLogger(const std::filesystem::path & path, bool void applyJSONLogger() { if (!loggerSettings.jsonLogPath.get().empty()) { + std::vector> loggers; try { - std::vector> loggers; loggers.push_back(std::move(logger)); loggers.push_back(makeJSONLogger(std::filesystem::path(loggerSettings.jsonLogPath.get()), false)); logger = makeTeeLogger(std::move(loggers)); } catch (...) { + logger = std::move(loggers[0]); ignoreExceptionExceptInterrupt(); } } From 37f3b255b285e87f353bc9451be5f322c7696e1b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 27 Mar 2025 17:17:01 +0100 Subject: [PATCH 280/361] makeTeeLogger(): Distinguish between main and extra loggers --- src/libutil/logging.cc | 9 +++++---- src/libutil/logging.hh | 9 ++++++++- src/libutil/tee-logger.cc | 9 +++++++-- 3 files changed, 20 insertions(+), 7 deletions(-) diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index 16ff1c5f4a6..7884b6f298e 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -344,15 +344,16 @@ std::unique_ptr makeJSONLogger(const std::filesystem::path & path, bool void applyJSONLogger() { if (!loggerSettings.jsonLogPath.get().empty()) { - std::vector> loggers; try { - loggers.push_back(std::move(logger)); + std::vector> loggers; loggers.push_back(makeJSONLogger(std::filesystem::path(loggerSettings.jsonLogPath.get()), false)); - logger = makeTeeLogger(std::move(loggers)); + // Note: this had better not throw, otherwise `logger` is + // left unset. + logger = makeTeeLogger(std::move(logger), std::move(loggers)); } catch (...) 
{ - logger = std::move(loggers[0]); ignoreExceptionExceptInterrupt(); } + } } diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh index 290a49bb845..07f49be19d1 100644 --- a/src/libutil/logging.hh +++ b/src/libutil/logging.hh @@ -215,7 +215,14 @@ extern std::unique_ptr logger; std::unique_ptr makeSimpleLogger(bool printBuildLogs = true); -std::unique_ptr makeTeeLogger(std::vector> && loggers); +/** + * Create a logger that sends log messages to `mainLogger` and the + * list of loggers in `extraLoggers`. Only `mainLogger` is used for + * writing to stdout and getting user input. + */ +std::unique_ptr makeTeeLogger( + std::unique_ptr mainLogger, + std::vector> && extraLoggers); std::unique_ptr makeJSONLogger(Descriptor fd, bool includeNixPrefix = true); diff --git a/src/libutil/tee-logger.cc b/src/libutil/tee-logger.cc index 84527ffded7..cb254826410 100644 --- a/src/libutil/tee-logger.cc +++ b/src/libutil/tee-logger.cc @@ -100,9 +100,14 @@ struct TeeLogger : Logger } }; -std::unique_ptr makeTeeLogger(std::vector> && loggers) +std::unique_ptr +makeTeeLogger(std::unique_ptr mainLogger, std::vector> && extraLoggers) { - return std::make_unique(std::move(loggers)); + std::vector> allLoggers; + allLoggers.push_back(std::move(mainLogger)); + for (auto & l : extraLoggers) + allLoggers.push_back(std::move(l)); + return std::make_unique(std::move(allLoggers)); } } From f80f7e001b4638667d59551f89b641f0e3fcbfa6 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 27 Mar 2025 19:07:00 +0100 Subject: [PATCH 281/361] Abort if we cannot create TeeLogger --- src/libutil/logging.cc | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index 7884b6f298e..617ebeb1676 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -347,9 +347,12 @@ void applyJSONLogger() try { std::vector> loggers; loggers.push_back(makeJSONLogger(std::filesystem::path(loggerSettings.jsonLogPath.get()), false)); - // Note: this had better not throw, otherwise `logger` is - // left unset. - logger = makeTeeLogger(std::move(logger), std::move(loggers)); + try { + logger = makeTeeLogger(std::move(logger), std::move(loggers)); + } catch (...) { + // `logger` is now gone so give up. + abort(); + } } catch (...) { ignoreExceptionExceptInterrupt(); } From 9e6c999bdfdf54dbf02c28e5cddab0ba670c14be Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 27 Mar 2025 19:07:06 +0100 Subject: [PATCH 282/361] Add release note --- doc/manual/source/release-notes-determinate/changes.md | 2 ++ doc/manual/source/release-notes-determinate/rl-3.0.0.md | 2 ++ 2 files changed, 4 insertions(+) diff --git a/doc/manual/source/release-notes-determinate/changes.md b/doc/manual/source/release-notes-determinate/changes.md index fa468dee9e5..8e6d053d0f6 100644 --- a/doc/manual/source/release-notes-determinate/changes.md +++ b/doc/manual/source/release-notes-determinate/changes.md @@ -5,3 +5,5 @@ This section lists the differences between upstream Nix 2.24 and Determinate Nix * In Determinate Nix, flakes are stable. You no longer need to enable the `flakes` experimental feature. * In Determinate Nix, the new Nix CLI (i.e. the `nix` command) is stable. You no longer need to enable the `nix-command` experimental feature. + +* Determinate Nix has a setting [`json-log-path`](@docroot@/command-ref/conf-file.md#conf-json-log-path) to send a copy of all Nix log messages (in JSON format) to a file or Unix domain socket. 
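To illustrate the Unix-domain-socket case mentioned in the note above (a sketch only,
not part of the patch: the socket path is made up, socat is assumed to be available,
and `--option json-log-path ...` is just one of several ways to set the setting):

    # Start a throwaway listener first; the JSON logger connects to the socket
    # when Nix starts up, and a plain file is used otherwise.
    socat -u UNIX-LISTEN:/tmp/nix-log.sock,fork STDOUT &
    nix build nixpkgs#hello --option json-log-path /tmp/nix-log.sock

If the listener goes away mid-build, the logger disables itself with a warning rather
than failing the build (see the earlier robustness commits).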
diff --git a/doc/manual/source/release-notes-determinate/rl-3.0.0.md b/doc/manual/source/release-notes-determinate/rl-3.0.0.md index d60786e9a72..ba9c0479b4b 100644 --- a/doc/manual/source/release-notes-determinate/rl-3.0.0.md +++ b/doc/manual/source/release-notes-determinate/rl-3.0.0.md @@ -3,3 +3,5 @@ * Initial release of Determinate Nix. * Based on [upstream Nix 2.26.2](../release-notes/rl-2.26.md). + +* New setting `json-log-path` that sends a copy of all Nix log messages (in JSON format) to a file or Unix domain socket. From 10f9b2f1fc7edab32d7729ed1643d474caaec114 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 27 Mar 2025 19:08:09 +0100 Subject: [PATCH 283/361] Set release date --- doc/manual/source/SUMMARY.md.in | 2 +- doc/manual/source/release-notes-determinate/rl-3.1.0.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index 57edad19915..69babe05bfe 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -129,7 +129,7 @@ - [Determinate Nix Release Notes](release-notes-determinate/index.md) - [Changes between Nix and Determinate Nix](release-notes-determinate/changes.md) - [Release 3.0.0 (2025-03-04)](release-notes-determinate/rl-3.0.0.md) - - [Release 3.1.0 (2025-??-??)](release-notes-determinate/rl-3.1.0.md) + - [Release 3.1.0 (2025-03-27)](release-notes-determinate/rl-3.1.0.md) - [Nix Release Notes](release-notes/index.md) {{#include ./SUMMARY-rl-next.md}} - [Release 2.27 (2025-03-03)](release-notes/rl-2.27.md) diff --git a/doc/manual/source/release-notes-determinate/rl-3.1.0.md b/doc/manual/source/release-notes-determinate/rl-3.1.0.md index 8d55939da64..02b22ba9fd5 100644 --- a/doc/manual/source/release-notes-determinate/rl-3.1.0.md +++ b/doc/manual/source/release-notes-determinate/rl-3.1.0.md @@ -1,3 +1,3 @@ -# Release 3.1.0 (2025-??-??) +# Release 3.1.0 (2025-03-27) * Based on [upstream Nix 2.27.1](../release-notes/rl-2.27.md). 
From ce8deea082bc7583bc059cf856734886f7e3ae16 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 27 Mar 2025 18:48:05 +0000 Subject: [PATCH 284/361] Prepare release v3.1.0 From 946297c684c7db31c34ec1135175a54afa579b92 Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Thu, 27 Mar 2025 12:02:06 -0700 Subject: [PATCH 285/361] Fixup release notes --- doc/manual/source/SUMMARY.md.in | 2 +- doc/manual/source/release-notes-determinate/rl-3.0.0.md | 2 -- doc/manual/source/release-notes-determinate/rl-3.1.0.md | 2 ++ 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index 69babe05bfe..087c4b93c53 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -128,8 +128,8 @@ - [Contributing](development/contributing.md) - [Determinate Nix Release Notes](release-notes-determinate/index.md) - [Changes between Nix and Determinate Nix](release-notes-determinate/changes.md) - - [Release 3.0.0 (2025-03-04)](release-notes-determinate/rl-3.0.0.md) - [Release 3.1.0 (2025-03-27)](release-notes-determinate/rl-3.1.0.md) + - [Release 3.0.0 (2025-03-04)](release-notes-determinate/rl-3.0.0.md) - [Nix Release Notes](release-notes/index.md) {{#include ./SUMMARY-rl-next.md}} - [Release 2.27 (2025-03-03)](release-notes/rl-2.27.md) diff --git a/doc/manual/source/release-notes-determinate/rl-3.0.0.md b/doc/manual/source/release-notes-determinate/rl-3.0.0.md index ba9c0479b4b..d60786e9a72 100644 --- a/doc/manual/source/release-notes-determinate/rl-3.0.0.md +++ b/doc/manual/source/release-notes-determinate/rl-3.0.0.md @@ -3,5 +3,3 @@ * Initial release of Determinate Nix. * Based on [upstream Nix 2.26.2](../release-notes/rl-2.26.md). - -* New setting `json-log-path` that sends a copy of all Nix log messages (in JSON format) to a file or Unix domain socket. diff --git a/doc/manual/source/release-notes-determinate/rl-3.1.0.md b/doc/manual/source/release-notes-determinate/rl-3.1.0.md index 02b22ba9fd5..96b7819d08d 100644 --- a/doc/manual/source/release-notes-determinate/rl-3.1.0.md +++ b/doc/manual/source/release-notes-determinate/rl-3.1.0.md @@ -1,3 +1,5 @@ # Release 3.1.0 (2025-03-27) * Based on [upstream Nix 2.27.1](../release-notes/rl-2.27.md). + +* New setting `json-log-path` that sends a copy of all Nix log messages (in JSON format) to a file or Unix domain socket. 
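A minimal usage sketch for the `json-log-path` setting described in the release note
above (illustrative only: the paths are made up, and the setting can equally be placed
in nix.conf instead of being passed with `--option`):

    # Write a copy of the log, one JSON record per line, to a file while building.
    nix build nixpkgs#hello --option json-log-path /tmp/nix-log.json

    # The records use the same schema as `--log-format internal-json`, minus the
    # `@nix ` prefix, so standard tools such as jq can consume them directly:
    jq -c 'select(.action == "msg")' /tmp/nix-log.json

Records with `"action":"result"` carry the structured events introduced earlier in
this series, such as hash mismatches and build results.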
From 4a667d4459c74c070faee4509be875bf5337a4ea Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 28 Mar 2025 13:31:06 +0000 Subject: [PATCH 286/361] Prepare release v3.1.1 From 3b72727be0e9f290e8c7ecb816a561122b45d058 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 28 Mar 2025 13:31:09 +0000 Subject: [PATCH 287/361] Set .version-determinate to 3.1.1 --- .version-determinate | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version-determinate b/.version-determinate index fd2a01863fd..94ff29cc4de 100644 --- a/.version-determinate +++ b/.version-determinate @@ -1 +1 @@ -3.1.0 +3.1.1 From c648c52392be46241df8484e128dceee45fb5dba Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Fri, 28 Mar 2025 07:22:21 -0700 Subject: [PATCH 288/361] ci: make macos runners larger --- .github/workflows/ci.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 443664e496c..b1fefc8df58 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -40,12 +40,12 @@ jobs: if: github.event_name == 'merge_group' uses: ./.github/workflows/build.yml with: - os: macos-13 + os: macos-latest-large build_aarch64-darwin: uses: ./.github/workflows/build.yml with: - os: macos-latest + os: macos-latest-xlarge test_x86_64-linux: uses: ./.github/workflows/test.yml @@ -65,14 +65,14 @@ jobs: uses: ./.github/workflows/test.yml needs: build_aarch64-darwin with: - os: macos-13 + os: macos-latest-large test_aarch64-darwin: if: github.event_name == 'merge_group' uses: ./.github/workflows/test.yml needs: build_aarch64-darwin with: - os: macos-latest + os: macos-latest-xlarge vm_tests_smoke: if: github.event_name != 'merge_group' From c3b29c1c8cce4fb70876d6afaeccc626a7bef7be Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 28 Mar 2025 10:56:16 -0400 Subject: [PATCH 289/361] Pass the system to build.yml directly --- .github/workflows/build.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f041267474c..a30eb3ed4df 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -4,9 +4,11 @@ on: os: required: true type: string + system: + required: true + type: string jobs: - build: strategy: fail-fast: false @@ -16,13 +18,11 @@ jobs: - uses: actions/checkout@v4 - uses: DeterminateSystems/nix-installer-action@main with: - flakehub: true + determinate: true - uses: DeterminateSystems/flakehub-cache-action@main - - run: echo "system=$(nix eval --impure --raw --expr 'builtins.currentSystem')" >> "$GITHUB_OUTPUT" - id: system - - run: nix build .# .#binaryTarball --no-link -L - - run: nix build .#binaryTarball --out-link tarball + - run: nix build .#packages.${{ inputs.system }}.default .#packages.${{ inputs.system }}.binaryTarball --no-link -L + - run: nix build .#packages.${{ inputs.system }}.binaryTarball --out-link tarball - uses: actions/upload-artifact@v4 with: - name: ${{ steps.system.outputs.system }} + name: ${{ inputs.system }} path: ./tarball/*.xz From 5766d207a54a04f02788ccf553d7a3fcd0a21a1f Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 28 Mar 2025 10:58:38 -0400 Subject: [PATCH 290/361] Pass system from ci --- .github/workflows/ci.yml | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yml 
b/.github/workflows/ci.yml index b1fefc8df58..28259974fe8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -17,35 +17,39 @@ jobs: eval: runs-on: blacksmith-32vcpu-ubuntu-2204 steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - uses: DeterminateSystems/nix-installer-action@main - with: - flakehub: true - - run: nix flake show --all-systems --json + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: DeterminateSystems/nix-installer-action@main + with: + flakehub: true + - run: nix flake show --all-systems --json build_x86_64-linux: uses: ./.github/workflows/build.yml with: os: blacksmith-32vcpu-ubuntu-2204 + system: x86_64-linux build_aarch64-linux: if: github.event_name == 'merge_group' uses: ./.github/workflows/build.yml with: os: blacksmith-32vcpu-ubuntu-2204-arm + system: aarch64-linux build_x86_64-darwin: if: github.event_name == 'merge_group' uses: ./.github/workflows/build.yml with: os: macos-latest-large + system: x86_64-darwin build_aarch64-darwin: uses: ./.github/workflows/build.yml with: os: macos-latest-xlarge + system: aarch64-darwin test_x86_64-linux: uses: ./.github/workflows/test.yml From 10b7535c87c5fa2ebd25c8b69d8a076cdda0f26d Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 28 Mar 2025 11:00:31 -0400 Subject: [PATCH 291/361] Pass system to test.yml --- .github/workflows/test.yml | 7 ++++--- .github/workflows/upload-release.yml | 7 +++++++ 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e58827a9c06..a54b1f83988 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -4,9 +4,10 @@ on: os: required: true type: string - + system: + required: true + type: string jobs: - tests: strategy: fail-fast: false @@ -18,4 +19,4 @@ jobs: with: flakehub: true - uses: DeterminateSystems/flakehub-cache-action@main - - run: nix flake check -L + - run: nix flake check -L --system ${{ inputs.system }} diff --git a/.github/workflows/upload-release.yml b/.github/workflows/upload-release.yml index b600dfba04f..f762446bda6 100644 --- a/.github/workflows/upload-release.yml +++ b/.github/workflows/upload-release.yml @@ -29,18 +29,25 @@ jobs: uses: ./.github/workflows/build.yml with: os: blacksmith-32vcpu-ubuntu-2204 + system: x86_64-linux + build-aarch64-linux: uses: ./.github/workflows/build.yml with: os: blacksmith-32vcpu-ubuntu-2204-arm + system: aarch64-linux + build-x86_64-darwin: uses: ./.github/workflows/build.yml with: os: macos-13 + system: x86_64-darwin + build-aarch64-darwin: uses: ./.github/workflows/build.yml with: os: macos-latest + system: aarch64-darwin release: runs-on: ubuntu-latest From 6469efee7be029d82806e41a9300d6f4648d5490 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 28 Mar 2025 11:04:51 -0400 Subject: [PATCH 292/361] Pass the system to the test yml from ci --- .github/workflows/ci.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 28259974fe8..fc7f491d844 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -56,6 +56,7 @@ jobs: needs: build_x86_64-linux with: os: blacksmith-32vcpu-ubuntu-2204 + system: x86_64-linux test_aarch64-linux: if: github.event_name == 'merge_group' @@ -63,13 +64,15 @@ jobs: needs: build_aarch64-linux with: os: blacksmith-32vcpu-ubuntu-2204-arm + system: aarch64-linux test_x86_64-darwin: if: github.event_name == 'merge_group' uses: ./.github/workflows/test.yml - needs: 
build_aarch64-darwin + needs: build_x86_64-darwin with: os: macos-latest-large + system: x86_64-darwin test_aarch64-darwin: if: github.event_name == 'merge_group' @@ -77,6 +80,7 @@ jobs: needs: build_aarch64-darwin with: os: macos-latest-xlarge + system: aarch64-darwin vm_tests_smoke: if: github.event_name != 'merge_group' From 8762c10aaebc0344b56ab78756e1f3ed8df77b44 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 28 Mar 2025 11:06:22 -0400 Subject: [PATCH 293/361] Move the if evaluation of the test jobs onthe tests job itself, so we can skip it properly in PRs and block on it in merge groups --- .github/workflows/ci.yml | 6 +++--- .github/workflows/test.yml | 5 +++++ 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fc7f491d844..7c1ef3cda5b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -59,26 +59,26 @@ jobs: system: x86_64-linux test_aarch64-linux: - if: github.event_name == 'merge_group' uses: ./.github/workflows/test.yml needs: build_aarch64-linux with: + if: github.event_name == 'merge_group' os: blacksmith-32vcpu-ubuntu-2204-arm system: aarch64-linux test_x86_64-darwin: - if: github.event_name == 'merge_group' uses: ./.github/workflows/test.yml needs: build_x86_64-darwin with: + if: github.event_name == 'merge_group' os: macos-latest-large system: x86_64-darwin test_aarch64-darwin: - if: github.event_name == 'merge_group' uses: ./.github/workflows/test.yml needs: build_aarch64-darwin with: + if: github.event_name == 'merge_group' os: macos-latest-xlarge system: aarch64-darwin diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index a54b1f83988..49af88020ac 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -7,8 +7,13 @@ on: system: required: true type: string + if: + required: false + default: true + type: boolean jobs: tests: + if: ${{ inputs.if }} strategy: fail-fast: false runs-on: ${{ inputs.os }} From 96e7e63ea08d2b4d30382012429a9e99b7acaf7d Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 28 Mar 2025 11:09:04 -0400 Subject: [PATCH 294/361] Bigger runners thank you --- .github/workflows/ci.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7c1ef3cda5b..b363f9951c6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -42,13 +42,13 @@ jobs: if: github.event_name == 'merge_group' uses: ./.github/workflows/build.yml with: - os: macos-latest-large + os: namespace-profile-mac-m2-12c28g system: x86_64-darwin build_aarch64-darwin: uses: ./.github/workflows/build.yml with: - os: macos-latest-xlarge + os: namespace-profile-mac-m2-12c28g system: aarch64-darwin test_x86_64-linux: @@ -71,7 +71,7 @@ jobs: needs: build_x86_64-darwin with: if: github.event_name == 'merge_group' - os: macos-latest-large + os: namespace-profile-mac-m2-12c28g system: x86_64-darwin test_aarch64-darwin: @@ -79,7 +79,7 @@ jobs: needs: build_aarch64-darwin with: if: github.event_name == 'merge_group' - os: macos-latest-xlarge + os: namespace-profile-mac-m2-12c28g system: aarch64-darwin vm_tests_smoke: From feba05b18dec460bdae4857edc2a6f2dacff9c3b Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 28 Mar 2025 11:15:04 -0400 Subject: [PATCH 295/361] fixup ifs --- .github/workflows/ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b363f9951c6..b2b542bccc6 100644 
--- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -62,7 +62,7 @@ jobs: uses: ./.github/workflows/test.yml needs: build_aarch64-linux with: - if: github.event_name == 'merge_group' + if: ${{ github.event_name == 'merge_group' }} os: blacksmith-32vcpu-ubuntu-2204-arm system: aarch64-linux @@ -70,7 +70,7 @@ jobs: uses: ./.github/workflows/test.yml needs: build_x86_64-darwin with: - if: github.event_name == 'merge_group' + if: ${{ github.event_name == 'merge_group' }} os: namespace-profile-mac-m2-12c28g system: x86_64-darwin @@ -78,7 +78,7 @@ jobs: uses: ./.github/workflows/test.yml needs: build_aarch64-darwin with: - if: github.event_name == 'merge_group' + if: ${{ github.event_name == 'merge_group' }} os: namespace-profile-mac-m2-12c28g system: aarch64-darwin From 2315b54f914796dc54a8cc54084573da1a259f6b Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 28 Mar 2025 11:22:58 -0400 Subject: [PATCH 296/361] Move down the if from build workflows --- .github/workflows/build.yml | 5 +++++ .github/workflows/ci.yml | 4 ++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index a30eb3ed4df..84dbdfd79bf 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -7,9 +7,14 @@ on: system: required: true type: string + if: + required: false + default: true + type: boolean jobs: build: + if: ${{ inputs.if }} strategy: fail-fast: false runs-on: ${{ inputs.os }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b2b542bccc6..6bba30f9d65 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -32,16 +32,16 @@ jobs: system: x86_64-linux build_aarch64-linux: - if: github.event_name == 'merge_group' uses: ./.github/workflows/build.yml with: + if: ${{ github.event_name == 'merge_group' }} os: blacksmith-32vcpu-ubuntu-2204-arm system: aarch64-linux build_x86_64-darwin: - if: github.event_name == 'merge_group' uses: ./.github/workflows/build.yml with: + if: ${{ github.event_name == 'merge_group' }} os: namespace-profile-mac-m2-12c28g system: x86_64-darwin From 42cb18970337d4b417b38fb8762a82c61eefcd52 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 28 Mar 2025 12:20:28 -0400 Subject: [PATCH 297/361] success/failure the vm checks --- .github/workflows/ci.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6bba30f9d65..bff8dcc4e8f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -59,6 +59,7 @@ jobs: system: x86_64-linux test_aarch64-linux: + if: success() || failure() uses: ./.github/workflows/test.yml needs: build_aarch64-linux with: @@ -67,6 +68,7 @@ jobs: system: aarch64-linux test_x86_64-darwin: + if: success() || failure() uses: ./.github/workflows/test.yml needs: build_x86_64-darwin with: From c134cf52dbae31e28b76f2472055d984280b63a0 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 28 Mar 2025 12:25:25 -0400 Subject: [PATCH 298/361] Collapse build / test into one .yml to make skips easier --- .github/workflows/build.yml | 17 ++++++++++++++ .github/workflows/ci.yml | 35 +--------------------------- .github/workflows/test.yml | 27 --------------------- .github/workflows/upload-release.yml | 4 ++++ 4 files changed, 22 insertions(+), 61 deletions(-) delete mode 100644 .github/workflows/test.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 84dbdfd79bf..607a31a6b0f 100644 --- a/.github/workflows/build.yml +++ 
b/.github/workflows/build.yml @@ -11,6 +11,10 @@ on: required: false default: true type: boolean + run_tests: + required: false + default: true + type: boolean jobs: build: @@ -31,3 +35,16 @@ jobs: with: name: ${{ inputs.system }} path: ./tarball/*.xz + test: + if: ${{ inputs.if && inputs.run_tests}} + strategy: + fail-fast: false + runs-on: ${{ inputs.os }} + timeout-minutes: 60 + steps: + - uses: actions/checkout@v4 + - uses: DeterminateSystems/nix-installer-action@main + with: + determinate: true + - uses: DeterminateSystems/flakehub-cache-action@main + - run: nix flake check -L --system ${{ inputs.system }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bff8dcc4e8f..6c400f29bb3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -51,39 +51,6 @@ jobs: os: namespace-profile-mac-m2-12c28g system: aarch64-darwin - test_x86_64-linux: - uses: ./.github/workflows/test.yml - needs: build_x86_64-linux - with: - os: blacksmith-32vcpu-ubuntu-2204 - system: x86_64-linux - - test_aarch64-linux: - if: success() || failure() - uses: ./.github/workflows/test.yml - needs: build_aarch64-linux - with: - if: ${{ github.event_name == 'merge_group' }} - os: blacksmith-32vcpu-ubuntu-2204-arm - system: aarch64-linux - - test_x86_64-darwin: - if: success() || failure() - uses: ./.github/workflows/test.yml - needs: build_x86_64-darwin - with: - if: ${{ github.event_name == 'merge_group' }} - os: namespace-profile-mac-m2-12c28g - system: x86_64-darwin - - test_aarch64-darwin: - uses: ./.github/workflows/test.yml - needs: build_aarch64-darwin - with: - if: ${{ github.event_name == 'merge_group' }} - os: namespace-profile-mac-m2-12c28g - system: aarch64-darwin - vm_tests_smoke: if: github.event_name != 'merge_group' needs: build_x86_64-linux @@ -165,7 +132,7 @@ jobs: run: nix build .#hydraJobs.manual - uses: nwtgck/actions-netlify@v3.0 with: - publish-dir: './result/share/doc/nix/manual' + publish-dir: "./result/share/doc/nix/manual" production-branch: detsys-main github-token: ${{ secrets.GITHUB_TOKEN }} deploy-message: "Deploy from GitHub Actions" diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml deleted file mode 100644 index 49af88020ac..00000000000 --- a/.github/workflows/test.yml +++ /dev/null @@ -1,27 +0,0 @@ -on: - workflow_call: - inputs: - os: - required: true - type: string - system: - required: true - type: string - if: - required: false - default: true - type: boolean -jobs: - tests: - if: ${{ inputs.if }} - strategy: - fail-fast: false - runs-on: ${{ inputs.os }} - timeout-minutes: 60 - steps: - - uses: actions/checkout@v4 - - uses: DeterminateSystems/nix-installer-action@main - with: - flakehub: true - - uses: DeterminateSystems/flakehub-cache-action@main - - run: nix flake check -L --system ${{ inputs.system }} diff --git a/.github/workflows/upload-release.yml b/.github/workflows/upload-release.yml index f762446bda6..083f39dfd4b 100644 --- a/.github/workflows/upload-release.yml +++ b/.github/workflows/upload-release.yml @@ -30,24 +30,28 @@ jobs: with: os: blacksmith-32vcpu-ubuntu-2204 system: x86_64-linux + run_tests: false build-aarch64-linux: uses: ./.github/workflows/build.yml with: os: blacksmith-32vcpu-ubuntu-2204-arm system: aarch64-linux + run_tests: false build-x86_64-darwin: uses: ./.github/workflows/build.yml with: os: macos-13 system: x86_64-darwin + run_tests: false build-aarch64-darwin: uses: ./.github/workflows/build.yml with: os: macos-latest system: aarch64-darwin + run_tests: false release: runs-on: ubuntu-latest From 
77c2ac633e100c94b10c7b28a12cd713252478a3 Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Fri, 28 Mar 2025 13:43:45 -0300 Subject: [PATCH 299/361] Use determinate param with nix-installer-action --- .github/workflows/build.yml | 2 +- .github/workflows/ci.yml | 10 +++++----- .github/workflows/test.yml | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f041267474c..49f9beba776 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -16,7 +16,7 @@ jobs: - uses: actions/checkout@v4 - uses: DeterminateSystems/nix-installer-action@main with: - flakehub: true + determinate: true - uses: DeterminateSystems/flakehub-cache-action@main - run: echo "system=$(nix eval --impure --raw --expr 'builtins.currentSystem')" >> "$GITHUB_OUTPUT" id: system diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b1fefc8df58..147d2526957 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -22,7 +22,7 @@ jobs: fetch-depth: 0 - uses: DeterminateSystems/nix-installer-action@main with: - flakehub: true + determinate: true - run: nix flake show --all-systems --json build_x86_64-linux: @@ -82,7 +82,7 @@ jobs: - uses: actions/checkout@v4 - uses: DeterminateSystems/nix-installer-action@main with: - flakehub: true + determinate: true - uses: DeterminateSystems/flakehub-cache-action@main - run: | nix build -L \ @@ -100,7 +100,7 @@ jobs: - uses: actions/checkout@v4 - uses: DeterminateSystems/nix-installer-action@main with: - flakehub: true + determinate: true - uses: DeterminateSystems/flakehub-cache-action@main - run: | nix build -L --keep-going \ @@ -130,7 +130,7 @@ jobs: path: flake-regressions/tests - uses: DeterminateSystems/nix-installer-action@main with: - flakehub: true + determinate: true - uses: DeterminateSystems/flakehub-cache-action@main - run: nix build -L --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH MAX_FLAKES=50 flake-regressions/eval-all.sh @@ -149,7 +149,7 @@ jobs: uses: actions/checkout@v4 - uses: DeterminateSystems/nix-installer-action@main with: - flakehub: true + determinate: true - uses: DeterminateSystems/flakehub-cache-action@main - name: Build manual run: nix build .#hydraJobs.manual diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e58827a9c06..7b58c825f37 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -16,6 +16,6 @@ jobs: - uses: actions/checkout@v4 - uses: DeterminateSystems/nix-installer-action@main with: - flakehub: true + determinate: true - uses: DeterminateSystems/flakehub-cache-action@main - run: nix flake check -L From 6feccefc2d0347d100839e171bd027feb6e25b2e Mon Sep 17 00:00:00 2001 From: Luc Perkins Date: Fri, 28 Mar 2025 13:58:39 -0300 Subject: [PATCH 300/361] Remove test.yml --- .github/workflows/test.yml | 21 --------------------- 1 file changed, 21 deletions(-) delete mode 100644 .github/workflows/test.yml diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml deleted file mode 100644 index 7b58c825f37..00000000000 --- a/.github/workflows/test.yml +++ /dev/null @@ -1,21 +0,0 @@ -on: - workflow_call: - inputs: - os: - required: true - type: string - -jobs: - - tests: - strategy: - fail-fast: false - runs-on: ${{ inputs.os }} - timeout-minutes: 60 - steps: - - uses: actions/checkout@v4 - - uses: DeterminateSystems/nix-installer-action@main - with: - determinate: true - - uses: DeterminateSystems/flakehub-cache-action@main - - run: nix flake check -L From 
5506428e679e9402fa835ba74c5d97e0f3dbcbdb Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 20 Feb 2025 01:42:29 +0100 Subject: [PATCH 301/361] Set path display for substituted inputs --- src/libfetchers/fetchers.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index abf021554e7..de1885db9ed 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -323,6 +323,8 @@ std::pair, Input> Input::getAccessorUnchecked(ref sto accessor->fingerprint = getFingerprint(store); + accessor->setPathDisplay("«" + to_string() + "»"); + return {accessor, *this}; } catch (Error & e) { debug("substitution of input '%s' failed: %s", to_string(), e.what()); From b28bc7ae6471e22354ebdfa3b32765b743cae6b6 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 20 Feb 2025 01:09:49 +0100 Subject: [PATCH 302/361] Make rootFS's showPath() render the paths from the original accessors This makes paths in error messages behave similar to lazy-trees, e.g. instead of store paths like error: attribute 'foobar' missing at /nix/store/ddzfiipzqlrh3gnprmqbadnsnrxsmc9i-source/machine/configuration.nix:209:7: 208| 209| pkgs.foobar | ^ 210| ]; you now get error: attribute 'foobar' missing at /home/eelco/Misc/eelco-configurations/machine/configuration.nix:209:7: 208| 209| pkgs.foobar | ^ 210| ]; --- src/libexpr/eval.cc | 32 +++++++++++++ src/libexpr/eval.hh | 10 ++++ src/libexpr/primops/fetchMercurial.cc | 2 +- src/libexpr/primops/fetchTree.cc | 4 +- src/libfetchers/fetchers.cc | 32 ++++++------- src/libfetchers/fetchers.hh | 2 +- src/libflake/flake/flake.cc | 2 + src/libutil/forwarding-source-accessor.hh | 57 +++++++++++++++++++++++ src/libutil/meson.build | 1 + src/nix/flake.cc | 2 +- 10 files changed, 122 insertions(+), 22 deletions(-) create mode 100644 src/libutil/forwarding-source-accessor.hh diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 4e15175ac2d..fcfee2d293c 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -14,6 +14,7 @@ #include "profiles.hh" #include "print.hh" #include "filtering-source-accessor.hh" +#include "forwarding-source-accessor.hh" #include "memory-source-accessor.hh" #include "gc-small-vector.hh" #include "url.hh" @@ -180,6 +181,34 @@ static Symbol getName(const AttrName & name, EvalState & state, Env & env) } } +struct PathDisplaySourceAccessor : ForwardingSourceAccessor +{ + ref storePathAccessors; + + PathDisplaySourceAccessor( + ref next, + ref storePathAccessors) + : ForwardingSourceAccessor(next) + , storePathAccessors(storePathAccessors) + { + } + + std::string showPath(const CanonPath & path) override + { + /* Find the accessor that produced `path`, if any, and use it + to render a more informative path + (e.g. `«github:foo/bar»/flake.nix` rather than + `/nix/store/hash.../flake.nix`). */ + auto ub = storePathAccessors->upper_bound(path); + if (ub != storePathAccessors->begin()) + ub--; + if (ub != storePathAccessors->end() && path.isWithin(ub->first)) + return ub->second->showPath(path.removePrefix(ub->first)); + else + return next->showPath(path); + } +}; + static constexpr size_t BASE_ENV_SIZE = 128; EvalState::EvalState( @@ -245,6 +274,7 @@ EvalState::EvalState( } , repair(NoRepair) , emptyBindings(0) + , storePathAccessors(make_ref()) , rootFS( ({ /* In pure eval mode, we provide a filesystem that only @@ -270,6 +300,8 @@ EvalState::EvalState( : makeUnionSourceAccessor({accessor, storeFS}); } + accessor = make_ref(accessor, storePathAccessors); + /* Apply access control if needed. 
*/ if (settings.restrictEval || settings.pureEval) accessor = AllowListSourceAccessor::create(accessor, {}, diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index eb6f667a253..3797c40a43c 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -262,6 +262,16 @@ public: /** `"unknown"` */ Value vStringUnknown; + using StorePathAccessors = std::map>; + + /** + * A map back to the original `SourceAccessor`s used to produce + * store paths. We keep track of this to produce error messages + * that refer to the original flakerefs. + * FIXME: use Sync. + */ + ref storePathAccessors; + /** * The accessor for the root filesystem. */ diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc index 64e3abf2db4..96800d9efa9 100644 --- a/src/libexpr/primops/fetchMercurial.cc +++ b/src/libexpr/primops/fetchMercurial.cc @@ -64,7 +64,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a if (rev) attrs.insert_or_assign("rev", rev->gitRev()); auto input = fetchers::Input::fromAttrs(state.fetchSettings, std::move(attrs)); - auto [storePath, input2] = input.fetchToStore(state.store); + auto [storePath, accessor, input2] = input.fetchToStore(state.store); auto attrs2 = state.buildBindings(8); state.mkStorePathString(storePath, attrs2.alloc(state.sOutPath)); diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index 0c82c82bfab..8bbc435e440 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -200,10 +200,12 @@ static void fetchTree( throw Error("input '%s' is not allowed to use the '__final' attribute", input.to_string()); } - auto [storePath, input2] = input.fetchToStore(state.store); + auto [storePath, accessor, input2] = input.fetchToStore(state.store); state.allowPath(storePath); + state.storePathAccessors->insert_or_assign(CanonPath(state.store->printStorePath(storePath)), accessor); + emitTreeAttrs(state, storePath, input2, v, params.emptyRevFallback, false); } diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index de1885db9ed..67728501e6e 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -187,34 +187,30 @@ bool Input::contains(const Input & other) const } // FIXME: remove -std::pair Input::fetchToStore(ref store) const +std::tuple, Input> Input::fetchToStore(ref store) const { if (!scheme) throw Error("cannot fetch unsupported input '%s'", attrsToJSON(toAttrs())); - auto [storePath, input] = [&]() -> std::pair { - try { - auto [accessor, result] = getAccessorUnchecked(store); - - auto storePath = nix::fetchToStore(*store, SourcePath(accessor), FetchMode::Copy, result.getName()); + try { + auto [accessor, result] = getAccessorUnchecked(store); - auto narHash = store->queryPathInfo(storePath)->narHash; - result.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true)); + auto storePath = nix::fetchToStore(*store, SourcePath(accessor), FetchMode::Copy, result.getName()); - result.attrs.insert_or_assign("__final", Explicit(true)); + auto narHash = store->queryPathInfo(storePath)->narHash; + result.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true)); - assert(result.isFinal()); + result.attrs.insert_or_assign("__final", Explicit(true)); - checkLocks(*this, result); + assert(result.isFinal()); - return {storePath, result}; - } catch (Error & e) { - e.addTrace({}, "while fetching the input '%s'", to_string()); - throw; - } - }(); + checkLocks(*this, result); - return 
{std::move(storePath), input}; + return {std::move(storePath), accessor, result}; + } catch (Error & e) { + e.addTrace({}, "while fetching the input '%s'", to_string()); + throw; + } } void Input::checkLocks(Input specified, Input & result) diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh index 01354a6e38d..798d60177f0 100644 --- a/src/libfetchers/fetchers.hh +++ b/src/libfetchers/fetchers.hh @@ -121,7 +121,7 @@ public: * Fetch the entire input into the Nix store, returning the * location in the Nix store and the locked input. */ - std::pair fetchToStore(ref store) const; + std::tuple, Input> fetchToStore(ref store) const; /** * Check the locking attributes in `result` against diff --git a/src/libflake/flake/flake.cc b/src/libflake/flake/flake.cc index b678d5b6450..a14b55c6ae8 100644 --- a/src/libflake/flake/flake.cc +++ b/src/libflake/flake/flake.cc @@ -92,6 +92,8 @@ static StorePath copyInputToStore( state.allowPath(storePath); + state.storePathAccessors->insert_or_assign(CanonPath(state.store->printStorePath(storePath)), accessor); + auto narHash = state.store->queryPathInfo(storePath)->narHash; input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true)); diff --git a/src/libutil/forwarding-source-accessor.hh b/src/libutil/forwarding-source-accessor.hh new file mode 100644 index 00000000000..bdba2addcb0 --- /dev/null +++ b/src/libutil/forwarding-source-accessor.hh @@ -0,0 +1,57 @@ +#pragma once + +#include "source-accessor.hh" + +namespace nix { + +/** + * A source accessor that just forwards every operation to another + * accessor. This is not useful in itself but can be used as a + * superclass for accessors that do change some operations. + */ +struct ForwardingSourceAccessor : SourceAccessor +{ + ref next; + + ForwardingSourceAccessor(ref next) + : next(next) + { + } + + std::string readFile(const CanonPath & path) override + { + return next->readFile(path); + } + + void readFile(const CanonPath & path, Sink & sink, std::function sizeCallback) override + { + next->readFile(path, sink, sizeCallback); + } + + std::optional maybeLstat(const CanonPath & path) override + { + return next->maybeLstat(path); + } + + DirEntries readDirectory(const CanonPath & path) override + { + return next->readDirectory(path); + } + + std::string readLink(const CanonPath & path) override + { + return next->readLink(path); + } + + std::string showPath(const CanonPath & path) override + { + return next->showPath(path); + } + + std::optional getPhysicalPath(const CanonPath & path) override + { + return next->getPhysicalPath(path); + } +}; + +} diff --git a/src/libutil/meson.build b/src/libutil/meson.build index ab8f8f4db74..b2bc0b4ec60 100644 --- a/src/libutil/meson.build +++ b/src/libutil/meson.build @@ -215,6 +215,7 @@ headers = [config_h] + files( 'file-system.hh', 'finally.hh', 'fmt.hh', + 'forwarding-source-accessor.hh', 'fs-sink.hh', 'git.hh', 'hash.hh', diff --git a/src/nix/flake.cc b/src/nix/flake.cc index cbd412547cf..9ffe65b0694 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -1095,7 +1095,7 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun storePath = dryRun ? 
(*inputNode)->lockedRef.input.computeStorePath(*store) - : (*inputNode)->lockedRef.input.fetchToStore(store).first; + : std::get<0>((*inputNode)->lockedRef.input.fetchToStore(store)); sources.insert(*storePath); } if (json) { From 3f0a8241fcf0bd66a169cd845410e6a0a1d25b70 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 1 Apr 2025 13:58:08 +0200 Subject: [PATCH 303/361] Fix path display of empty Git repos --- src/libfetchers/git-utils.cc | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index a2761a543ee..6b9d1bce614 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -1221,15 +1221,18 @@ ref GitRepoImpl::getAccessor(const WorkdirInfo & wd, bool export since that would allow access to all its children). */ ref fileAccessor = wd.files.empty() - ? makeEmptySourceAccessor() + ? ({ + auto empty = makeEmptySourceAccessor(); + empty->setPathDisplay(path.string()); + empty; + }) : AllowListSourceAccessor::create( makeFSSourceAccessor(path), std::set { wd.files }, std::move(makeNotAllowedError)).cast(); if (exportIgnore) - return make_ref(self, fileAccessor, std::nullopt); - else - return fileAccessor; + fileAccessor = make_ref(self, fileAccessor, std::nullopt); + return fileAccessor; } ref GitRepoImpl::getFileSystemObjectSink() From b2038f120cf106984853bbfd2af5ff4cb7ca0943 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 1 Apr 2025 13:58:56 +0200 Subject: [PATCH 304/361] Add test for source path display --- tests/functional/flakes/meson.build | 3 ++- tests/functional/flakes/source-paths.sh | 30 +++++++++++++++++++++++++ 2 files changed, 32 insertions(+), 1 deletion(-) create mode 100644 tests/functional/flakes/source-paths.sh diff --git a/tests/functional/flakes/meson.build b/tests/functional/flakes/meson.build index 74ff3d91d80..b8c650db403 100644 --- a/tests/functional/flakes/meson.build +++ b/tests/functional/flakes/meson.build @@ -29,7 +29,8 @@ suites += { 'non-flake-inputs.sh', 'relative-paths.sh', 'symlink-paths.sh', - 'debugger.sh' + 'debugger.sh', + 'source-paths.sh', ], 'workdir': meson.current_source_dir(), } diff --git a/tests/functional/flakes/source-paths.sh b/tests/functional/flakes/source-paths.sh new file mode 100644 index 00000000000..a3ebf4e3aac --- /dev/null +++ b/tests/functional/flakes/source-paths.sh @@ -0,0 +1,30 @@ +#!/usr/bin/env bash + +source ./common.sh + +requireGit + +repo=$TEST_ROOT/repo + +createGitRepo "$repo" + +cat > "$repo/flake.nix" < Date: Mon, 31 Mar 2025 21:35:15 -0400 Subject: [PATCH 305/361] Improve and fix the error message when a file is not tracked by Git --- src/libfetchers/git.cc | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index f46334d3074..5684583cdc5 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -534,11 +534,21 @@ struct GitInputScheme : InputScheme static MakeNotAllowedError makeNotAllowedError(std::string url) { - return [url{std::move(url)}](const CanonPath & path) -> RestrictedPathError - { - if (nix::pathExists(path.abs())) - return RestrictedPathError("access to path '%s' is forbidden because it is not under Git control; maybe you should 'git add' it to the repository '%s'?", path, url); - else + return [url{std::move(url)}](const CanonPath & path) -> RestrictedPathError { + if (nix::pathExists(url + "/" + path.abs())) { + auto relativePath = path.rel(); // .makeRelative(CanonPath("/")); + + return 
RestrictedPathError( + "'%s' is not tracked by Git.\n" + "\n" + "To use '%s', stage it in the Git repository at '%s':\n" + "\n" + "git add %s", + relativePath, + relativePath, + url, + relativePath); + } else return RestrictedPathError("path '%s' does not exist in Git repository '%s'", path, url); }; } From 002faa3d1c6d3f728dc300b321ececb3a5166a02 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 1 Apr 2025 15:14:20 +0200 Subject: [PATCH 306/361] Tweak error message --- src/libfetchers/git.cc | 26 +++++++++++--------------- 1 file changed, 11 insertions(+), 15 deletions(-) diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 5684583cdc5..6b82d9ae38b 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -532,24 +532,20 @@ struct GitInputScheme : InputScheme return *head; } - static MakeNotAllowedError makeNotAllowedError(std::string url) + static MakeNotAllowedError makeNotAllowedError(std::filesystem::path repoPath) { - return [url{std::move(url)}](const CanonPath & path) -> RestrictedPathError { - if (nix::pathExists(url + "/" + path.abs())) { - auto relativePath = path.rel(); // .makeRelative(CanonPath("/")); - + return [repoPath{std::move(repoPath)}](const CanonPath & path) -> RestrictedPathError { + if (nix::pathExists(repoPath / path.rel())) return RestrictedPathError( - "'%s' is not tracked by Git.\n" + "File '%1%' in the repository %2% is not tracked by Git.\n" "\n" - "To use '%s', stage it in the Git repository at '%s':\n" + "To make it visible to Nix, run:\n" "\n" - "git add %s", - relativePath, - relativePath, - url, - relativePath); - } else - return RestrictedPathError("path '%s' does not exist in Git repository '%s'", path, url); + "git -C %2% add \"%1%\"", + path.rel(), + repoPath); + else + return RestrictedPathError("path '%s' does not exist in Git repository %s", path, repoPath); }; } @@ -757,7 +753,7 @@ struct GitInputScheme : InputScheme ref accessor = repo->getAccessor(repoInfo.workdirInfo, exportIgnore, - makeNotAllowedError(repoInfo.locationToArg())); + makeNotAllowedError(repoPath)); /* If the repo has submodules, return a mounted input accessor consisting of the accessor for the top-level repo and the From fcddf4afe3b22e31c65780a3c62c6d73d178a086 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 1 Apr 2025 15:19:46 +0200 Subject: [PATCH 307/361] Apply makeNotAllowedError to empty repos --- src/libexpr/eval.cc | 2 +- src/libfetchers/filtering-source-accessor.cc | 14 ++++++++++++-- src/libfetchers/filtering-source-accessor.hh | 3 +++ src/libfetchers/git-utils.cc | 16 ++++------------ tests/functional/flakes/source-paths.sh | 2 +- 5 files changed, 21 insertions(+), 16 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index fcfee2d293c..18b8c2f913e 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -304,7 +304,7 @@ EvalState::EvalState( /* Apply access control if needed. */ if (settings.restrictEval || settings.pureEval) - accessor = AllowListSourceAccessor::create(accessor, {}, + accessor = AllowListSourceAccessor::create(accessor, {}, {}, [&settings](const CanonPath & path) -> RestrictedPathError { auto modeInformation = settings.pureEval ? 
"in pure evaluation mode (use '--impure' to override)" diff --git a/src/libfetchers/filtering-source-accessor.cc b/src/libfetchers/filtering-source-accessor.cc index d4557b6d4dd..c6a00faef01 100644 --- a/src/libfetchers/filtering-source-accessor.cc +++ b/src/libfetchers/filtering-source-accessor.cc @@ -58,18 +58,23 @@ void FilteringSourceAccessor::checkAccess(const CanonPath & path) struct AllowListSourceAccessorImpl : AllowListSourceAccessor { std::set allowedPrefixes; + std::unordered_set allowedPaths; AllowListSourceAccessorImpl( ref next, std::set && allowedPrefixes, + std::unordered_set && allowedPaths, MakeNotAllowedError && makeNotAllowedError) : AllowListSourceAccessor(SourcePath(next), std::move(makeNotAllowedError)) , allowedPrefixes(std::move(allowedPrefixes)) + , allowedPaths(std::move(allowedPaths)) { } bool isAllowed(const CanonPath & path) override { - return path.isAllowed(allowedPrefixes); + return + allowedPaths.contains(path) + || path.isAllowed(allowedPrefixes); } void allowPrefix(CanonPath prefix) override @@ -81,9 +86,14 @@ struct AllowListSourceAccessorImpl : AllowListSourceAccessor ref AllowListSourceAccessor::create( ref next, std::set && allowedPrefixes, + std::unordered_set && allowedPaths, MakeNotAllowedError && makeNotAllowedError) { - return make_ref(next, std::move(allowedPrefixes), std::move(makeNotAllowedError)); + return make_ref( + next, + std::move(allowedPrefixes), + std::move(allowedPaths), + std::move(makeNotAllowedError)); } bool CachingFilteringSourceAccessor::isAllowed(const CanonPath & path) diff --git a/src/libfetchers/filtering-source-accessor.hh b/src/libfetchers/filtering-source-accessor.hh index 1f8d84e531e..41889cfd7d2 100644 --- a/src/libfetchers/filtering-source-accessor.hh +++ b/src/libfetchers/filtering-source-accessor.hh @@ -2,6 +2,8 @@ #include "source-path.hh" +#include + namespace nix { /** @@ -70,6 +72,7 @@ struct AllowListSourceAccessor : public FilteringSourceAccessor static ref create( ref next, std::set && allowedPrefixes, + std::unordered_set && allowedPaths, MakeNotAllowedError && makeNotAllowedError); using FilteringSourceAccessor::FilteringSourceAccessor; diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 6b9d1bce614..6fa33e1305d 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -1215,20 +1215,12 @@ ref GitRepoImpl::getAccessor( ref GitRepoImpl::getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError) { auto self = ref(shared_from_this()); - /* In case of an empty workdir, return an empty in-memory tree. We - cannot use AllowListSourceAccessor because it would return an - error for the root (and we can't add the root to the allow-list - since that would allow access to all its children). */ ref fileAccessor = - wd.files.empty() - ? ({ - auto empty = makeEmptySourceAccessor(); - empty->setPathDisplay(path.string()); - empty; - }) - : AllowListSourceAccessor::create( + AllowListSourceAccessor::create( makeFSSourceAccessor(path), - std::set { wd.files }, + std::set{ wd.files }, + // Always allow access to the root, but not its children. 
+ std::unordered_set{CanonPath::root}, std::move(makeNotAllowedError)).cast(); if (exportIgnore) fileAccessor = make_ref(self, fileAccessor, std::nullopt); diff --git a/tests/functional/flakes/source-paths.sh b/tests/functional/flakes/source-paths.sh index a3ebf4e3aac..1eb8d618d11 100644 --- a/tests/functional/flakes/source-paths.sh +++ b/tests/functional/flakes/source-paths.sh @@ -17,7 +17,7 @@ cat > "$repo/flake.nix" < Date: Tue, 1 Apr 2025 17:29:15 +0200 Subject: [PATCH 308/361] Mount flake input source accessors on top of storeFS This way, we don't need the PathDisplaySourceAccessor source accessor hack, since error messages are produced directly by the original source accessor. In fact, we don't even need to copy the inputs to the store at all, so this gets us very close to lazy trees. We just need to know the store path so that requires hashing the entire input, which isn't lazy. But the next step will be to use a virtual store path that gets rewritten to the actual store path only when needed. --- src/libexpr/eval.cc | 46 +++------------- src/libexpr/eval.hh | 10 ++-- src/libexpr/primops/fetchTree.cc | 3 +- src/libfetchers/filtering-source-accessor.cc | 7 ++- src/libfetchers/filtering-source-accessor.hh | 2 + src/libfetchers/git.cc | 1 + src/libflake/flake/flake.cc | 3 +- src/libutil/forwarding-source-accessor.hh | 57 -------------------- src/libutil/meson.build | 2 +- src/libutil/mounted-source-accessor.cc | 16 ++++-- src/libutil/mounted-source-accessor.hh | 14 +++++ src/libutil/source-accessor.hh | 4 +- tests/functional/flakes/source-paths.sh | 12 +++++ tests/functional/restricted.sh | 6 +-- 14 files changed, 66 insertions(+), 117 deletions(-) delete mode 100644 src/libutil/forwarding-source-accessor.hh create mode 100644 src/libutil/mounted-source-accessor.hh diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 18b8c2f913e..0ad12b9b5be 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -14,8 +14,8 @@ #include "profiles.hh" #include "print.hh" #include "filtering-source-accessor.hh" -#include "forwarding-source-accessor.hh" #include "memory-source-accessor.hh" +#include "mounted-source-accessor.hh" #include "gc-small-vector.hh" #include "url.hh" #include "fetch-to-store.hh" @@ -181,34 +181,6 @@ static Symbol getName(const AttrName & name, EvalState & state, Env & env) } } -struct PathDisplaySourceAccessor : ForwardingSourceAccessor -{ - ref storePathAccessors; - - PathDisplaySourceAccessor( - ref next, - ref storePathAccessors) - : ForwardingSourceAccessor(next) - , storePathAccessors(storePathAccessors) - { - } - - std::string showPath(const CanonPath & path) override - { - /* Find the accessor that produced `path`, if any, and use it - to render a more informative path - (e.g. `«github:foo/bar»/flake.nix` rather than - `/nix/store/hash.../flake.nix`). 
*/ - auto ub = storePathAccessors->upper_bound(path); - if (ub != storePathAccessors->begin()) - ub--; - if (ub != storePathAccessors->end() && path.isWithin(ub->first)) - return ub->second->showPath(path.removePrefix(ub->first)); - else - return next->showPath(path); - } -}; - static constexpr size_t BASE_ENV_SIZE = 128; EvalState::EvalState( @@ -274,7 +246,12 @@ EvalState::EvalState( } , repair(NoRepair) , emptyBindings(0) - , storePathAccessors(make_ref()) + , storeFS( + makeMountedSourceAccessor( + { + {CanonPath::root, makeEmptySourceAccessor()}, + {CanonPath(store->storeDir), makeFSSourceAccessor(dirOf(store->toRealPath(StorePath::dummy)))} + })) , rootFS( ({ /* In pure eval mode, we provide a filesystem that only @@ -290,18 +267,11 @@ EvalState::EvalState( auto realStoreDir = dirOf(store->toRealPath(StorePath::dummy)); if (settings.pureEval || store->storeDir != realStoreDir) { - auto storeFS = makeMountedSourceAccessor( - { - {CanonPath::root, makeEmptySourceAccessor()}, - {CanonPath(store->storeDir), makeFSSourceAccessor(realStoreDir)} - }); accessor = settings.pureEval - ? storeFS + ? storeFS.cast() : makeUnionSourceAccessor({accessor, storeFS}); } - accessor = make_ref(accessor, storePathAccessors); - /* Apply access control if needed. */ if (settings.restrictEval || settings.pureEval) accessor = AllowListSourceAccessor::create(accessor, {}, {}, diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 3797c40a43c..4ae73de57f3 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -37,6 +37,7 @@ class StorePath; struct SingleDerivedPath; enum RepairFlag : bool; struct MemorySourceAccessor; +struct MountedSourceAccessor; namespace eval_cache { class EvalCache; } @@ -262,15 +263,10 @@ public: /** `"unknown"` */ Value vStringUnknown; - using StorePathAccessors = std::map>; - /** - * A map back to the original `SourceAccessor`s used to produce - * store paths. We keep track of this to produce error messages - * that refer to the original flakerefs. - * FIXME: use Sync. + * The accessor corresponding to `store`. */ - ref storePathAccessors; + const ref storeFS; /** * The accessor for the root filesystem. diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index 8bbc435e440..f5ca5fd3e0b 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -10,6 +10,7 @@ #include "url.hh" #include "value-to-json.hh" #include "fetch-to-store.hh" +#include "mounted-source-accessor.hh" #include @@ -204,7 +205,7 @@ static void fetchTree( state.allowPath(storePath); - state.storePathAccessors->insert_or_assign(CanonPath(state.store->printStorePath(storePath)), accessor); + state.storeFS->mount(CanonPath(state.store->printStorePath(storePath)), accessor); emitTreeAttrs(state, storePath, input2, v, params.emptyRevFallback, false); } diff --git a/src/libfetchers/filtering-source-accessor.cc b/src/libfetchers/filtering-source-accessor.cc index c6a00faef01..10a22d0265c 100644 --- a/src/libfetchers/filtering-source-accessor.cc +++ b/src/libfetchers/filtering-source-accessor.cc @@ -20,9 +20,14 @@ bool FilteringSourceAccessor::pathExists(const CanonPath & path) } std::optional FilteringSourceAccessor::maybeLstat(const CanonPath & path) +{ + return isAllowed(path) ? 
next->maybeLstat(prefix / path) : std::nullopt; +} + +SourceAccessor::Stat FilteringSourceAccessor::lstat(const CanonPath & path) { checkAccess(path); - return next->maybeLstat(prefix / path); + return next->lstat(prefix / path); } SourceAccessor::DirEntries FilteringSourceAccessor::readDirectory(const CanonPath & path) diff --git a/src/libfetchers/filtering-source-accessor.hh b/src/libfetchers/filtering-source-accessor.hh index 41889cfd7d2..544b4a490e7 100644 --- a/src/libfetchers/filtering-source-accessor.hh +++ b/src/libfetchers/filtering-source-accessor.hh @@ -38,6 +38,8 @@ struct FilteringSourceAccessor : SourceAccessor bool pathExists(const CanonPath & path) override; + Stat lstat(const CanonPath & path) override; + std::optional maybeLstat(const CanonPath & path) override; DirEntries readDirectory(const CanonPath & path) override; diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 6b82d9ae38b..54c66d151a2 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -15,6 +15,7 @@ #include "fetch-settings.hh" #include "json-utils.hh" #include "archive.hh" +#include "mounted-source-accessor.hh" #include #include diff --git a/src/libflake/flake/flake.cc b/src/libflake/flake/flake.cc index a14b55c6ae8..aa022979323 100644 --- a/src/libflake/flake/flake.cc +++ b/src/libflake/flake/flake.cc @@ -13,6 +13,7 @@ #include "value-to-json.hh" #include "local-fs-store.hh" #include "fetch-to-store.hh" +#include "mounted-source-accessor.hh" #include @@ -92,7 +93,7 @@ static StorePath copyInputToStore( state.allowPath(storePath); - state.storePathAccessors->insert_or_assign(CanonPath(state.store->printStorePath(storePath)), accessor); + state.storeFS->mount(CanonPath(state.store->printStorePath(storePath)), accessor); auto narHash = state.store->queryPathInfo(storePath)->narHash; input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true)); diff --git a/src/libutil/forwarding-source-accessor.hh b/src/libutil/forwarding-source-accessor.hh deleted file mode 100644 index bdba2addcb0..00000000000 --- a/src/libutil/forwarding-source-accessor.hh +++ /dev/null @@ -1,57 +0,0 @@ -#pragma once - -#include "source-accessor.hh" - -namespace nix { - -/** - * A source accessor that just forwards every operation to another - * accessor. This is not useful in itself but can be used as a - * superclass for accessors that do change some operations. 
- */ -struct ForwardingSourceAccessor : SourceAccessor -{ - ref next; - - ForwardingSourceAccessor(ref next) - : next(next) - { - } - - std::string readFile(const CanonPath & path) override - { - return next->readFile(path); - } - - void readFile(const CanonPath & path, Sink & sink, std::function sizeCallback) override - { - next->readFile(path, sink, sizeCallback); - } - - std::optional maybeLstat(const CanonPath & path) override - { - return next->maybeLstat(path); - } - - DirEntries readDirectory(const CanonPath & path) override - { - return next->readDirectory(path); - } - - std::string readLink(const CanonPath & path) override - { - return next->readLink(path); - } - - std::string showPath(const CanonPath & path) override - { - return next->showPath(path); - } - - std::optional getPhysicalPath(const CanonPath & path) override - { - return next->getPhysicalPath(path); - } -}; - -} diff --git a/src/libutil/meson.build b/src/libutil/meson.build index b2bc0b4ec60..f698f04dd98 100644 --- a/src/libutil/meson.build +++ b/src/libutil/meson.build @@ -215,7 +215,6 @@ headers = [config_h] + files( 'file-system.hh', 'finally.hh', 'fmt.hh', - 'forwarding-source-accessor.hh', 'fs-sink.hh', 'git.hh', 'hash.hh', @@ -225,6 +224,7 @@ headers = [config_h] + files( 'logging.hh', 'lru-cache.hh', 'memory-source-accessor.hh', + 'mounted-source-accessor.hh', 'muxable-pipe.hh', 'os-string.hh', 'pool.hh', diff --git a/src/libutil/mounted-source-accessor.cc b/src/libutil/mounted-source-accessor.cc index 79223d15573..e1442d686dd 100644 --- a/src/libutil/mounted-source-accessor.cc +++ b/src/libutil/mounted-source-accessor.cc @@ -1,12 +1,12 @@ -#include "source-accessor.hh" +#include "mounted-source-accessor.hh" namespace nix { -struct MountedSourceAccessor : SourceAccessor +struct MountedSourceAccessorImpl : MountedSourceAccessor { std::map> mounts; - MountedSourceAccessor(std::map> _mounts) + MountedSourceAccessorImpl(std::map> _mounts) : mounts(std::move(_mounts)) { displayPrefix.clear(); @@ -69,11 +69,17 @@ struct MountedSourceAccessor : SourceAccessor auto [accessor, subpath] = resolve(path); return accessor->getPhysicalPath(subpath); } + + void mount(CanonPath mountPoint, ref accessor) override + { + // FIXME: thread-safety + mounts.insert_or_assign(std::move(mountPoint), accessor); + } }; -ref makeMountedSourceAccessor(std::map> mounts) +ref makeMountedSourceAccessor(std::map> mounts) { - return make_ref(std::move(mounts)); + return make_ref(std::move(mounts)); } } diff --git a/src/libutil/mounted-source-accessor.hh b/src/libutil/mounted-source-accessor.hh new file mode 100644 index 00000000000..4e75edfafff --- /dev/null +++ b/src/libutil/mounted-source-accessor.hh @@ -0,0 +1,14 @@ +#pragma once + +#include "source-accessor.hh" + +namespace nix { + +struct MountedSourceAccessor : SourceAccessor +{ + virtual void mount(CanonPath mountPoint, ref accessor) = 0; +}; + +ref makeMountedSourceAccessor(std::map> mounts); + +} diff --git a/src/libutil/source-accessor.hh b/src/libutil/source-accessor.hh index 79ae092ac18..a069e024df1 100644 --- a/src/libutil/source-accessor.hh +++ b/src/libutil/source-accessor.hh @@ -118,7 +118,7 @@ struct SourceAccessor : std::enable_shared_from_this std::string typeString(); }; - Stat lstat(const CanonPath & path); + virtual Stat lstat(const CanonPath & path); virtual std::optional maybeLstat(const CanonPath & path) = 0; @@ -214,8 +214,6 @@ ref getFSSourceAccessor(); */ ref makeFSSourceAccessor(std::filesystem::path root); -ref makeMountedSourceAccessor(std::map> mounts); - /** * 
Construct an accessor that presents a "union" view of a vector of * underlying accessors. Earlier accessors take precedence over later. diff --git a/tests/functional/flakes/source-paths.sh b/tests/functional/flakes/source-paths.sh index 1eb8d618d11..10b834bc8fa 100644 --- a/tests/functional/flakes/source-paths.sh +++ b/tests/functional/flakes/source-paths.sh @@ -13,6 +13,7 @@ cat > "$repo/flake.nix" < "$repo/foo" + +expectStderr 1 nix eval "$repo#z" | grepQuiet "error: File 'foo' in the repository \"$repo\" is not tracked by Git." + +git -C "$repo" add "$repo/foo" + +[[ $(nix eval --raw "$repo#z") = foo ]] diff --git a/tests/functional/restricted.sh b/tests/functional/restricted.sh index 00ee4ddc8c2..bc42ec891d1 100755 --- a/tests/functional/restricted.sh +++ b/tests/functional/restricted.sh @@ -23,7 +23,7 @@ nix-instantiate --restrict-eval ./simple.nix -I src1=./simple.nix -I src2=./conf (! nix-instantiate --restrict-eval --eval -E 'builtins.readFile ./simple.nix') nix-instantiate --restrict-eval --eval -E 'builtins.readFile ./simple.nix' -I src=../.. -expectStderr 1 nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in builtins.readFile ' | grepQuiet "forbidden in restricted mode" +expectStderr 1 nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in builtins.readFile ' #| grepQuiet "forbidden in restricted mode" nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in builtins.readFile ' -I src=. p=$(nix eval --raw --expr "builtins.fetchurl file://${_NIX_TEST_SOURCE_DIR}/restricted.sh" --impure --restrict-eval --allowed-uris "file://${_NIX_TEST_SOURCE_DIR}") @@ -53,9 +53,9 @@ mkdir -p $TEST_ROOT/tunnel.d $TEST_ROOT/foo2 ln -sfn .. $TEST_ROOT/tunnel.d/tunnel echo foo > $TEST_ROOT/bar -expectStderr 1 nix-instantiate --restrict-eval --eval -E "let __nixPath = [ { prefix = \"foo\"; path = $TEST_ROOT/tunnel.d; } ]; in builtins.readFile " -I $TEST_ROOT/tunnel.d | grepQuiet "forbidden in restricted mode" +expectStderr 1 nix-instantiate --restrict-eval --eval -E "let __nixPath = [ { prefix = \"foo\"; path = $TEST_ROOT/tunnel.d; } ]; in builtins.readFile " -I $TEST_ROOT/tunnel.d #| grepQuiet "forbidden in restricted mode" -expectStderr 1 nix-instantiate --restrict-eval --eval -E "let __nixPath = [ { prefix = \"foo\"; path = $TEST_ROOT/tunnel.d; } ]; in builtins.readDir " -I $TEST_ROOT/tunnel.d | grepQuiet "forbidden in restricted mode" +expectStderr 1 nix-instantiate --restrict-eval --eval -E "let __nixPath = [ { prefix = \"foo\"; path = $TEST_ROOT/tunnel.d; } ]; in builtins.readDir " -I $TEST_ROOT/tunnel.d #| grepQuiet "forbidden in restricted mode" # Reading the parents of allowed paths should show only the ancestors of the allowed paths. 
[[ $(nix-instantiate --restrict-eval --eval -E "let __nixPath = [ { prefix = \"foo\"; path = $TEST_ROOT/tunnel.d; } ]; in builtins.readDir " -I $TEST_ROOT/tunnel.d) == '{ "tunnel.d" = "directory"; }' ]] From 5b079073c1639ebc8ddf3eef2f34d7397c94cb91 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 1 Apr 2025 18:34:08 +0200 Subject: [PATCH 309/361] Add FIXME --- src/libflake/flake/flake.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libflake/flake/flake.cc b/src/libflake/flake/flake.cc index aa022979323..d61210670c6 100644 --- a/src/libflake/flake/flake.cc +++ b/src/libflake/flake/flake.cc @@ -91,7 +91,7 @@ static StorePath copyInputToStore( { auto storePath = fetchToStore(*state.store, accessor, FetchMode::Copy, input.getName()); - state.allowPath(storePath); + state.allowPath(storePath); // FIXME: should just whitelist the entire virtual store state.storeFS->mount(CanonPath(state.store->printStorePath(storePath)), accessor); From 1564c8f9d90017ef446815d8aadbf28aaf5a5e81 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 1 Apr 2025 18:37:21 +0200 Subject: [PATCH 310/361] Fix missing file error messages from 'import' --- src/libutil/mounted-source-accessor.cc | 6 ++++++ tests/functional/flakes/source-paths.sh | 19 +++++++++++++++++-- 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/src/libutil/mounted-source-accessor.cc b/src/libutil/mounted-source-accessor.cc index e1442d686dd..c21a7104775 100644 --- a/src/libutil/mounted-source-accessor.cc +++ b/src/libutil/mounted-source-accessor.cc @@ -23,6 +23,12 @@ struct MountedSourceAccessorImpl : MountedSourceAccessor return accessor->readFile(subpath); } + Stat lstat(const CanonPath & path) override + { + auto [accessor, subpath] = resolve(path); + return accessor->lstat(subpath); + } + std::optional maybeLstat(const CanonPath & path) override { auto [accessor, subpath] = resolve(path); diff --git a/tests/functional/flakes/source-paths.sh b/tests/functional/flakes/source-paths.sh index 10b834bc8fa..e82d27c814d 100644 --- a/tests/functional/flakes/source-paths.sh +++ b/tests/functional/flakes/source-paths.sh @@ -14,6 +14,8 @@ cat > "$repo/flake.nix" < "$repo/foo" +echo 123 > "$repo/foo" expectStderr 1 nix eval "$repo#z" | grepQuiet "error: File 'foo' in the repository \"$repo\" is not tracked by Git." +expectStderr 1 nix eval "$repo#a" | grepQuiet "error: File 'foo' in the repository \"$repo\" is not tracked by Git." git -C "$repo" add "$repo/foo" -[[ $(nix eval --raw "$repo#z") = foo ]] +[[ $(nix eval --raw "$repo#z") = 123 ]] + +expectStderr 1 nix eval "$repo#b" | grepQuiet "error: path '/dir' does not exist in Git repository \"$repo\"" + +mkdir -p $repo/dir +echo 456 > $repo/dir/default.nix + +expectStderr 1 nix eval "$repo#b" | grepQuiet "error: File 'dir' in the repository \"$repo\" is not tracked by Git." 
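+# Staging the file with `git add` is enough; no commit is needed for Nix to see it: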
+ +git -C "$repo" add "$repo/dir/default.nix" + +[[ $(nix eval "$repo#b") = 456 ]] From 4e0346dcc15d7ffd8795e6364e2b81f29412f201 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 1 Apr 2025 20:46:26 +0200 Subject: [PATCH 311/361] Restore 'forbidden in restricted mode' errors --- src/libexpr/eval.cc | 10 ++++++++++ tests/functional/restricted.sh | 6 +++--- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 0ad12b9b5be..9b9aabf7e6e 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -3073,6 +3073,11 @@ SourcePath EvalState::findFile(const LookupPath & lookupPath, const std::string_ auto res = (r / CanonPath(suffix)).resolveSymlinks(); if (res.pathExists()) return res; + + // Backward compatibility hack: throw an exception if access + // to this path is not allowed. + if (auto accessor = res.accessor.dynamic_pointer_cast()) + accessor->checkAccess(res.path); } if (hasPrefix(path, "nix/")) @@ -3143,6 +3148,11 @@ std::optional EvalState::resolveLookupPathPath(const LookupPath::Pat if (path.resolveSymlinks().pathExists()) return finish(std::move(path)); else { + // Backward compatibility hack: throw an exception if access + // to this path is not allowed. + if (auto accessor = path.accessor.dynamic_pointer_cast()) + accessor->checkAccess(path.path); + logWarning({ .msg = HintFmt("Nix search path entry '%1%' does not exist, ignoring", value) }); diff --git a/tests/functional/restricted.sh b/tests/functional/restricted.sh index bc42ec891d1..00ee4ddc8c2 100755 --- a/tests/functional/restricted.sh +++ b/tests/functional/restricted.sh @@ -23,7 +23,7 @@ nix-instantiate --restrict-eval ./simple.nix -I src1=./simple.nix -I src2=./conf (! nix-instantiate --restrict-eval --eval -E 'builtins.readFile ./simple.nix') nix-instantiate --restrict-eval --eval -E 'builtins.readFile ./simple.nix' -I src=../.. -expectStderr 1 nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in builtins.readFile ' #| grepQuiet "forbidden in restricted mode" +expectStderr 1 nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in builtins.readFile ' | grepQuiet "forbidden in restricted mode" nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in builtins.readFile ' -I src=. p=$(nix eval --raw --expr "builtins.fetchurl file://${_NIX_TEST_SOURCE_DIR}/restricted.sh" --impure --restrict-eval --allowed-uris "file://${_NIX_TEST_SOURCE_DIR}") @@ -53,9 +53,9 @@ mkdir -p $TEST_ROOT/tunnel.d $TEST_ROOT/foo2 ln -sfn .. 
$TEST_ROOT/tunnel.d/tunnel echo foo > $TEST_ROOT/bar -expectStderr 1 nix-instantiate --restrict-eval --eval -E "let __nixPath = [ { prefix = \"foo\"; path = $TEST_ROOT/tunnel.d; } ]; in builtins.readFile " -I $TEST_ROOT/tunnel.d #| grepQuiet "forbidden in restricted mode" +expectStderr 1 nix-instantiate --restrict-eval --eval -E "let __nixPath = [ { prefix = \"foo\"; path = $TEST_ROOT/tunnel.d; } ]; in builtins.readFile " -I $TEST_ROOT/tunnel.d | grepQuiet "forbidden in restricted mode" -expectStderr 1 nix-instantiate --restrict-eval --eval -E "let __nixPath = [ { prefix = \"foo\"; path = $TEST_ROOT/tunnel.d; } ]; in builtins.readDir " -I $TEST_ROOT/tunnel.d #| grepQuiet "forbidden in restricted mode" +expectStderr 1 nix-instantiate --restrict-eval --eval -E "let __nixPath = [ { prefix = \"foo\"; path = $TEST_ROOT/tunnel.d; } ]; in builtins.readDir " -I $TEST_ROOT/tunnel.d | grepQuiet "forbidden in restricted mode" # Reading the parents of allowed paths should show only the ancestors of the allowed paths. [[ $(nix-instantiate --restrict-eval --eval -E "let __nixPath = [ { prefix = \"foo\"; path = $TEST_ROOT/tunnel.d; } ]; in builtins.readDir " -I $TEST_ROOT/tunnel.d) == '{ "tunnel.d" = "directory"; }' ]] From 25262931711b64b3e5c1067a66b8f6b15872e61d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 1 Apr 2025 20:52:27 +0200 Subject: [PATCH 312/361] shellcheck --- tests/functional/flakes/source-paths.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/functional/flakes/source-paths.sh b/tests/functional/flakes/source-paths.sh index e82d27c814d..5318806ceac 100644 --- a/tests/functional/flakes/source-paths.sh +++ b/tests/functional/flakes/source-paths.sh @@ -47,8 +47,8 @@ git -C "$repo" add "$repo/foo" expectStderr 1 nix eval "$repo#b" | grepQuiet "error: path '/dir' does not exist in Git repository \"$repo\"" -mkdir -p $repo/dir -echo 456 > $repo/dir/default.nix +mkdir -p "$repo/dir" +echo 456 > "$repo/dir/default.nix" expectStderr 1 nix eval "$repo#b" | grepQuiet "error: File 'dir' in the repository \"$repo\" is not tracked by Git." 
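Taken together, patches 305 through 312 replace the old "access to path '…' is forbidden because it is not under Git control" error with an actionable message whenever a flake references a file that exists in the work tree but has not been staged. The console transcript below is only a sketch that mirrors the `source-paths.sh` test above: the repository location `/path/to/repo` is a placeholder, and the exact indentation and quoting of the rendered error may differ slightly from real output.

```console
$ nix eval --raw /path/to/repo#z
error: File 'foo' in the repository "/path/to/repo" is not tracked by Git.

       To make it visible to Nix, run:

       git -C "/path/to/repo" add "foo"
$ git -C /path/to/repo add foo
$ nix eval --raw /path/to/repo#z
123
```

The wording is tightened once more, to "Path '…' in the repository … is not tracked by Git.", in the following patch.
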
From fb7bcdd5543c7deb06cb2e65edd8ca6c895716ec Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 1 Apr 2025 22:56:14 +0200 Subject: [PATCH 313/361] Make Git error messages more consistent --- src/libfetchers/git.cc | 4 ++-- tests/functional/flakes/source-paths.sh | 14 +++++++------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 54c66d151a2..e182740d668 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -538,7 +538,7 @@ struct GitInputScheme : InputScheme return [repoPath{std::move(repoPath)}](const CanonPath & path) -> RestrictedPathError { if (nix::pathExists(repoPath / path.rel())) return RestrictedPathError( - "File '%1%' in the repository %2% is not tracked by Git.\n" + "Path '%1%' in the repository %2% is not tracked by Git.\n" "\n" "To make it visible to Nix, run:\n" "\n" @@ -546,7 +546,7 @@ struct GitInputScheme : InputScheme path.rel(), repoPath); else - return RestrictedPathError("path '%s' does not exist in Git repository %s", path, repoPath); + return RestrictedPathError("Path '%s' does not exist in Git repository %s.", path.rel(), repoPath); }; } diff --git a/tests/functional/flakes/source-paths.sh b/tests/functional/flakes/source-paths.sh index 5318806ceac..3aa3683c27c 100644 --- a/tests/functional/flakes/source-paths.sh +++ b/tests/functional/flakes/source-paths.sh @@ -20,7 +20,7 @@ cat > "$repo/flake.nix" < "$repo/foo" -expectStderr 1 nix eval "$repo#z" | grepQuiet "error: File 'foo' in the repository \"$repo\" is not tracked by Git." -expectStderr 1 nix eval "$repo#a" | grepQuiet "error: File 'foo' in the repository \"$repo\" is not tracked by Git." +expectStderr 1 nix eval "$repo#z" | grepQuiet "error: Path 'foo' in the repository \"$repo\" is not tracked by Git." +expectStderr 1 nix eval "$repo#a" | grepQuiet "error: Path 'foo' in the repository \"$repo\" is not tracked by Git." git -C "$repo" add "$repo/foo" [[ $(nix eval --raw "$repo#z") = 123 ]] -expectStderr 1 nix eval "$repo#b" | grepQuiet "error: path '/dir' does not exist in Git repository \"$repo\"" +expectStderr 1 nix eval "$repo#b" | grepQuiet "error: Path 'dir' does not exist in Git repository \"$repo\"." mkdir -p "$repo/dir" echo 456 > "$repo/dir/default.nix" -expectStderr 1 nix eval "$repo#b" | grepQuiet "error: File 'dir' in the repository \"$repo\" is not tracked by Git." +expectStderr 1 nix eval "$repo#b" | grepQuiet "error: Path 'dir' in the repository \"$repo\" is not tracked by Git." 
git -C "$repo" add "$repo/dir/default.nix" From 2bb85049db815c172a9152f7d22e9f1c16f93271 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 22:41:41 +0000 Subject: [PATCH 314/361] Prepare release v3.2.0 From 1d65af83fd23214b49772664e22dfab5e3511399 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 22:41:44 +0000 Subject: [PATCH 315/361] Set .version-determinate to 3.2.0 --- .version-determinate | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version-determinate b/.version-determinate index 94ff29cc4de..944880fa15e 100644 --- a/.version-determinate +++ b/.version-determinate @@ -1 +1 @@ -3.1.1 +3.2.0 From 086058d17c83c9e55226d252e4236482ebccc74a Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 2 Apr 2025 19:26:12 +0000 Subject: [PATCH 316/361] Prepare release v3.2.1 From 3ad67d1a0369923b4161870d8486a4bd961e9461 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 2 Apr 2025 19:26:15 +0000 Subject: [PATCH 317/361] Set .version-determinate to 3.2.1 --- .version-determinate | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version-determinate b/.version-determinate index 944880fa15e..e4604e3afd0 100644 --- a/.version-determinate +++ b/.version-determinate @@ -1 +1 @@ -3.2.0 +3.2.1 From 8443f01536e1a8f3c13c2a038e56c4b7ad9651b1 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Mon, 7 Apr 2025 09:19:58 -0400 Subject: [PATCH 318/361] Rename `nix profile install` to `nix profile add`. --- src/libcmd/installables.cc | 2 +- src/nix/profile-add.md | 37 ++++++++++++++++++++++++++++++ src/nix/profile-install.md | 34 ---------------------------- src/nix/profile.cc | 34 ++++++++++++++-------------- tests/functional/nix-profile.sh | 40 ++++++++++++++++----------------- 5 files changed, 75 insertions(+), 72 deletions(-) create mode 100644 src/nix/profile-add.md delete mode 100644 src/nix/profile-install.md diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index 2fde59e8b02..e4a1d0a42d3 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -844,7 +844,7 @@ RawInstallablesCommand::RawInstallablesCommand() void RawInstallablesCommand::applyDefaultInstallables(std::vector & rawInstallables) { if (rawInstallables.empty()) { - // FIXME: commands like "nix profile install" should not have a + // FIXME: commands like "nix profile add" should not have a // default, probably. rawInstallables.push_back("."); } diff --git a/src/nix/profile-add.md b/src/nix/profile-add.md new file mode 100644 index 00000000000..0bb65d8e696 --- /dev/null +++ b/src/nix/profile-add.md @@ -0,0 +1,37 @@ +R""( + +# Examples + +- Add a package from Nixpkgs: + + ```console + # nix profile add nixpkgs#hello + ``` + +- Add a package from a specific branch of Nixpkgs: + + ```console + # nix profile add nixpkgs/release-20.09#hello + ``` + +- Add a package from a specific revision of Nixpkgs: + + ```console + # nix profile add nixpkgs/d73407e8e6002646acfdef0e39ace088bacc83da#hello + ``` + +- Add a specific output of a package: + + ```console + # nix profile add nixpkgs#bash^man + ``` + +# Description + +This command adds [_installables_](./nix.md#installables) to a Nix profile. + +> **Note** +> +> `nix profile install` is an alias for `nix profile add` in Determinate Nix. 
+ +)"" diff --git a/src/nix/profile-install.md b/src/nix/profile-install.md deleted file mode 100644 index 4c0f82c09e5..00000000000 --- a/src/nix/profile-install.md +++ /dev/null @@ -1,34 +0,0 @@ -R""( - -# Examples - -* Install a package from Nixpkgs: - - ```console - # nix profile install nixpkgs#hello - ``` - -* Install a package from a specific branch of Nixpkgs: - - ```console - # nix profile install nixpkgs/release-20.09#hello - ``` - -* Install a package from a specific revision of Nixpkgs: - - ```console - # nix profile install nixpkgs/d73407e8e6002646acfdef0e39ace088bacc83da#hello - ``` - -* Install a specific output of a package: - - ```console - # nix profile install nixpkgs#bash^man - ``` - - -# Description - -This command adds [*installables*](./nix.md#installables) to a Nix profile. - -)"" diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 1a129d0c530..b22421a6069 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -338,14 +338,14 @@ builtPathsPerInstallable( return res; } -struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile +struct CmdProfileAdd : InstallablesCommand, MixDefaultProfile { std::optional priority; - CmdProfileInstall() { + CmdProfileAdd() { addFlag({ .longName = "priority", - .description = "The priority of the package to install.", + .description = "The priority of the package to add.", .labels = {"priority"}, .handler = {&priority}, }); @@ -353,13 +353,13 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile std::string description() override { - return "install a package into a profile"; + return "add a package to a profile"; } std::string doc() override { return - #include "profile-install.md" + #include "profile-add.md" ; } @@ -415,7 +415,7 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile && existingSource->originalRef == elementSource->originalRef && existingSource->attrPath == elementSource->attrPath ) { - warn("'%s' is already installed", elementName); + warn("'%s' is already added", elementName); continue; } } @@ -462,15 +462,15 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile "\n" " nix profile remove %3%\n" "\n" - "The new package can also be installed next to the existing one by assigning a different priority.\n" + "The new package can also be added next to the existing one by assigning a different priority.\n" "The conflicting packages have a priority of %5%.\n" "To prioritise the new package:\n" "\n" - " nix profile install %4% --priority %6%\n" + " nix profile add %4% --priority %6%\n" "\n" "To prioritise the existing package:\n" "\n" - " nix profile install %4% --priority %7%\n", + " nix profile add %4% --priority %7%\n", originalConflictingFilePath, newConflictingFilePath, originalEntryName, @@ -708,16 +708,14 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf if (!element.source) { warn( - "Found package '%s', but it was not installed from a flake, so it can't be checked for upgrades!", - element.identifier() - ); + "Found package '%s', but it was not added from a flake, so it can't be checked for upgrades!", + element.identifier()); continue; } if (element.source->originalRef.input.isLocked()) { warn( - "Found package '%s', but it was installed from a locked flake reference so it can't be upgraded!", - element.identifier() - ); + "Found package '%s', but it was added from a locked flake reference so it can't be upgraded!", + element.identifier()); continue; } @@ -787,7 +785,7 @@ struct CmdProfileList : virtual EvalCommand, 
virtual StoreCommand, MixDefaultPro { std::string description() override { - return "list installed packages"; + return "list packages in the profile"; } std::string doc() override @@ -978,7 +976,7 @@ struct CmdProfile : NixMultiCommand : NixMultiCommand( "profile", { - {"install", []() { return make_ref(); }}, + {"add", []() { return make_ref(); }}, {"remove", []() { return make_ref(); }}, {"upgrade", []() { return make_ref(); }}, {"list", []() { return make_ref(); }}, @@ -986,6 +984,8 @@ struct CmdProfile : NixMultiCommand {"history", []() { return make_ref(); }}, {"rollback", []() { return make_ref(); }}, {"wipe-history", []() { return make_ref(); }}, + // 2025-04-05 Deprecated in favor of "add" + {"install", []() { return make_ref(); }}, }) { } diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh index 7cf5fcb7456..b1cfef6b0b2 100755 --- a/tests/functional/nix-profile.sh +++ b/tests/functional/nix-profile.sh @@ -52,7 +52,7 @@ cp "${config_nix}" $flake1Dir/ # Test upgrading from nix-env. nix-env -f ./user-envs.nix -i foo-1.0 nix profile list | grep -A2 'Name:.*foo' | grep 'Store paths:.*foo-1.0' -nix profile install $flake1Dir -L +nix profile add $flake1Dir -L nix profile list | grep -A4 'Name:.*flake1' | grep 'Locked flake URL:.*narHash' [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]] [ -e $TEST_HOME/.nix-profile/share/man ] @@ -64,12 +64,12 @@ nix profile diff-closures | grep 'env-manifest.nix: ε → ∅' # Test XDG Base Directories support export NIX_CONFIG="use-xdg-base-directories = true" nix profile remove flake1 2>&1 | grep 'removed 1 packages' -nix profile install $flake1Dir +nix profile add $flake1Dir [[ $($TEST_HOME/.local/state/nix/profile/bin/hello) = "Hello World" ]] unset NIX_CONFIG -# Test conflicting package install. -nix profile install $flake1Dir 2>&1 | grep "warning: 'flake1' is already installed" +# Test conflicting package add. +nix profile add $flake1Dir 2>&1 | grep "warning: 'flake1' is already added" # Test upgrading a package. printf NixOS > $flake1Dir/who @@ -132,16 +132,16 @@ nix profile history | grep 'foo: 1.0 -> ∅' nix profile diff-closures | grep 'Version 3 -> 4' # Test installing a non-flake package. -nix profile install --file ./simple.nix '' +nix profile add --file ./simple.nix '' [[ $(cat $TEST_HOME/.nix-profile/hello) = "Hello World!" ]] nix profile remove simple 2>&1 | grep 'removed 1 packages' -nix profile install $(nix-build --no-out-link ./simple.nix) +nix profile add $(nix-build --no-out-link ./simple.nix) [[ $(cat $TEST_HOME/.nix-profile/hello) = "Hello World!" ]] # Test packages with same name from different sources mkdir $TEST_ROOT/simple-too cp ./simple.nix "${config_nix}" simple.builder.sh $TEST_ROOT/simple-too -nix profile install --file $TEST_ROOT/simple-too/simple.nix '' +nix profile add --file $TEST_ROOT/simple-too/simple.nix '' nix profile list | grep -A4 'Name:.*simple' | grep 'Name:.*simple-1' nix profile remove simple 2>&1 | grep 'removed 1 packages' nix profile remove simple-1 2>&1 | grep 'removed 1 packages' @@ -160,13 +160,13 @@ nix profile history | grep "packages.$system.default: 1.0, 1.0-man -> 3.0, 3.0-m nix profile remove flake1 2>&1 | grep 'removed 1 packages' printf 4.0 > $flake1Dir/version printf Utrecht > $flake1Dir/who -nix profile install $flake1Dir +nix profile add $flake1Dir [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello Utrecht" ]] [[ $(nix path-info --json $(realpath $TEST_HOME/.nix-profile/bin/hello) | jq -r .[].ca) =~ fixed:r:sha256: ]] # Override the outputs. 
nix profile remove simple flake1 -nix profile install "$flake1Dir^*" +nix profile add "$flake1Dir^*" [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello Utrecht" ]] [ -e $TEST_HOME/.nix-profile/share/man ] [ -e $TEST_HOME/.nix-profile/include ] @@ -179,7 +179,7 @@ nix profile upgrade flake1 [ -e $TEST_HOME/.nix-profile/include ] nix profile remove flake1 2>&1 | grep 'removed 1 packages' -nix profile install "$flake1Dir^man" +nix profile add "$flake1Dir^man" (! [ -e $TEST_HOME/.nix-profile/bin/hello ]) [ -e $TEST_HOME/.nix-profile/share/man ] (! [ -e $TEST_HOME/.nix-profile/include ]) @@ -193,9 +193,9 @@ printf World > $flake1Dir/who cp -r $flake1Dir $flake2Dir printf World2 > $flake2Dir/who -nix profile install $flake1Dir +nix profile add $flake1Dir [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]] -expect 1 nix profile install $flake2Dir +expect 1 nix profile add $flake2Dir diff -u <( nix --offline profile install $flake2Dir 2>&1 1> /dev/null \ | grep -vE "^warning: " \ @@ -214,31 +214,31 @@ error: An existing package already provides the following file: nix profile remove flake1 - The new package can also be installed next to the existing one by assigning a different priority. + The new package can also be added next to the existing one by assigning a different priority. The conflicting packages have a priority of 5. To prioritise the new package: - nix profile install path:${flake2Dir}#packages.${system}.default --priority 4 + nix profile add path:${flake2Dir}#packages.${system}.default --priority 4 To prioritise the existing package: - nix profile install path:${flake2Dir}#packages.${system}.default --priority 6 + nix profile add path:${flake2Dir}#packages.${system}.default --priority 6 EOF ) [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]] -nix profile install $flake2Dir --priority 100 +nix profile add $flake2Dir --priority 100 [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]] -nix profile install $flake2Dir --priority 0 +nix profile add $flake2Dir --priority 0 [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World2" ]] -# nix profile install $flake1Dir --priority 100 +# nix profile add $flake1Dir --priority 100 # [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]] # Ensure that conflicts are handled properly even when the installables aren't # flake references. # Regression test for https://github.com/NixOS/nix/issues/8284 clearProfiles -nix profile install $(nix build $flake1Dir --no-link --print-out-paths) -expect 1 nix profile install --impure --expr "(builtins.getFlake ''$flake2Dir'').packages.$system.default" +nix profile add $(nix build $flake1Dir --no-link --print-out-paths) +expect 1 nix profile add --impure --expr "(builtins.getFlake ''$flake2Dir'').packages.$system.default" # Test upgrading from profile version 2. 
clearProfiles From 74bcfbe10c11359e42761b086828d11e7355eeef Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Mon, 7 Apr 2025 09:15:51 -0700 Subject: [PATCH 319/361] ci: manual: don't try to comment on the perpetual PR --- .github/workflows/ci.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index be68de76485..87a14b4bca2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -136,7 +136,10 @@ jobs: production-branch: detsys-main github-token: ${{ secrets.GITHUB_TOKEN }} deploy-message: "Deploy from GitHub Actions" - enable-pull-request-comment: true + # NOTE(cole-h): We have a perpetual PR displaying our changes against upstream open, but + # its conversation is locked, so this PR comment can never be posted. + # https://github.com/DeterminateSystems/nix-src/pull/4 + enable-pull-request-comment: ${{ github.event.pull_request.number != 4 }} enable-commit-comment: true enable-commit-status: true overwrites-pull-request-comment: true From 5b21c94fabe9a57ed15f0682554c537f31c808db Mon Sep 17 00:00:00 2001 From: Sandro Date: Mon, 7 Apr 2025 15:06:10 +0200 Subject: [PATCH 320/361] Fix meson warnings on minimum version nix> meson.build:216: WARNING: Project targets '>= 1.1' but uses feature introduced in '1.4.0': fs.name with build_tgt, custom_tgt, and custom_idx. nix> meson.build:222: WARNING: Project targets '>= 1.1' but uses feature introduced in '1.4.0': fs.name with build_tgt, custom_tgt, and custom_idx. nix> meson.build:235: WARNING: Project targets '>= 1.1' but uses feature introduced in '1.4.0': fs.name with build_tgt, custom_tgt, and custom_idx. nix> meson.build:236: WARNING: Project targets '>= 1.1' but uses feature introduced in '1.4.0': fs.name with build_tgt, custom_tgt, and custom_idx. nix> meson.build:242: WARNING: Project targets '>= 1.1' but uses feature introduced in '1.4.0': fs.name with build_tgt, custom_tgt, and custom_idx. 
(cherry picked from commit 14a829acbbbc0c8373abbb1d744228047e2fc141) --- src/nix/meson.build | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix/meson.build b/src/nix/meson.build index 3cb45f1f56d..90102133034 100644 --- a/src/nix/meson.build +++ b/src/nix/meson.build @@ -7,7 +7,7 @@ project('nix', 'cpp', 'errorlogs=true', # Please print logs for tests that fail 'localstatedir=/nix/var', ], - meson_version : '>= 1.1', + meson_version : '>= 1.4', license : 'LGPL-2.1-or-later', ) From 17de9dd2755f3ffcd90083a062e73aba4cc3ff2c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 7 Apr 2025 17:54:39 +0200 Subject: [PATCH 321/361] Make lexer-helpers.hh internal to fix a clang-tidy error (cherry picked from commit 8be24f58f2bc3ccbb85570378022673cb8b36b27) --- src/libexpr/include/nix/expr/meson.build | 1 - src/libexpr/lexer-helpers.cc | 5 +---- src/libexpr/{include/nix/expr => }/lexer-helpers.hh | 0 src/libexpr/lexer.l | 3 +-- 4 files changed, 2 insertions(+), 7 deletions(-) rename src/libexpr/{include/nix/expr => }/lexer-helpers.hh (100%) diff --git a/src/libexpr/include/nix/expr/meson.build b/src/libexpr/include/nix/expr/meson.build index 01275e52ee1..50ea8f3c22c 100644 --- a/src/libexpr/include/nix/expr/meson.build +++ b/src/libexpr/include/nix/expr/meson.build @@ -20,7 +20,6 @@ headers = [config_pub_h] + files( 'gc-small-vector.hh', 'get-drvs.hh', 'json-to-value.hh', - # internal: 'lexer-helpers.hh', 'nixexpr.hh', 'parser-state.hh', 'primops.hh', diff --git a/src/libexpr/lexer-helpers.cc b/src/libexpr/lexer-helpers.cc index 4b27393bbac..927e3cc7324 100644 --- a/src/libexpr/lexer-helpers.cc +++ b/src/libexpr/lexer-helpers.cc @@ -1,7 +1,4 @@ -#include "lexer-tab.hh" -#include "parser-tab.hh" - -#include "nix/expr/lexer-helpers.hh" +#include "lexer-helpers.hh" void nix::lexer::internal::initLoc(YYLTYPE * loc) { diff --git a/src/libexpr/include/nix/expr/lexer-helpers.hh b/src/libexpr/lexer-helpers.hh similarity index 100% rename from src/libexpr/include/nix/expr/lexer-helpers.hh rename to src/libexpr/lexer-helpers.hh diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index 511c8e47bbf..1e196741d21 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -25,8 +25,7 @@ #endif #include "nix/expr/nixexpr.hh" -#include "parser-tab.hh" -#include "nix/expr/lexer-helpers.hh" +#include "lexer-helpers.hh" namespace nix { struct LexerState; From aa1c690ebf4c7d229f8ac4138fcf929c1b645206 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 7 Apr 2025 18:10:03 +0200 Subject: [PATCH 322/361] Keep fchmodat2-compat.hh private Since it references store-config-private.hh. 
(cherry picked from commit 04e9dc27ac6a0ebcb4163581a208aeb9837164c3) --- src/libstore/linux/{include/nix/store => }/fchmodat2-compat.hh | 0 src/libstore/linux/include/nix/store/meson.build | 1 - src/libstore/unix/build/local-derivation-goal.cc | 2 +- 3 files changed, 1 insertion(+), 2 deletions(-) rename src/libstore/linux/{include/nix/store => }/fchmodat2-compat.hh (100%) diff --git a/src/libstore/linux/include/nix/store/fchmodat2-compat.hh b/src/libstore/linux/fchmodat2-compat.hh similarity index 100% rename from src/libstore/linux/include/nix/store/fchmodat2-compat.hh rename to src/libstore/linux/fchmodat2-compat.hh diff --git a/src/libstore/linux/include/nix/store/meson.build b/src/libstore/linux/include/nix/store/meson.build index fd05fcaea62..a664aefa9f4 100644 --- a/src/libstore/linux/include/nix/store/meson.build +++ b/src/libstore/linux/include/nix/store/meson.build @@ -1,6 +1,5 @@ include_dirs += include_directories('../..') headers += files( - 'fchmodat2-compat.hh', 'personality.hh', ) diff --git a/src/libstore/unix/build/local-derivation-goal.cc b/src/libstore/unix/build/local-derivation-goal.cc index 4d3813dc59b..9edb6fb0f96 100644 --- a/src/libstore/unix/build/local-derivation-goal.cc +++ b/src/libstore/unix/build/local-derivation-goal.cc @@ -42,7 +42,7 @@ /* Includes required for chroot support. */ #ifdef __linux__ -# include "nix/store/fchmodat2-compat.hh" +# include "linux/fchmodat2-compat.hh" # include # include # include From 85902fad588c259a9b2a8bb7aee4efb355f64ac4 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 7 Apr 2025 17:55:20 +0200 Subject: [PATCH 323/361] Fix some clang-tidy warnings (cherry picked from commit c0ad5d36c451f3fa22f28d91ee814bcc3bc50dbf) --- src/libexpr/include/nix/expr/nixexpr.hh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libexpr/include/nix/expr/nixexpr.hh b/src/libexpr/include/nix/expr/nixexpr.hh index 9409bdca86b..a5ce0fd8922 100644 --- a/src/libexpr/include/nix/expr/nixexpr.hh +++ b/src/libexpr/include/nix/expr/nixexpr.hh @@ -65,7 +65,7 @@ struct DocComment { struct AttrName { Symbol symbol; - Expr * expr; + Expr * expr = nullptr; AttrName(Symbol s) : symbol(s) {}; AttrName(Expr * e) : expr(e) {}; }; @@ -159,7 +159,7 @@ struct ExprVar : Expr `nullptr`: Not from a `with`. Valid pointer: the nearest, innermost `with` expression to query first. 
*/ - ExprWith * fromWith; + ExprWith * fromWith = nullptr; /* In the former case, the value is obtained by going `level` levels up from the current environment and getting the @@ -167,7 +167,7 @@ struct ExprVar : Expr value is obtained by getting the attribute named `name` from the set stored in the environment that is `level` levels up from the current one.*/ - Level level; + Level level = 0; Displacement displ = 0; ExprVar(Symbol name) : name(name) { }; From f0ed61bb4e24cbf957c8472879429229d22a9e5a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 7 Apr 2025 17:09:42 +0200 Subject: [PATCH 324/361] Fix/run monitorfdhup test (cherry picked from commit 340fa00d5243beb0d2c69596e6e890970e5a03ec) --- src/libutil-tests/meson.build | 1 + src/libutil-tests/monitorfdhup.cc | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/libutil-tests/meson.build b/src/libutil-tests/meson.build index 8f9c18eedb4..f2552550d3b 100644 --- a/src/libutil-tests/meson.build +++ b/src/libutil-tests/meson.build @@ -59,6 +59,7 @@ sources = files( 'json-utils.cc', 'logging.cc', 'lru-cache.cc', + 'monitorfdhup.cc', 'nix_api_util.cc', 'pool.cc', 'position.cc', diff --git a/src/libutil-tests/monitorfdhup.cc b/src/libutil-tests/monitorfdhup.cc index 01ecb92d96c..f9da4022da1 100644 --- a/src/libutil-tests/monitorfdhup.cc +++ b/src/libutil-tests/monitorfdhup.cc @@ -1,5 +1,5 @@ -#include "util.hh" -#include "monitor-fd.hh" +#include "nix/util/util.hh" +#include "nix/util/monitor-fd.hh" #include #include From 3a4dc47c22be4bed2614b7b10ee301332338d1ed Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 7 Apr 2025 17:10:28 +0200 Subject: [PATCH 325/361] Remove unused tracing-file-system-object-sink.{hh,cc} (cherry picked from commit 611fd806cbcee3a0c9ae89df5d26a24769e75ed0) --- .../tests/tracing-file-system-object-sink.hh | 41 ------------------- .../tracing-file-system-object-sink.cc | 34 --------------- 2 files changed, 75 deletions(-) delete mode 100644 src/libutil-test-support/include/nix/util/tests/tracing-file-system-object-sink.hh delete mode 100644 src/libutil-test-support/tracing-file-system-object-sink.cc diff --git a/src/libutil-test-support/include/nix/util/tests/tracing-file-system-object-sink.hh b/src/libutil-test-support/include/nix/util/tests/tracing-file-system-object-sink.hh deleted file mode 100644 index d721c13af05..00000000000 --- a/src/libutil-test-support/include/nix/util/tests/tracing-file-system-object-sink.hh +++ /dev/null @@ -1,41 +0,0 @@ -#pragma once -#include "nix/util/fs-sink.hh" - -namespace nix::test { - -/** - * A `FileSystemObjectSink` that traces calls, writing to stderr. - */ -class TracingFileSystemObjectSink : public virtual FileSystemObjectSink -{ - FileSystemObjectSink & sink; -public: - TracingFileSystemObjectSink(FileSystemObjectSink & sink) - : sink(sink) - { - } - - void createDirectory(const CanonPath & path) override; - - void createRegularFile(const CanonPath & path, std::function fn) override; - - void createSymlink(const CanonPath & path, const std::string & target) override; -}; - -/** - * A `ExtendedFileSystemObjectSink` that traces calls, writing to stderr. 
- */ -class TracingExtendedFileSystemObjectSink : public TracingFileSystemObjectSink, public ExtendedFileSystemObjectSink -{ - ExtendedFileSystemObjectSink & sink; -public: - TracingExtendedFileSystemObjectSink(ExtendedFileSystemObjectSink & sink) - : TracingFileSystemObjectSink(sink) - , sink(sink) - { - } - - void createHardlink(const CanonPath & path, const CanonPath & target) override; -}; - -} diff --git a/src/libutil-test-support/tracing-file-system-object-sink.cc b/src/libutil-test-support/tracing-file-system-object-sink.cc deleted file mode 100644 index 52b081fb8fa..00000000000 --- a/src/libutil-test-support/tracing-file-system-object-sink.cc +++ /dev/null @@ -1,34 +0,0 @@ -#include -#include "nix/tracing-file-system-object-sink.hh" - -namespace nix::test { - -void TracingFileSystemObjectSink::createDirectory(const CanonPath & path) -{ - std::cerr << "createDirectory(" << path << ")\n"; - sink.createDirectory(path); -} - -void TracingFileSystemObjectSink::createRegularFile( - const CanonPath & path, std::function fn) -{ - std::cerr << "createRegularFile(" << path << ")\n"; - sink.createRegularFile(path, [&](CreateRegularFileSink & crf) { - // We could wrap this and trace about the chunks of data and such - fn(crf); - }); -} - -void TracingFileSystemObjectSink::createSymlink(const CanonPath & path, const std::string & target) -{ - std::cerr << "createSymlink(" << path << ", target: " << target << ")\n"; - sink.createSymlink(path, target); -} - -void TracingExtendedFileSystemObjectSink::createHardlink(const CanonPath & path, const CanonPath & target) -{ - std::cerr << "createHardlink(" << path << ", target: " << target << ")\n"; - sink.createHardlink(path, target); -} - -} // namespace nix::test From 16a2cddfb9ae2218759a004f8e86cd7f5acfdc81 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Mon, 7 Apr 2025 17:18:15 -0400 Subject: [PATCH 326/361] Add trailing commas on addFlag incantations (cherry picked from commit 06acbd37bdbfb6287b882d0464372e6f71259014) --- src/libcmd/command.cc | 7 ++++--- src/libcmd/common-eval-args.cc | 8 ++++---- src/libcmd/installables.cc | 26 +++++++++++++------------- src/libmain/common-args.cc | 4 ++-- src/libmain/shared.cc | 2 +- src/libstore/globals.cc | 6 +++--- src/nix/build.cc | 2 +- src/nix/bundle.cc | 4 ++-- src/nix/copy.cc | 2 +- src/nix/derivation-show.cc | 2 +- src/nix/develop.cc | 4 ++-- src/nix/env.cc | 21 +++++++++++---------- src/nix/flake.cc | 20 ++++++++++---------- src/nix/prefetch.cc | 4 ++-- src/nix/sigs.cc | 2 +- src/nix/store-delete.cc | 2 +- src/nix/store-gc.cc | 2 +- src/nix/upgrade-nix.cc | 4 ++-- src/nix/verify.cc | 4 ++-- 19 files changed, 64 insertions(+), 62 deletions(-) diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc index 565f424dde7..ce93833cb54 100644 --- a/src/libcmd/command.cc +++ b/src/libcmd/command.cc @@ -237,12 +237,13 @@ void StorePathCommand::run(ref store, StorePaths && storePaths) MixProfile::MixProfile() { - addFlag( - {.longName = "profile", + addFlag({ + .longName = "profile", .description = "The profile to operate on.", .labels = {"path"}, .handler = {&profile}, - .completer = completePath}); + .completer = completePath, + }); } void MixProfile::updateProfile(const StorePath & storePath) diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc index c051792f3d3..1c7c70a3080 100644 --- a/src/libcmd/common-eval-args.cc +++ b/src/libcmd/common-eval-args.cc @@ -63,7 +63,7 @@ MixEvalArgs::MixEvalArgs() .description = "Pass the value *expr* as the argument *name* to Nix 
functions.", .category = category, .labels = {"name", "expr"}, - .handler = {[&](std::string name, std::string expr) { autoArgs.insert_or_assign(name, AutoArg{AutoArgExpr{expr}}); }} + .handler = {[&](std::string name, std::string expr) { autoArgs.insert_or_assign(name, AutoArg{AutoArgExpr{expr}}); }}, }); addFlag({ @@ -80,7 +80,7 @@ MixEvalArgs::MixEvalArgs() .category = category, .labels = {"name", "path"}, .handler = {[&](std::string name, std::string path) { autoArgs.insert_or_assign(name, AutoArg{AutoArgFile{path}}); }}, - .completer = completePath + .completer = completePath, }); addFlag({ @@ -105,7 +105,7 @@ MixEvalArgs::MixEvalArgs() .labels = {"path"}, .handler = {[&](std::string s) { lookupPath.elements.emplace_back(LookupPath::Elem::parse(s)); - }} + }}, }); addFlag({ @@ -131,7 +131,7 @@ MixEvalArgs::MixEvalArgs() }}, .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { completeFlakeRef(completions, openStore(), prefix); - }} + }}, }); addFlag({ diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index c010887fa00..2ebfac3e667 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -64,21 +64,21 @@ MixFlakeOptions::MixFlakeOptions() .handler = {[&]() { lockFlags.recreateLockFile = true; warn("'--recreate-lock-file' is deprecated and will be removed in a future version; use 'nix flake update' instead."); - }} + }}, }); addFlag({ .longName = "no-update-lock-file", .description = "Do not allow any updates to the flake's lock file.", .category = category, - .handler = {&lockFlags.updateLockFile, false} + .handler = {&lockFlags.updateLockFile, false}, }); addFlag({ .longName = "no-write-lock-file", .description = "Do not write the flake's newly generated lock file.", .category = category, - .handler = {&lockFlags.writeLockFile, false} + .handler = {&lockFlags.writeLockFile, false}, }); addFlag({ @@ -94,14 +94,14 @@ MixFlakeOptions::MixFlakeOptions() .handler = {[&]() { lockFlags.useRegistries = false; warn("'--no-registries' is deprecated; use '--no-use-registries'"); - }} + }}, }); addFlag({ .longName = "commit-lock-file", .description = "Commit changes to the flake's lock file.", .category = category, - .handler = {&lockFlags.commitLockFile, true} + .handler = {&lockFlags.commitLockFile, true}, }); addFlag({ @@ -121,7 +121,7 @@ MixFlakeOptions::MixFlakeOptions() }}, .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { completeFlakeInputAttrPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix); - }} + }}, }); addFlag({ @@ -141,7 +141,7 @@ MixFlakeOptions::MixFlakeOptions() } else if (n == 1) { completeFlakeRef(completions, getEvalState()->store, prefix); } - }} + }}, }); addFlag({ @@ -152,7 +152,7 @@ MixFlakeOptions::MixFlakeOptions() .handler = {[&](std::string lockFilePath) { lockFlags.referenceLockFilePath = {getFSSourceAccessor(), CanonPath(absPath(lockFilePath))}; }}, - .completer = completePath + .completer = completePath, }); addFlag({ @@ -163,7 +163,7 @@ MixFlakeOptions::MixFlakeOptions() .handler = {[&](std::string lockFilePath) { lockFlags.outputLockFilePath = lockFilePath; }}, - .completer = completePath + .completer = completePath, }); addFlag({ @@ -190,7 +190,7 @@ MixFlakeOptions::MixFlakeOptions() }}, .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { completeFlakeRef(completions, getEvalState()->store, prefix); - }} + }}, }); } @@ -206,7 +206,7 @@ SourceExprCommand::SourceExprCommand() .category = installablesCategory, 
.labels = {"file"}, .handler = {&file}, - .completer = completePath + .completer = completePath, }); addFlag({ @@ -214,7 +214,7 @@ SourceExprCommand::SourceExprCommand() .description = "Interpret [*installables*](@docroot@/command-ref/new-cli/nix.md#installables) as attribute paths relative to the Nix expression *expr*.", .category = installablesCategory, .labels = {"expr"}, - .handler = {&expr} + .handler = {&expr}, }); } @@ -834,7 +834,7 @@ RawInstallablesCommand::RawInstallablesCommand() addFlag({ .longName = "stdin", .description = "Read installables from the standard input. No default installable applied.", - .handler = {&readFromStdIn, true} + .handler = {&readFromStdIn, true}, }); expectArgs({ diff --git a/src/libmain/common-args.cc b/src/libmain/common-args.cc index c3338996c4b..13b85e54456 100644 --- a/src/libmain/common-args.cc +++ b/src/libmain/common-args.cc @@ -57,7 +57,7 @@ MixCommonArgs::MixCommonArgs(const std::string & programName) if (hasPrefix(s.first, prefix)) completions.add(s.first, fmt("Set the `%s` setting.", s.first)); } - } + }, }); addFlag({ @@ -75,7 +75,7 @@ MixCommonArgs::MixCommonArgs(const std::string & programName) .labels = Strings{"jobs"}, .handler = {[=](std::string s) { settings.set("max-jobs", s); - }} + }}, }); std::string cat = "Options to override configuration settings"; diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 7ff93f6d9c7..50d4991be8b 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -231,7 +231,7 @@ LegacyArgs::LegacyArgs(const std::string & programName, .handler = {[=](std::string s) { auto n = string2IntWithUnitPrefix(s); settings.set(dest, std::to_string(n)); - }} + }}, }); }; diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index c590ccf28b5..c2ecc496494 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -278,21 +278,21 @@ template<> void BaseSetting::convertToArg(Args & args, const std::s .aliases = aliases, .description = "Enable sandboxing.", .category = category, - .handler = {[this]() { override(smEnabled); }} + .handler = {[this]() { override(smEnabled); }}, }); args.addFlag({ .longName = "no-" + name, .aliases = aliases, .description = "Disable sandboxing.", .category = category, - .handler = {[this]() { override(smDisabled); }} + .handler = {[this]() { override(smDisabled); }}, }); args.addFlag({ .longName = "relaxed-" + name, .aliases = aliases, .description = "Enable sandboxing, but allow builds to disable it.", .category = category, - .handler = {[this]() { override(smRelaxed); }} + .handler = {[this]() { override(smRelaxed); }}, }); } diff --git a/src/nix/build.cc b/src/nix/build.cc index 7cd3c7fbeb4..8db831240b8 100644 --- a/src/nix/build.cc +++ b/src/nix/build.cc @@ -55,7 +55,7 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile .description = "Use *path* as prefix for the symlinks to the build results. 
It defaults to `result`.", .labels = {"path"}, .handler = {&outLink}, - .completer = completePath + .completer = completePath, }); addFlag({ diff --git a/src/nix/bundle.cc b/src/nix/bundle.cc index 30b3003e7e6..c334469b5ad 100644 --- a/src/nix/bundle.cc +++ b/src/nix/bundle.cc @@ -24,7 +24,7 @@ struct CmdBundle : InstallableValueCommand .handler = {&bundler}, .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { completeFlakeRef(completions, getStore(), prefix); - }} + }}, }); addFlag({ @@ -33,7 +33,7 @@ struct CmdBundle : InstallableValueCommand .description = "Override the name of the symlink to the build result. It defaults to the base name of the app.", .labels = {"path"}, .handler = {&outLink}, - .completer = completePath + .completer = completePath, }); } diff --git a/src/nix/copy.cc b/src/nix/copy.cc index 0702215fdf6..013f2a7e393 100644 --- a/src/nix/copy.cc +++ b/src/nix/copy.cc @@ -21,7 +21,7 @@ struct CmdCopy : virtual CopyCommand, virtual BuiltPathsCommand, MixProfile .description = "Create symlinks prefixed with *path* to the top-level store paths fetched from the source store.", .labels = {"path"}, .handler = {&outLink}, - .completer = completePath + .completer = completePath, }); addFlag({ diff --git a/src/nix/derivation-show.cc b/src/nix/derivation-show.cc index 050144ccf8b..86755c3e81d 100644 --- a/src/nix/derivation-show.cc +++ b/src/nix/derivation-show.cc @@ -21,7 +21,7 @@ struct CmdShowDerivation : InstallablesCommand .longName = "recursive", .shortName = 'r', .description = "Include the dependencies of the specified derivations.", - .handler = {&recursive, true} + .handler = {&recursive, true}, }); } diff --git a/src/nix/develop.cc b/src/nix/develop.cc index e88134a78a5..00572697aee 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -334,7 +334,7 @@ struct Common : InstallableCommand, MixProfile .labels = {"installable", "outputs-dir"}, .handler = {[&](std::string installable, std::string outputsDir) { redirects.push_back({installable, outputsDir}); - }} + }}, }); } @@ -524,7 +524,7 @@ struct CmdDevelop : Common, MixEnvironment .handler = {[&](std::vector ss) { if (ss.empty()) throw UsageError("--command requires at least one argument"); command = ss; - }} + }}, }); addFlag({ diff --git a/src/nix/env.cc b/src/nix/env.cc index 4b00dbc7c93..f6b12f21c02 100644 --- a/src/nix/env.cc +++ b/src/nix/env.cc @@ -38,16 +38,17 @@ struct CmdShell : InstallablesCommand, MixEnvironment CmdShell() { - addFlag( - {.longName = "command", - .shortName = 'c', - .description = "Command and arguments to be executed, defaulting to `$SHELL`", - .labels = {"command", "args"}, - .handler = {[&](std::vector ss) { - if (ss.empty()) - throw UsageError("--command requires at least one argument"); - command = ss; - }}}); + addFlag({ + .longName = "command", + .shortName = 'c', + .description = "Command and arguments to be executed, defaulting to `$SHELL`", + .labels = {"command", "args"}, + .handler = {[&](std::vector ss) { + if (ss.empty()) + throw UsageError("--command requires at least one argument"); + command = ss; + }}, + }); } std::string description() override diff --git a/src/nix/flake.cc b/src/nix/flake.cc index a7b6000e7fb..3a33db8f219 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -90,7 +90,7 @@ struct CmdFlakeUpdate : FlakeCommand .handler={&flakeUrl}, .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { completeFlakeRef(completions, getStore(), prefix); - }} + }}, }); expectArgs({ .label="inputs", @@ -111,7 
+111,7 @@ struct CmdFlakeUpdate : FlakeCommand }}, .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { completeFlakeInputAttrPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix); - }} + }}, }); /* Remove flags that don't make sense. */ @@ -336,12 +336,12 @@ struct CmdFlakeCheck : FlakeCommand addFlag({ .longName = "no-build", .description = "Do not build checks.", - .handler = {&build, false} + .handler = {&build, false}, }); addFlag({ .longName = "all-systems", .description = "Check the outputs for all systems.", - .handler = {&checkAllSystems, true} + .handler = {&checkAllSystems, true}, }); } @@ -874,7 +874,7 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand defaultTemplateAttrPathsPrefixes, defaultTemplateAttrPaths, prefix); - }} + }}, }); } @@ -1034,7 +1034,7 @@ struct CmdFlakeClone : FlakeCommand .shortName = 'f', .description = "Clone the flake to path *dest*.", .labels = {"path"}, - .handler = {&destDir} + .handler = {&destDir}, }); } @@ -1057,7 +1057,7 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun .longName = "to", .description = "URI of the destination Nix store", .labels = {"store-uri"}, - .handler = {&dstUri} + .handler = {&dstUri}, }); } @@ -1137,12 +1137,12 @@ struct CmdFlakeShow : FlakeCommand, MixJSON addFlag({ .longName = "legacy", .description = "Show the contents of the `legacyPackages` output.", - .handler = {&showLegacy, true} + .handler = {&showLegacy, true}, }); addFlag({ .longName = "all-systems", .description = "Show the contents of outputs for all systems.", - .handler = {&showAllSystems, true} + .handler = {&showAllSystems, true}, }); } @@ -1443,7 +1443,7 @@ struct CmdFlakePrefetch : FlakeCommand, MixJSON .description = "Create symlink named *path* to the resulting store path.", .labels = {"path"}, .handler = {&outLink}, - .completer = completePath + .completer = completePath, }); } diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc index 397134b0304..4495a148994 100644 --- a/src/nix/prefetch.cc +++ b/src/nix/prefetch.cc @@ -275,7 +275,7 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON .longName = "name", .description = "Override the name component of the resulting store path. 
It defaults to the base name of *url*.", .labels = {"name"}, - .handler = {&name} + .handler = {&name}, }); addFlag({ @@ -284,7 +284,7 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON .labels = {"hash"}, .handler = {[&](std::string s) { expectedHash = Hash::parseAny(s, hashAlgo); - }} + }}, }); addFlag(flag::hashAlgo("hash-type", &hashAlgo)); diff --git a/src/nix/sigs.cc b/src/nix/sigs.cc index 87d0e1edbfb..9ef54a414a5 100644 --- a/src/nix/sigs.cc +++ b/src/nix/sigs.cc @@ -104,7 +104,7 @@ struct CmdSign : StorePathsCommand .description = "File containing the secret signing key.", .labels = {"file"}, .handler = {&secretKeyFile}, - .completer = completePath + .completer = completePath, }); } diff --git a/src/nix/store-delete.cc b/src/nix/store-delete.cc index f71a56bc7b0..fae960c9013 100644 --- a/src/nix/store-delete.cc +++ b/src/nix/store-delete.cc @@ -16,7 +16,7 @@ struct CmdStoreDelete : StorePathsCommand addFlag({ .longName = "ignore-liveness", .description = "Do not check whether the paths are reachable from a root.", - .handler = {&options.ignoreLiveness, true} + .handler = {&options.ignoreLiveness, true}, }); } diff --git a/src/nix/store-gc.cc b/src/nix/store-gc.cc index e6a303874f4..c71e89233b9 100644 --- a/src/nix/store-gc.cc +++ b/src/nix/store-gc.cc @@ -17,7 +17,7 @@ struct CmdStoreGC : StoreCommand, MixDryRun .longName = "max", .description = "Stop after freeing *n* bytes of disk space.", .labels = {"n"}, - .handler = {&options.maxFreed} + .handler = {&options.maxFreed}, }); } diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc index c0a6e68276d..64824110460 100644 --- a/src/nix/upgrade-nix.cc +++ b/src/nix/upgrade-nix.cc @@ -23,14 +23,14 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand .shortName = 'p', .description = "The path to the Nix profile to upgrade.", .labels = {"profile-dir"}, - .handler = {&profileDir} + .handler = {&profileDir}, }); addFlag({ .longName = "nix-store-paths-url", .description = "The URL of the file that contains the store paths of the latest Nix release.", .labels = {"url"}, - .handler = {&(std::string&) settings.upgradeNixStorePathUrl} + .handler = {&(std::string&) settings.upgradeNixStorePathUrl}, }); } diff --git a/src/nix/verify.cc b/src/nix/verify.cc index 734387ee7e0..ff81d78b6d1 100644 --- a/src/nix/verify.cc +++ b/src/nix/verify.cc @@ -37,7 +37,7 @@ struct CmdVerify : StorePathsCommand .shortName = 's', .description = "Use signatures from the specified store.", .labels = {"store-uri"}, - .handler = {[&](std::string s) { substituterUris.push_back(s); }} + .handler = {[&](std::string s) { substituterUris.push_back(s); }}, }); addFlag({ @@ -45,7 +45,7 @@ struct CmdVerify : StorePathsCommand .shortName = 'n', .description = "Require that each path is signed by at least *n* different keys.", .labels = {"n"}, - .handler = {&sigsNeeded} + .handler = {&sigsNeeded}, }); } From cd7e01526ea3a4256f0d0862e3d4a6b7fe13bd07 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Mon, 7 Apr 2025 17:24:41 -0400 Subject: [PATCH 327/361] format as required (cherry picked from commit 9b47b2b21703a4c7cadf95f05bfc32b5146d8327) --- src/libcmd/command.cc | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc index ce93833cb54..56541fa5755 100644 --- a/src/libcmd/command.cc +++ b/src/libcmd/command.cc @@ -239,10 +239,10 @@ MixProfile::MixProfile() { addFlag({ .longName = "profile", - .description = "The profile to operate on.", - .labels = {"path"}, - .handler = {&profile}, - .completer = 
completePath, + .description = "The profile to operate on.", + .labels = {"path"}, + .handler = {&profile}, + .completer = completePath, }); } From a08477975d90dc0d2c9f89d2a417bedb5b266931 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 9 Apr 2025 17:59:51 +0200 Subject: [PATCH 328/361] Actually ignore system/user registries during locking Something went wrong in #12068 so this didn't work. Also added a test. (cherry picked from commit 77d4316353deaf8f429025738891b625eb0b5d8a) --- src/libflake/flake/flakeref.cc | 2 +- tests/functional/flakes/flakes.sh | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/src/libflake/flake/flakeref.cc b/src/libflake/flake/flakeref.cc index 6e95eb76759..1580c284641 100644 --- a/src/libflake/flake/flakeref.cc +++ b/src/libflake/flake/flakeref.cc @@ -39,7 +39,7 @@ FlakeRef FlakeRef::resolve( ref store, const fetchers::RegistryFilter & filter) const { - auto [input2, extraAttrs] = lookupInRegistries(store, input); + auto [input2, extraAttrs] = lookupInRegistries(store, input, filter); return FlakeRef(std::move(input2), fetchers::maybeGetStrAttr(extraAttrs, "dir").value_or(subdir)); } diff --git a/tests/functional/flakes/flakes.sh b/tests/functional/flakes/flakes.sh index d8c9f254d15..b67a0964aef 100755 --- a/tests/functional/flakes/flakes.sh +++ b/tests/functional/flakes/flakes.sh @@ -220,6 +220,13 @@ nix store gc nix registry list --flake-registry "file://$registry" --refresh | grepQuiet flake3 mv "$registry.tmp" "$registry" +# Ensure that locking ignores the user registry. +mkdir -p "$TEST_HOME/.config/nix" +ln -sfn "$registry" "$TEST_HOME/.config/nix/registry.json" +nix flake metadata flake1 +expectStderr 1 nix flake update --flake-registry '' --flake "$flake3Dir" | grepQuiet "cannot find flake 'flake:flake1' in the flake registries" +rm "$TEST_HOME/.config/nix/registry.json" + # Test whether flakes are registered as GC roots for offline use. # FIXME: use tarballs rather than git. rm -rf "$TEST_HOME/.cache" From 080950b0fea8df7377f84254728a049149b895d5 Mon Sep 17 00:00:00 2001 From: Rodney Lorrimar Date: Thu, 20 Mar 2025 13:28:05 +0800 Subject: [PATCH 329/361] tests/functional/flakes: Add test case for subflake locking This adds a test case where the lockfile of a relative path flake dependency is updated. It was reported by a user here: https://discourse.nixos.org/t/updating-local-subflakes-inputs-when-building-root-flake/61682 I think this test case relates to issue #7730. Because the issue is not resolved, this test case would fail without the `|| true` clause. 
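For readers who want to reproduce the scenario outside the test harness, here is a rough sketch of the layout the commit message describes: a root flake with a vendored, relative-path subflake that keeps its own lock file. All names, the `path:` URLs and the `x = 123` output are illustrative placeholders, not the exact values used by the test script that follows.

    mkdir -p dep monorepo/sub

    # A hypothetical standalone dependency flake.
    cat > dep/flake.nix <<'EOF'
    { outputs = _: { x = 123; }; }
    EOF

    # The vendored subflake depends on it and carries its own flake.lock.
    cat > monorepo/sub/flake.nix <<EOF
    {
      inputs.dep.url = "path:$PWD/dep";
      outputs = { self, dep }: { x = dep.x; };
    }
    EOF

    # The root flake only refers to the subflake by relative path.
    cat > monorepo/flake.nix <<'EOF'
    {
      inputs.sub.url = "path:./sub";
      outputs = { self, sub }: { x = sub.x; };
    }
    EOF

    (cd monorepo/sub && nix flake lock)   # the subflake gets its own flake.lock
    (cd monorepo && nix flake lock)       # the root flake.lock is populated from sub's lock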
(cherry picked from commit 1bc82d1c867463bc1973991c6819912c391013de) --- tests/functional/flakes/meson.build | 1 + .../flakes/relative-paths-lockfile.sh | 73 +++++++++++++++++++ 2 files changed, 74 insertions(+) create mode 100644 tests/functional/flakes/relative-paths-lockfile.sh diff --git a/tests/functional/flakes/meson.build b/tests/functional/flakes/meson.build index b8c650db403..368c43876e5 100644 --- a/tests/functional/flakes/meson.build +++ b/tests/functional/flakes/meson.build @@ -28,6 +28,7 @@ suites += { 'commit-lock-file-summary.sh', 'non-flake-inputs.sh', 'relative-paths.sh', + 'relative-paths-lockfile.sh', 'symlink-paths.sh', 'debugger.sh', 'source-paths.sh', diff --git a/tests/functional/flakes/relative-paths-lockfile.sh b/tests/functional/flakes/relative-paths-lockfile.sh new file mode 100644 index 00000000000..d91aedd16cd --- /dev/null +++ b/tests/functional/flakes/relative-paths-lockfile.sh @@ -0,0 +1,73 @@ +#!/usr/bin/env bash + +source ./common.sh + +requireGit + +# Test a "vendored" subflake dependency. This is a relative path flake +# which doesn't reference the root flake and has its own lock file. +# +# This might occur in a monorepo for example. The root flake.lock is +# populated from the dependency's flake.lock. + +rootFlake="$TEST_ROOT/flake1" +subflake="$rootFlake/sub" +depFlakeA="$TEST_ROOT/depFlakeA" +depFlakeB="$TEST_ROOT/depFlakeB" + +rm -rf "$rootFlake" +mkdir -p "$rootFlake" "$subflake" "$depFlakeA" "$depFlakeB" + +cat > "$depFlakeA/flake.nix" < "$depFlakeB/flake.nix" < "$subflake/flake.nix" < "$rootFlake/flake.nix" < Date: Wed, 9 Apr 2025 12:31:33 -0400 Subject: [PATCH 330/361] Fix `;` and `#` bug in machine file parsing Comments go to the end of the line, not merely the next ; *or* \n. Fix by splitting on `;` *within* lines, and test. 
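As a rough illustration of the behaviour being fixed (host names and the file path are made up; the unit test added below feeds equivalent text directly to `Machine::parseConfig`): a `#` comment now swallows the rest of its line, including any `;`, while `;` still separates multiple machines on a single line.

    cat > /tmp/machines <<'EOF'
    # a comment ; still part of the same comment, not a machine entry
    nix@scratchy.example.org ; nix@itchy.example.org
    nix@scabby.example.org   # trailing comments are stripped too
    EOF
    # Pointing the `builders` setting at this file, e.g. builders = @/tmp/machines,
    # should now yield exactly the three machine entries.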
(cherry picked from commit f8b13cce19538796a881cc30fe449436d45cdbb6)
---
 src/libstore-tests/machines.cc | 12 +++++++++++
 src/libstore/machines.cc       | 38 ++++++++++++++++++----------------
 2 files changed, 32 insertions(+), 18 deletions(-)

diff --git a/src/libstore-tests/machines.cc b/src/libstore-tests/machines.cc
index 1d574ceeb77..3d857094614 100644
--- a/src/libstore-tests/machines.cc
+++ b/src/libstore-tests/machines.cc
@@ -73,6 +73,18 @@ TEST(machines, getMachinesWithSemicolonSeparator) {
     EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, AuthorityMatches("nix@itchy.labs.cs.uu.nl"))));
 }
 
+TEST(machines, getMachinesWithCommentsAndSemicolonSeparator) {
+    auto actual = Machine::parseConfig({},
+        "# This is a comment ; this is still that comment\n"
+        "nix@scratchy.labs.cs.uu.nl ; nix@itchy.labs.cs.uu.nl\n"
+        "# This is also a comment ; this also is still that comment\n"
+        "nix@scabby.labs.cs.uu.nl\n");
+    EXPECT_THAT(actual, SizeIs(3));
+    EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, AuthorityMatches("nix@scratchy.labs.cs.uu.nl"))));
+    EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, AuthorityMatches("nix@itchy.labs.cs.uu.nl"))));
+    EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, AuthorityMatches("nix@scabby.labs.cs.uu.nl"))));
+}
+
 TEST(machines, getMachinesWithCorrectCompleteSingleBuilder) {
     auto actual = Machine::parseConfig({},
         "nix@scratchy.labs.cs.uu.nl i686-linux "
diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc
index 7c077239d69..6ed4ac8b650 100644
--- a/src/libstore/machines.cc
+++ b/src/libstore/machines.cc
@@ -105,28 +105,30 @@ ref<Store> Machine::openStore() const
 static std::vector<std::string> expandBuilderLines(const std::string & builders)
 {
     std::vector<std::string> result;
-    for (auto line : tokenizeString<std::vector<std::string>>(builders, "\n;")) {
+    for (auto line : tokenizeString<std::vector<std::string>>(builders, "\n")) {
         trim(line);
         line.erase(std::find(line.begin(), line.end(), '#'), line.end());
-        if (line.empty()) continue;
-
-        if (line[0] == '@') {
-            const std::string path = trim(std::string(line, 1));
-            std::string text;
-            try {
-                text = readFile(path);
-            } catch (const SysError & e) {
-                if (e.errNo != ENOENT)
-                    throw;
-                debug("cannot find machines file '%s'", path);
+        for (auto entry : tokenizeString<std::vector<std::string>>(line, ";")) {
+            if (entry.empty()) continue;
+
+            if (entry[0] == '@') {
+                const std::string path = trim(std::string(entry, 1));
+                std::string text;
+                try {
+                    text = readFile(path);
+                } catch (const SysError & e) {
+                    if (e.errNo != ENOENT)
+                        throw;
+                    debug("cannot find machines file '%s'", path);
+                    continue;
+                }
+
+                const auto entrys = expandBuilderLines(text);
+                result.insert(end(result), begin(entrys), end(entrys));
+            } else {
+                result.emplace_back(entry);
             }
-
-            const auto lines = expandBuilderLines(text);
-            result.insert(end(result), begin(lines), end(lines));
-            continue;
         }
-
-        result.emplace_back(line);
     }
     return result;
 }

From f9c262c3d5a2d795625ef723b4f08fd08f653781 Mon Sep 17 00:00:00 2001
From: John Ericson
Date: Wed, 9 Apr 2025 15:23:12 -0400
Subject: [PATCH 331/361] Fix another machine config parsing bug

We were ignoring the result of `trim`, and after my last change we were also
trimming too early.
(cherry picked from commit b74b0f4e1c4efe5e278a1a9b9c59f08688af9115) --- src/libstore-tests/machines.cc | 14 ++++++++++++++ src/libstore/machines.cc | 9 +++++---- 2 files changed, 19 insertions(+), 4 deletions(-) diff --git a/src/libstore-tests/machines.cc b/src/libstore-tests/machines.cc index 3d857094614..084807130d9 100644 --- a/src/libstore-tests/machines.cc +++ b/src/libstore-tests/machines.cc @@ -85,6 +85,20 @@ TEST(machines, getMachinesWithCommentsAndSemicolonSeparator) { EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, AuthorityMatches("nix@scabby.labs.cs.uu.nl")))); } +TEST(machines, getMachinesWithFunnyWhitespace) { + auto actual = Machine::parseConfig({}, + " # commment ; comment\n" + " nix@scratchy.labs.cs.uu.nl ; nix@itchy.labs.cs.uu.nl \n" + "\n \n" + "\n ;;; \n" + "\n ; ; \n" + "nix@scabby.labs.cs.uu.nl\n\n"); + EXPECT_THAT(actual, SizeIs(3)); + EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, AuthorityMatches("nix@scratchy.labs.cs.uu.nl")))); + EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, AuthorityMatches("nix@itchy.labs.cs.uu.nl")))); + EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, AuthorityMatches("nix@scabby.labs.cs.uu.nl")))); +} + TEST(machines, getMachinesWithCorrectCompleteSingleBuilder) { auto actual = Machine::parseConfig({}, "nix@scratchy.labs.cs.uu.nl i686-linux " diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc index 6ed4ac8b650..d98d06651e5 100644 --- a/src/libstore/machines.cc +++ b/src/libstore/machines.cc @@ -106,13 +106,14 @@ static std::vector expandBuilderLines(const std::string & builders) { std::vector result; for (auto line : tokenizeString>(builders, "\n")) { - trim(line); line.erase(std::find(line.begin(), line.end(), '#'), line.end()); for (auto entry : tokenizeString>(line, ";")) { - if (entry.empty()) continue; + entry = trim(entry); - if (entry[0] == '@') { - const std::string path = trim(std::string(entry, 1)); + if (entry.empty()) { + // skip blank entries + } else if (entry[0] == '@') { + const std::string path = trim(std::string_view{entry}.substr(1)); std::string text; try { text = readFile(path); From 666aa20da8aa00dc3eb5b99e761085976fb399f0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 10 Apr 2025 18:40:27 +0200 Subject: [PATCH 332/361] Move alias support from NixArgs to MultiCommand This allows subcommands to declare aliases, e.g. `nix store ping` is now a proper alias of `nix store info`. 
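The user-visible effect, sketched below. The subcommands are real; treat the exact warning text as approximate, since it is produced by the generic `rewriteArgs` code shown in the diff that follows.

    nix store info    # canonical spelling
    nix store ping    # now a true alias of `nix store info`; prints something like
                      #   warning: 'ping' is a deprecated alias for 'info'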
--- doc/manual/meson.build | 1 - src/libutil/args.cc | 21 +++++++ src/libutil/include/nix/util/args.hh | 22 ++++++++ src/nix/main.cc | 83 +++++++++------------------- src/nix/store-info.cc | 15 +---- src/nix/store.cc | 6 +- 6 files changed, 75 insertions(+), 73 deletions(-) diff --git a/doc/manual/meson.build b/doc/manual/meson.build index c251fadb15f..33dea3a2c62 100644 --- a/doc/manual/meson.build +++ b/doc/manual/meson.build @@ -283,7 +283,6 @@ nix3_manpages = [ 'nix3-store', 'nix3-store-optimise', 'nix3-store-path-from-hash-part', - 'nix3-store-ping', 'nix3-store-prefetch-file', 'nix3-store-repair', 'nix3-store-sign', diff --git a/src/libutil/args.cc b/src/libutil/args.cc index 39d66b3ec0f..0541291ad3e 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -647,4 +647,25 @@ nlohmann::json MultiCommand::toJSON() return res; } +Strings::iterator MultiCommand::rewriteArgs(Strings & args, Strings::iterator pos) +{ + if (command) + return command->second->rewriteArgs(args, pos); + + if (aliasUsed || pos == args.end()) return pos; + auto arg = *pos; + auto i = aliases.find(arg); + if (i == aliases.end()) return pos; + auto & info = i->second; + if (info.status == AliasStatus::Deprecated) { + warn("'%s' is a deprecated alias for '%s'", + arg, concatStringsSep(" ", info.replacement)); + } + pos = args.erase(pos); + for (auto j = info.replacement.rbegin(); j != info.replacement.rend(); ++j) + pos = args.insert(pos, *j); + aliasUsed = true; + return pos; +} + } diff --git a/src/libutil/include/nix/util/args.hh b/src/libutil/include/nix/util/args.hh index 77c4fb5b62f..4632703741d 100644 --- a/src/libutil/include/nix/util/args.hh +++ b/src/libutil/include/nix/util/args.hh @@ -393,8 +393,30 @@ public: nlohmann::json toJSON() override; + enum struct AliasStatus { + /** Aliases that don't go away */ + AcceptedShorthand, + /** Aliases that will go away */ + Deprecated, + }; + + /** An alias, except for the original syntax, which is in the map key. */ + struct AliasInfo { + AliasStatus status; + std::vector replacement; + }; + + /** + * A list of aliases (remapping a deprecated/shorthand subcommand + * to something else). + */ + std::map aliases; + + Strings::iterator rewriteArgs(Strings & args, Strings::iterator pos) override; + protected: std::string commandName = ""; + bool aliasUsed = false; }; Strings argvToStrings(int argc, char * * argv); diff --git a/src/nix/main.cc b/src/nix/main.cc index 580be09928f..098d461a31e 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -51,19 +51,6 @@ void chrootHelper(int argc, char * * argv); namespace nix { -enum struct AliasStatus { - /** Aliases that don't go away */ - AcceptedShorthand, - /** Aliases that will go away */ - Deprecated, -}; - -/** An alias, except for the original syntax, which is in the map key. */ -struct AliasInfo { - AliasStatus status; - std::vector replacement; -}; - /* Check if we have a non-loopback/link-local network interface. 
*/ static bool haveInternet() { @@ -151,54 +138,34 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs, virtual RootArgs .category = miscCategory, .handler = {[&]() { refresh = true; }}, }); - } - std::map aliases = { - {"add-to-store", { AliasStatus::Deprecated, {"store", "add-path"}}}, - {"cat-nar", { AliasStatus::Deprecated, {"nar", "cat"}}}, - {"cat-store", { AliasStatus::Deprecated, {"store", "cat"}}}, - {"copy-sigs", { AliasStatus::Deprecated, {"store", "copy-sigs"}}}, - {"dev-shell", { AliasStatus::Deprecated, {"develop"}}}, - {"diff-closures", { AliasStatus::Deprecated, {"store", "diff-closures"}}}, - {"dump-path", { AliasStatus::Deprecated, {"store", "dump-path"}}}, - {"hash-file", { AliasStatus::Deprecated, {"hash", "file"}}}, - {"hash-path", { AliasStatus::Deprecated, {"hash", "path"}}}, - {"ls-nar", { AliasStatus::Deprecated, {"nar", "ls"}}}, - {"ls-store", { AliasStatus::Deprecated, {"store", "ls"}}}, - {"make-content-addressable", { AliasStatus::Deprecated, {"store", "make-content-addressed"}}}, - {"optimise-store", { AliasStatus::Deprecated, {"store", "optimise"}}}, - {"ping-store", { AliasStatus::Deprecated, {"store", "info"}}}, - {"sign-paths", { AliasStatus::Deprecated, {"store", "sign"}}}, - {"shell", { AliasStatus::AcceptedShorthand, {"env", "shell"}}}, - {"show-derivation", { AliasStatus::Deprecated, {"derivation", "show"}}}, - {"show-config", { AliasStatus::Deprecated, {"config", "show"}}}, - {"to-base16", { AliasStatus::Deprecated, {"hash", "to-base16"}}}, - {"to-base32", { AliasStatus::Deprecated, {"hash", "to-base32"}}}, - {"to-base64", { AliasStatus::Deprecated, {"hash", "to-base64"}}}, - {"verify", { AliasStatus::Deprecated, {"store", "verify"}}}, - {"doctor", { AliasStatus::Deprecated, {"config", "check"}}}, + aliases = { + {"add-to-store", { AliasStatus::Deprecated, {"store", "add-path"}}}, + {"cat-nar", { AliasStatus::Deprecated, {"nar", "cat"}}}, + {"cat-store", { AliasStatus::Deprecated, {"store", "cat"}}}, + {"copy-sigs", { AliasStatus::Deprecated, {"store", "copy-sigs"}}}, + {"dev-shell", { AliasStatus::Deprecated, {"develop"}}}, + {"diff-closures", { AliasStatus::Deprecated, {"store", "diff-closures"}}}, + {"dump-path", { AliasStatus::Deprecated, {"store", "dump-path"}}}, + {"hash-file", { AliasStatus::Deprecated, {"hash", "file"}}}, + {"hash-path", { AliasStatus::Deprecated, {"hash", "path"}}}, + {"ls-nar", { AliasStatus::Deprecated, {"nar", "ls"}}}, + {"ls-store", { AliasStatus::Deprecated, {"store", "ls"}}}, + {"make-content-addressable", { AliasStatus::Deprecated, {"store", "make-content-addressed"}}}, + {"optimise-store", { AliasStatus::Deprecated, {"store", "optimise"}}}, + {"ping-store", { AliasStatus::Deprecated, {"store", "info"}}}, + {"sign-paths", { AliasStatus::Deprecated, {"store", "sign"}}}, + {"shell", { AliasStatus::AcceptedShorthand, {"env", "shell"}}}, + {"show-derivation", { AliasStatus::Deprecated, {"derivation", "show"}}}, + {"show-config", { AliasStatus::Deprecated, {"config", "show"}}}, + {"to-base16", { AliasStatus::Deprecated, {"hash", "to-base16"}}}, + {"to-base32", { AliasStatus::Deprecated, {"hash", "to-base32"}}}, + {"to-base64", { AliasStatus::Deprecated, {"hash", "to-base64"}}}, + {"verify", { AliasStatus::Deprecated, {"store", "verify"}}}, + {"doctor", { AliasStatus::Deprecated, {"config", "check"}}}, + }; }; - bool aliasUsed = false; - - Strings::iterator rewriteArgs(Strings & args, Strings::iterator pos) override - { - if (aliasUsed || command || pos == args.end()) return pos; - auto arg = *pos; - auto i = 
aliases.find(arg); - if (i == aliases.end()) return pos; - auto & info = i->second; - if (info.status == AliasStatus::Deprecated) { - warn("'%s' is a deprecated alias for '%s'", - arg, concatStringsSep(" ", info.replacement)); - } - pos = args.erase(pos); - for (auto j = info.replacement.rbegin(); j != info.replacement.rend(); ++j) - pos = args.insert(pos, *j); - aliasUsed = true; - return pos; - } - std::string description() override { return "a tool for reproducible and declarative configuration management"; diff --git a/src/nix/store-info.cc b/src/nix/store-info.cc index 8b4ac9b308f..9402e82281a 100644 --- a/src/nix/store-info.cc +++ b/src/nix/store-info.cc @@ -7,7 +7,7 @@ using namespace nix; -struct CmdPingStore : StoreCommand, MixJSON +struct CmdInfoStore : StoreCommand, MixJSON { std::string description() override { @@ -46,15 +46,4 @@ struct CmdPingStore : StoreCommand, MixJSON } }; -struct CmdInfoStore : CmdPingStore -{ - void run(nix::ref store) override - { - warn("'nix store ping' is a deprecated alias for 'nix store info'"); - CmdPingStore::run(store); - } -}; - - -static auto rCmdPingStore = registerCommand2({"store", "info"}); -static auto rCmdInfoStore = registerCommand2({"store", "ping"}); +static auto rCmdInfoStore = registerCommand2({"store", "info"}); diff --git a/src/nix/store.cc b/src/nix/store.cc index b40b6d06847..80f9363cade 100644 --- a/src/nix/store.cc +++ b/src/nix/store.cc @@ -5,7 +5,11 @@ using namespace nix; struct CmdStore : NixMultiCommand { CmdStore() : NixMultiCommand("store", RegisterCommand::getCommandsFor({"store"})) - { } + { + aliases = { + {"ping", { AliasStatus::Deprecated, {"info"}}}, + }; + } std::string description() override { From 497fe6dd3182f75771667a350a7dcd1ad1018299 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 10 Apr 2025 18:42:04 +0200 Subject: [PATCH 333/361] Make `nix profile install` an alias of `nix profile add` --- doc/manual/meson.build | 1 - src/nix/profile.cc | 8 +++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/doc/manual/meson.build b/doc/manual/meson.build index 33dea3a2c62..f7d3f44c59d 100644 --- a/doc/manual/meson.build +++ b/doc/manual/meson.build @@ -250,7 +250,6 @@ nix3_manpages = [ 'nix3-print-dev-env', 'nix3-profile-diff-closures', 'nix3-profile-history', - 'nix3-profile-install', 'nix3-profile-list', 'nix3-profile', 'nix3-profile-remove', diff --git a/src/nix/profile.cc b/src/nix/profile.cc index b22421a6069..13ab0f659fe 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -984,10 +984,12 @@ struct CmdProfile : NixMultiCommand {"history", []() { return make_ref(); }}, {"rollback", []() { return make_ref(); }}, {"wipe-history", []() { return make_ref(); }}, - // 2025-04-05 Deprecated in favor of "add" - {"install", []() { return make_ref(); }}, }) - { } + { + aliases = { + {"install", { AliasStatus::Deprecated, {"add"}}}, + }; + } std::string description() override { From 2596288f8800e088721559889cc15926eff25772 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 11 Apr 2025 20:56:51 +0000 Subject: [PATCH 334/361] Prepare release v3.3.0 From 454e0f798db5b4976280557c8d11c57fa1f50f62 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 11 Apr 2025 20:56:54 +0000 Subject: [PATCH 335/361] Set .version-determinate to 3.3.0 --- .version-determinate | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version-determinate b/.version-determinate 
index e4604e3afd0..15a27998172 100644 --- a/.version-determinate +++ b/.version-determinate @@ -1 +1 @@ -3.2.1 +3.3.0 From 8bd8f5a869575b570913979e42bd1b13b5a1c150 Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Fri, 11 Apr 2025 14:00:26 -0700 Subject: [PATCH 336/361] Add Determinate Nix 3.3.0 release notes --- doc/manual/source/SUMMARY.md.in | 1 + doc/manual/source/release-notes-determinate/changes.md | 4 +++- doc/manual/source/release-notes-determinate/rl-3.3.0.md | 5 +++++ 3 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 doc/manual/source/release-notes-determinate/rl-3.3.0.md diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index e2e2ec48cd7..0e1ff7f8455 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -128,6 +128,7 @@ - [Contributing](development/contributing.md) - [Determinate Nix Release Notes](release-notes-determinate/index.md) - [Changes between Nix and Determinate Nix](release-notes-determinate/changes.md) + - [Release 3.3.0 (2025-04-11)](release-notes-determinate/rl-3.3.0.md) - [Release 3.1.0 (2025-03-27)](release-notes-determinate/rl-3.1.0.md) - [Release 3.0.0 (2025-03-04)](release-notes-determinate/rl-3.0.0.md) - [Nix Release Notes](release-notes/index.md) diff --git a/doc/manual/source/release-notes-determinate/changes.md b/doc/manual/source/release-notes-determinate/changes.md index 8e6d053d0f6..4e5316708af 100644 --- a/doc/manual/source/release-notes-determinate/changes.md +++ b/doc/manual/source/release-notes-determinate/changes.md @@ -1,9 +1,11 @@ # Changes between Nix and Determinate Nix -This section lists the differences between upstream Nix 2.24 and Determinate Nix 3.1.0. +This section lists the differences between upstream Nix 2.24 and Determinate Nix 3.3.0. * In Determinate Nix, flakes are stable. You no longer need to enable the `flakes` experimental feature. * In Determinate Nix, the new Nix CLI (i.e. the `nix` command) is stable. You no longer need to enable the `nix-command` experimental feature. * Determinate Nix has a setting [`json-log-path`](@docroot@/command-ref/conf-file.md#conf-json-log-path) to send a copy of all Nix log messages (in JSON format) to a file or Unix domain socket. + +* Determinate Nix has made `nix profile install` an alias to `nix profile add`, a more symmetrical antonym of `nix profile remove`. diff --git a/doc/manual/source/release-notes-determinate/rl-3.3.0.md b/doc/manual/source/release-notes-determinate/rl-3.3.0.md new file mode 100644 index 00000000000..badf96415df --- /dev/null +++ b/doc/manual/source/release-notes-determinate/rl-3.3.0.md @@ -0,0 +1,5 @@ +# Release 3.3.0 (2025-04-11) + +* Based on [upstream Nix 2.28.1](../release-notes/rl-2.28.md). + +* The `nix profile install` command is now an alias to `nix profile add`, a more symmetrical antonym of `nix profile remove`. 
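For illustration, the headline change in these release notes boils down to the following; the package reference `nixpkgs#hello` and the resulting profile entry name are just examples.

    nix profile add nixpkgs#hello       # preferred spelling
    nix profile install nixpkgs#hello   # still accepted, as a deprecated alias of `add`
    nix profile remove hello            # the symmetrical antonym that motivated the rename
    nix profile list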
From beab9eb978105cccafd0710f06408b41d872395e Mon Sep 17 00:00:00 2001 From: Philipp Otterbein Date: Wed, 19 Feb 2025 18:51:02 +0100 Subject: [PATCH 337/361] libstore S3: fix progress bar and make file transfers interruptible (cherry picked from commit 9da01e69f96346d73c2d1c03adce109f3e57a9a4) --- src/libstore/filetransfer.cc | 4 - src/libstore/s3-binary-cache-store.cc | 117 ++++++++++++++++++++++---- 2 files changed, 102 insertions(+), 19 deletions(-) diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index 49453f6dfdf..485250a6bf7 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -789,10 +789,6 @@ struct curlFileTransfer : public FileTransfer S3Helper s3Helper(profile, region, scheme, endpoint); - Activity act(*logger, lvlTalkative, actFileTransfer, - fmt("downloading '%s'", request.uri), - {request.uri}, request.parentAct); - // FIXME: implement ETag auto s3Res = s3Helper.getObject(bucketName, key); FileTransferResult res; diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 87f5feb45a6..ca03c7cd8a7 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -160,7 +160,10 @@ ref S3Helper::makeConfig( S3Helper::FileTransferResult S3Helper::getObject( const std::string & bucketName, const std::string & key) { - debug("fetching 's3://%s/%s'...", bucketName, key); + std::string uri = "s3://" + bucketName + "/" + key; + Activity act(*logger, lvlTalkative, actFileTransfer, + fmt("downloading '%s'", uri), + Logger::Fields{uri}, getCurActivity()); auto request = Aws::S3::Model::GetObjectRequest() @@ -171,6 +174,26 @@ S3Helper::FileTransferResult S3Helper::getObject( return Aws::New("STRINGSTREAM"); }); + size_t bytesDone = 0; + size_t bytesExpected = 0; + request.SetDataReceivedEventHandler([&](const Aws::Http::HttpRequest * req, Aws::Http::HttpResponse * resp, long long l) { + if (!bytesExpected && resp->HasHeader("Content-Length")) { + if (auto length = string2Int(resp->GetHeader("Content-Length"))) { + bytesExpected = *length; + } + } + bytesDone += l; + act.progress(bytesDone, bytesExpected); + }); + + request.SetContinueRequestHandler([](const Aws::Http::HttpRequest*) { + try { + checkInterrupt(); + return true; + } catch(...) 
{} + return false; + }); + FileTransferResult res; auto now1 = std::chrono::steady_clock::now(); @@ -180,6 +203,8 @@ S3Helper::FileTransferResult S3Helper::getObject( auto result = checkAws(fmt("AWS error fetching '%s'", key), client->GetObject(request)); + act.progress(result.GetContentLength(), result.GetContentLength()); + res.data = decompress(result.GetContentEncoding(), dynamic_cast(result.GetBody()).str()); @@ -307,11 +332,35 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual std::shared_ptr transferManager; std::once_flag transferManagerCreated; + struct AsyncContext : public Aws::Client::AsyncCallerContext + { + mutable std::mutex mutex; + mutable std::condition_variable cv; + const Activity & act; + + void notify() const + { + cv.notify_one(); + } + + void wait() const + { + std::unique_lock lk(mutex); + cv.wait(lk); + } + + AsyncContext(const Activity & act) : act(act) {} + }; + void uploadFile(const std::string & path, std::shared_ptr> istream, const std::string & mimeType, const std::string & contentEncoding) { + std::string uri = "s3://" + bucketName + "/" + path; + Activity act(*logger, lvlTalkative, actFileTransfer, + fmt("uploading '%s'", uri), + Logger::Fields{uri}, getCurActivity()); istream->seekg(0, istream->end); auto size = istream->tellg(); istream->seekg(0, istream->beg); @@ -330,16 +379,25 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual transferConfig.bufferSize = bufferSize; transferConfig.uploadProgressCallback = - [](const TransferManager *transferManager, - const std::shared_ptr - &transferHandle) + [](const TransferManager * transferManager, + const std::shared_ptr & transferHandle) + { + auto context = std::dynamic_pointer_cast(transferHandle->GetContext()); + size_t bytesDone = transferHandle->GetBytesTransferred(); + size_t bytesTotal = transferHandle->GetBytesTotalSize(); + try { + checkInterrupt(); + context->act.progress(bytesDone, bytesTotal); + } catch (...) { + context->notify(); + } + }; + transferConfig.transferStatusUpdatedCallback = + [](const TransferManager * transferManager, + const std::shared_ptr & transferHandle) { - //FIXME: find a way to properly abort the multipart upload. - //checkInterrupt(); - debug("upload progress ('%s'): '%d' of '%d' bytes", - transferHandle->GetKey(), - transferHandle->GetBytesTransferred(), - transferHandle->GetBytesTotalSize()); + auto context = std::dynamic_pointer_cast(transferHandle->GetContext()); + context->notify(); }; transferManager = TransferManager::Create(transferConfig); @@ -353,29 +411,56 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual if (contentEncoding != "") throw Error("setting a content encoding is not supported with S3 multi-part uploads"); + auto context = std::make_shared(act); std::shared_ptr transferHandle = transferManager->UploadFile( istream, bucketName, path, mimeType, Aws::Map(), - nullptr /*, contentEncoding */); - - transferHandle->WaitUntilFinished(); + context /*, contentEncoding */); + + TransferStatus status = transferHandle->GetStatus(); + while (status == TransferStatus::IN_PROGRESS || status == TransferStatus::NOT_STARTED) { + try { + checkInterrupt(); + context->wait(); + } catch (...) 
{ + transferHandle->Cancel(); + transferHandle->WaitUntilFinished(); + } + status = transferHandle->GetStatus(); + } + act.progress(transferHandle->GetBytesTransferred(), transferHandle->GetBytesTotalSize()); - if (transferHandle->GetStatus() == TransferStatus::FAILED) + if (status == TransferStatus::FAILED) throw Error("AWS error: failed to upload 's3://%s/%s': %s", bucketName, path, transferHandle->GetLastError().GetMessage()); - if (transferHandle->GetStatus() != TransferStatus::COMPLETED) + if (status != TransferStatus::COMPLETED) throw Error("AWS error: transfer status of 's3://%s/%s' in unexpected state", bucketName, path); } else { + act.progress(0, size); auto request = Aws::S3::Model::PutObjectRequest() .WithBucket(bucketName) .WithKey(path); + size_t bytesSent = 0; + request.SetDataSentEventHandler([&](const Aws::Http::HttpRequest * req, long long l) { + bytesSent += l; + act.progress(bytesSent, size); + }); + + request.SetContinueRequestHandler([](const Aws::Http::HttpRequest*) { + try { + checkInterrupt(); + return true; + } catch(...) {} + return false; + }); + request.SetContentType(mimeType); if (contentEncoding != "") @@ -385,6 +470,8 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual auto result = checkAws(fmt("AWS error uploading '%s'", path), s3Helper.client->PutObject(request)); + + act.progress(size, size); } auto now2 = std::chrono::steady_clock::now(); From c53bd8905b239bf341df39d6488008f36abd6f8d Mon Sep 17 00:00:00 2001 From: Philipp Otterbein Date: Wed, 12 Mar 2025 00:50:20 +0100 Subject: [PATCH 338/361] libstore: same progress bar behavior for PUT and POST requests - no differentiation between uploads and downloads in CLI (cherry picked from commit db297d3dda12306459341da01e9892b4df2d6d37) --- src/libstore/filetransfer.cc | 24 +++++-------------- .../include/nix/store/filetransfer.hh | 2 +- 2 files changed, 7 insertions(+), 19 deletions(-) diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index 485250a6bf7..08c78213914 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -95,7 +95,7 @@ struct curlFileTransfer : public FileTransfer : fileTransfer(fileTransfer) , request(request) , act(*logger, lvlTalkative, actFileTransfer, - request.post ? "" : fmt(request.data ? "uploading '%s'" : "downloading '%s'", request.uri), + fmt("%sing '%s'", request.verb(), request.uri), {request.uri}, request.parentAct) , callback(std::move(callback)) , finalSink([this](std::string_view data) { @@ -272,19 +272,11 @@ struct curlFileTransfer : public FileTransfer return getInterrupted(); } - int silentProgressCallback(curl_off_t dltotal, curl_off_t dlnow) - { - return getInterrupted(); - } - static int progressCallbackWrapper(void * userp, curl_off_t dltotal, curl_off_t dlnow, curl_off_t ultotal, curl_off_t ulnow) { - return ((TransferItem *) userp)->progressCallback(dltotal, dlnow); - } - - static int silentProgressCallbackWrapper(void * userp, curl_off_t dltotal, curl_off_t dlnow, curl_off_t ultotal, curl_off_t ulnow) - { - return ((TransferItem *) userp)->silentProgressCallback(dltotal, dlnow); + auto & item = *static_cast(userp); + auto isUpload = bool(item.request.data); + return item.progressCallback(isUpload ? ultotal : dltotal, isUpload ? 
ulnow : dlnow); } static int debugCallback(CURL * handle, curl_infotype type, char * data, size_t size, void * userptr) @@ -351,10 +343,7 @@ struct curlFileTransfer : public FileTransfer curl_easy_setopt(req, CURLOPT_HEADERFUNCTION, TransferItem::headerCallbackWrapper); curl_easy_setopt(req, CURLOPT_HEADERDATA, this); - if (request.post) - curl_easy_setopt(req, CURLOPT_XFERINFOFUNCTION, silentProgressCallbackWrapper); - else - curl_easy_setopt(req, CURLOPT_XFERINFOFUNCTION, progressCallbackWrapper); + curl_easy_setopt(req, CURLOPT_XFERINFOFUNCTION, progressCallbackWrapper); curl_easy_setopt(req, CURLOPT_XFERINFODATA, this); curl_easy_setopt(req, CURLOPT_NOPROGRESS, 0); @@ -447,8 +436,7 @@ struct curlFileTransfer : public FileTransfer if (httpStatus == 304 && result.etag == "") result.etag = request.expectedETag; - if (!request.post) - act.progress(result.bodySize, result.bodySize); + act.progress(result.bodySize, result.bodySize); done = true; callback(std::move(result)); } diff --git a/src/libstore/include/nix/store/filetransfer.hh b/src/libstore/include/nix/store/filetransfer.hh index 217c52d77f6..f87f68e7fc8 100644 --- a/src/libstore/include/nix/store/filetransfer.hh +++ b/src/libstore/include/nix/store/filetransfer.hh @@ -77,7 +77,7 @@ struct FileTransferRequest FileTransferRequest(std::string_view uri) : uri(uri), parentAct(getCurActivity()) { } - std::string verb() + std::string verb() const { return data ? "upload" : "download"; } From 61bb40583987ccc2738f488de4f2e24b7cab0c2a Mon Sep 17 00:00:00 2001 From: Philipp Otterbein Date: Fri, 11 Apr 2025 22:34:15 +0200 Subject: [PATCH 339/361] add isInterrupted() call and replace some checkInterrupt() occurrences (cherry picked from commit 49f757c24ae10e6d32c19e27fd646fc21aca7679) --- src/libstore/s3-binary-cache-store.cc | 17 ++++------------- src/libutil/include/nix/util/signals.hh | 5 +++++ .../unix/include/nix/util/signals-impl.hh | 13 +++++++++---- .../windows/include/nix/util/signals-impl.hh | 7 ++++++- 4 files changed, 24 insertions(+), 18 deletions(-) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index ca03c7cd8a7..f9e5833077e 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -187,11 +187,7 @@ S3Helper::FileTransferResult S3Helper::getObject( }); request.SetContinueRequestHandler([](const Aws::Http::HttpRequest*) { - try { - checkInterrupt(); - return true; - } catch(...) {} - return false; + return !isInterrupted(); }); FileTransferResult res; @@ -420,10 +416,9 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual TransferStatus status = transferHandle->GetStatus(); while (status == TransferStatus::IN_PROGRESS || status == TransferStatus::NOT_STARTED) { - try { - checkInterrupt(); + if (!isInterrupted()) { context->wait(); - } catch (...) { + } else { transferHandle->Cancel(); transferHandle->WaitUntilFinished(); } @@ -454,11 +449,7 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual }); request.SetContinueRequestHandler([](const Aws::Http::HttpRequest*) { - try { - checkInterrupt(); - return true; - } catch(...) 
{} - return false; + return !isInterrupted(); }); request.SetContentType(mimeType); diff --git a/src/libutil/include/nix/util/signals.hh b/src/libutil/include/nix/util/signals.hh index 45130a90cc4..5a2ba8e75b7 100644 --- a/src/libutil/include/nix/util/signals.hh +++ b/src/libutil/include/nix/util/signals.hh @@ -26,6 +26,11 @@ static inline bool getInterrupted(); */ void setInterruptThrown(); +/** + * @note Does nothing on Windows + */ +static inline bool isInterrupted(); + /** * @note Does nothing on Windows */ diff --git a/src/libutil/unix/include/nix/util/signals-impl.hh b/src/libutil/unix/include/nix/util/signals-impl.hh index ffa96734409..7397744b2ae 100644 --- a/src/libutil/unix/include/nix/util/signals-impl.hh +++ b/src/libutil/unix/include/nix/util/signals-impl.hh @@ -85,17 +85,22 @@ static inline bool getInterrupted() return unix::_isInterrupted; } +static inline bool isInterrupted() +{ + using namespace unix; + return _isInterrupted || (interruptCheck && interruptCheck()); +} + /** * Throw `Interrupted` exception if the process has been interrupted. * * Call this in long-running loops and between slow operations to terminate * them as needed. */ -void inline checkInterrupt() +inline void checkInterrupt() { - using namespace unix; - if (_isInterrupted || (interruptCheck && interruptCheck())) - _interrupted(); + if (isInterrupted()) + unix::_interrupted(); } /** diff --git a/src/libutil/windows/include/nix/util/signals-impl.hh b/src/libutil/windows/include/nix/util/signals-impl.hh index 043f39100ac..f716ffd1a68 100644 --- a/src/libutil/windows/include/nix/util/signals-impl.hh +++ b/src/libutil/windows/include/nix/util/signals-impl.hh @@ -22,7 +22,12 @@ inline void setInterruptThrown() /* Do nothing for now */ } -void inline checkInterrupt() +static inline bool isInterrupted() +{ + /* Do nothing for now */ +} + +inline void checkInterrupt() { /* Do nothing for now */ } From c1c0e20f2ec713951e223c950695ed8f7d068f68 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 12 Apr 2025 02:34:34 +0000 Subject: [PATCH 340/361] Prepare release v3.3.1 From 398104dcbfa4ae55bcb73c048b86444b7a3edacb Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 12 Apr 2025 02:34:37 +0000 Subject: [PATCH 341/361] Set .version-determinate to 3.3.1 --- .version-determinate | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version-determinate b/.version-determinate index 15a27998172..bea438e9ade 100644 --- a/.version-determinate +++ b/.version-determinate @@ -1 +1 @@ -3.3.0 +3.3.1 From 9156550493929be0e49776a4f478fb8b1ae4ee25 Mon Sep 17 00:00:00 2001 From: Anthony Wang Date: Sat, 12 Apr 2025 19:17:27 -0400 Subject: [PATCH 342/361] Fix typo in string context docs (cherry picked from commit f64b8957c7fcedb5d819c6912a5236a1b5fe8433) --- doc/manual/source/language/string-context.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/source/language/string-context.md b/doc/manual/source/language/string-context.md index 6a3482cfd95..979bbf37197 100644 --- a/doc/manual/source/language/string-context.md +++ b/doc/manual/source/language/string-context.md @@ -115,7 +115,7 @@ It creates an [attribute set] representing the string context, which can be insp ## Clearing string contexts -[`buitins.unsafeDiscardStringContext`](./builtins.md#builtins-unsafeDiscardStringContext) will make a copy of a string, but with an empty string context. 
+[`builtins.unsafeDiscardStringContext`](./builtins.md#builtins-unsafeDiscardStringContext) will make a copy of a string, but with an empty string context. The returned string can be used in more ways, e.g. by operators that require the string context to be empty. The requirement to explicitly discard the string context in such use cases helps ensure that string context elements are not lost by mistake. The "unsafe" marker is only there to remind that Nix normally guarantees that dependencies are tracked, whereas the returned string has lost them. From b1a1f4bd2f4113b5b95280072fb3bec6ea77490a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 14 Apr 2025 15:18:29 +0200 Subject: [PATCH 343/361] Mention BLAKE3 in the Nix 2.27 release notes (cherry picked from commit c0ed07755a409660ca0a4aad40cfe3d1a0ad2162) --- doc/manual/source/release-notes/rl-2.27.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/doc/manual/source/release-notes/rl-2.27.md b/doc/manual/source/release-notes/rl-2.27.md index b4918029aa0..3643f747638 100644 --- a/doc/manual/source/release-notes/rl-2.27.md +++ b/doc/manual/source/release-notes/rl-2.27.md @@ -38,6 +38,15 @@ Curl created sockets without setting `FD_CLOEXEC`/`SOCK_CLOEXEC`. This could previously cause connections to remain open forever when using commands like `nix shell`. This change sets the `FD_CLOEXEC` flag using a `CURLOPT_SOCKOPTFUNCTION` callback. +- Add BLAKE3 hash algorithm [#12379](https://github.com/NixOS/nix/pull/12379) + + Nix now supports the BLAKE3 hash algorithm as an experimental feature (`blake3-hashes`): + + ```console + # nix hash file ./file --type blake3 --extra-experimental-features blake3-hashes + blake3-34P4p+iZXcbbyB1i4uoF7eWCGcZHjmaRn6Y7QdynLwU= + ``` + # Contributors This release was made possible by the following 21 contributors: From a603401cddd4db3f19c27a7f3078dcd3e600074e Mon Sep 17 00:00:00 2001 From: Philipp Otterbein Date: Tue, 4 Mar 2025 18:05:33 +0100 Subject: [PATCH 344/361] libstore: curl retry: reset content-encoding and don't use string after move (cherry picked from commit b129fc8237edea8bf2f55816ac90efd15befb216) --- src/libstore/filetransfer.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index 08c78213914..a917188d92f 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -22,10 +22,8 @@ #include -#include #include #include -#include #include #include #include @@ -525,6 +523,8 @@ struct curlFileTransfer : public FileTransfer warn("%s; retrying from offset %d in %d ms", exc.what(), writtenToSink, ms); else warn("%s; retrying in %d ms", exc.what(), ms); + decompressionSink.reset(); + errorSink.reset(); embargo = std::chrono::steady_clock::now() + std::chrono::milliseconds(ms); fileTransfer.enqueueItem(shared_from_this()); } From 9a969e29cf24c8bc73331df131af691384026a4c Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 14 Apr 2025 14:09:30 +0200 Subject: [PATCH 345/361] call-flake.nix: refactor: Bring mapAttrs into scope (cherry picked from commit 674375b021ce9e229e575204395357f8d317bef5) --- src/libflake/call-flake.nix | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/libflake/call-flake.nix b/src/libflake/call-flake.nix index 1e9e210481d..03a52c87cfb 100644 --- a/src/libflake/call-flake.nix +++ b/src/libflake/call-flake.nix @@ -14,6 +14,7 @@ overrides: fetchTreeFinal: let + inherit (builtins) mapAttrs; lockFile = builtins.fromJSON lockFileStr; @@ -35,7 +36,7 @@ 
let (resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path}) (builtins.tail path); - allNodes = builtins.mapAttrs ( + allNodes = mapAttrs ( key: node: let @@ -60,9 +61,7 @@ let flake = import (outPath + "/flake.nix"); - inputs = builtins.mapAttrs (inputName: inputSpec: allNodes.${resolveInput inputSpec}) ( - node.inputs or { } - ); + inputs = mapAttrs (inputName: inputSpec: allNodes.${resolveInput inputSpec}) (node.inputs or { }); outputs = flake.outputs (inputs // { self = result; }); From 671364748c97a47c7aa5cbef025c752a3c79a788 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 15 Apr 2025 09:10:18 +0200 Subject: [PATCH 346/361] call-flake.nix: allNodes.${key} -> allNodes.${key}.result (cherry picked from commit 9de9410f295a3daf5c97ea9fcbdcb0d3c5aafd5d) --- src/libflake/call-flake.nix | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/src/libflake/call-flake.nix b/src/libflake/call-flake.nix index 03a52c87cfb..430dfabddca 100644 --- a/src/libflake/call-flake.nix +++ b/src/libflake/call-flake.nix @@ -48,7 +48,7 @@ let else if node.locked.type == "path" && builtins.substring 0 1 node.locked.path != "/" then parentNode.sourceInfo // { - outPath = parentNode.outPath + ("/" + node.locked.path); + outPath = parentNode.result.outPath + ("/" + node.locked.path); } else # FIXME: remove obsolete node.info. @@ -61,7 +61,9 @@ let flake = import (outPath + "/flake.nix"); - inputs = mapAttrs (inputName: inputSpec: allNodes.${resolveInput inputSpec}) (node.inputs or { }); + inputs = mapAttrs (inputName: inputSpec: allNodes.${resolveInput inputSpec}.result) ( + node.inputs or { } + ); outputs = flake.outputs (inputs // { self = result; }); @@ -84,12 +86,15 @@ let }; in - if node.flake or true then - assert builtins.isFunction flake.outputs; - result - else - sourceInfo + { + result = + if node.flake or true then + assert builtins.isFunction flake.outputs; + result + else + sourceInfo; + } ) lockFile.nodes; in -allNodes.${lockFile.root} +allNodes.${lockFile.root}.result From 818fc68db687ce3bc769760629967eb340ed931d Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 15 Apr 2025 09:28:23 +0200 Subject: [PATCH 347/361] fix: Evaluate flake parent source without evaluating its outputs This requires that we refer to the `sourceInfo` instead of the `result`. However, `sourceInfo` does not create a chain of basedir resolution, so we add that back with `flakeDir`. (cherry picked from commit 2109a5a2066d0d49a1bcc5b44b2a4d84b5d313bd) --- src/libflake/call-flake.nix | 11 ++++++++++- tests/functional/flakes/relative-paths.sh | 21 +++++++++++++++++++++ 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/src/libflake/call-flake.nix b/src/libflake/call-flake.nix index 430dfabddca..fe326291f1f 100644 --- a/src/libflake/call-flake.nix +++ b/src/libflake/call-flake.nix @@ -42,13 +42,20 @@ let parentNode = allNodes.${getInputByPath lockFile.root node.parent}; + flakeDir = + let + dir = overrides.${key}.dir or node.locked.path or ""; + parentDir = parentNode.flakeDir; + in + if node ? parent then parentDir + ("/" + dir) else dir; + sourceInfo = if overrides ? ${key} then overrides.${key}.sourceInfo else if node.locked.type == "path" && builtins.substring 0 1 node.locked.path != "/" then parentNode.sourceInfo // { - outPath = parentNode.result.outPath + ("/" + node.locked.path); + outPath = parentNode.sourceInfo.outPath + ("/" + flakeDir); } else # FIXME: remove obsolete node.info. 
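```nix
# A minimal sketch (not the real call-flake.nix) of the point of the hunk
# above: a relative-path input is located from the parent's `sourceInfo`
# alone, so the parent's flake outputs (`result`) are never forced.
# `parentNode` and `childDir` are hypothetical stand-ins; the real code
# chains nested directories via the `flakeDir` attribute introduced above.
let
  parentNode = {
    sourceInfo.outPath = "/nix/store/aaaa-source";     # always cheap to compute
    result = throw "outputs must not be forced here";  # would abort evaluation if touched
  };
  childDir = "example";
  childSourceInfo = parentNode.sourceInfo // {
    outPath = parentNode.sourceInfo.outPath + ("/" + childDir);
  };
in
childSourceInfo.outPath  # "/nix/store/aaaa-source/example", without touching `result`
```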
@@ -93,6 +100,8 @@ let result else sourceInfo; + + inherit flakeDir sourceInfo; } ) lockFile.nodes; diff --git a/tests/functional/flakes/relative-paths.sh b/tests/functional/flakes/relative-paths.sh index 3f7ca3f4618..4648ba98c63 100644 --- a/tests/functional/flakes/relative-paths.sh +++ b/tests/functional/flakes/relative-paths.sh @@ -108,3 +108,24 @@ EOF [[ $(nix eval "$rootFlake#z") = 90 ]] fi + +# https://github.com/NixOS/nix/pull/10089#discussion_r2041984987 +# https://github.com/NixOS/nix/issues/13018 +mkdir -p "$TEST_ROOT/issue-13018/example" +( + cd "$TEST_ROOT/issue-13018" + git init + echo '{ outputs = _: { }; }' >flake.nix + cat >example/flake.nix < Date: Mon, 14 Apr 2025 11:18:33 -0400 Subject: [PATCH 348/361] Use the same variable for content addressing in functional tests `CONTENT_ADDRESSED` -> `NIX_TESTS_CA_BY_DEFAULT` (cherry picked from commit 7acc229c8fd5c41c460a5b7aa28debf168cbce3d) --- tests/functional/build-remote-content-addressed-floating.sh | 2 +- tests/functional/build-remote.sh | 2 +- tests/functional/ca/nix-shell.sh | 2 +- tests/functional/nix-shell.sh | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/functional/build-remote-content-addressed-floating.sh b/tests/functional/build-remote-content-addressed-floating.sh index 33d667f9211..37091590573 100755 --- a/tests/functional/build-remote-content-addressed-floating.sh +++ b/tests/functional/build-remote-content-addressed-floating.sh @@ -6,6 +6,6 @@ file=build-hook-ca-floating.nix enableFeatures "ca-derivations" -CONTENT_ADDRESSED=true +NIX_TESTS_CA_BY_DEFAULT=true source build-remote.sh diff --git a/tests/functional/build-remote.sh b/tests/functional/build-remote.sh index 3231341cbf6..62cc8588840 100644 --- a/tests/functional/build-remote.sh +++ b/tests/functional/build-remote.sh @@ -13,7 +13,7 @@ unset NIX_STATE_DIR function join_by { local d=$1; shift; echo -n "$1"; shift; printf "%s" "${@/#/$d}"; } EXTRA_SYSTEM_FEATURES=() -if [[ -n "${CONTENT_ADDRESSED-}" ]]; then +if [[ -n "${NIX_TESTS_CA_BY_DEFAULT-}" ]]; then EXTRA_SYSTEM_FEATURES=("ca-derivations") fi diff --git a/tests/functional/ca/nix-shell.sh b/tests/functional/ca/nix-shell.sh index d1fbe54d19d..7b30b2ac858 100755 --- a/tests/functional/ca/nix-shell.sh +++ b/tests/functional/ca/nix-shell.sh @@ -2,6 +2,6 @@ source common.sh -CONTENT_ADDRESSED=true +NIX_TESTS_CA_BY_DEFAULT=true cd .. source ./nix-shell.sh diff --git a/tests/functional/nix-shell.sh b/tests/functional/nix-shell.sh index b054b7f7519..bc49333b505 100755 --- a/tests/functional/nix-shell.sh +++ b/tests/functional/nix-shell.sh @@ -4,7 +4,7 @@ source common.sh clearStoreIfPossible -if [[ -n ${CONTENT_ADDRESSED:-} ]]; then +if [[ -n ${NIX_TESTS_CA_BY_DEFAULT:-} ]]; then shellDotNix="$PWD/ca-shell.nix" else shellDotNix="$PWD/shell.nix" From f19184191eecaa5e02090ac07260245dffabb472 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 14 Apr 2025 11:15:56 -0400 Subject: [PATCH 349/361] Test derivation options with content-addressing too Now, both the unit and functional tests relating to derivation options are tested both ways -- with input addressing and content-addressing derivations. 
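For reference, a minimal sketch of the two flavours being exercised: the same derivation written once input-addressed and once content-addressed. The content-addressed variant mirrors the `caArgs` attributes added to the functional-test `.nix` files below and needs the `ca-derivations` experimental feature; the helper name `mkDrv` and the example values are illustrative only.

```nix
# Hypothetical helper; the tests below use the same attribute pattern.
let
  system = "my-system";
  mkDrv = extraArgs: derivation ({
    inherit system;
    name = "example";
    builder = "/bin/bash";
    args = [ "-c" "echo hello > $out" ];
  } // extraArgs);
in {
  # input-addressed: output path determined by the derivation's inputs
  ia = mkDrv { };
  # content-addressed: output path determined by the built output's contents
  ca = mkDrv {
    __contentAddressed = true;
    outputHashMode = "recursive";
    outputHashAlgo = "sha256";
  };
}
```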
(cherry picked from commit 307dbe991415404b12992d6bd73bd293f0b743e1) --- .../advanced-attributes-defaults.drv | 1 - ...d-attributes-structured-attrs-defaults.drv | 1 - .../advanced-attributes-structured-attrs.drv | 1 - .../data/derivation/advanced-attributes.drv | 1 - .../ca/advanced-attributes-defaults.drv | 1 + .../ca/advanced-attributes-defaults.json | 25 ++ ...d-attributes-structured-attrs-defaults.drv | 1 + ...-attributes-structured-attrs-defaults.json | 26 ++ .../advanced-attributes-structured-attrs.drv | 1 + .../advanced-attributes-structured-attrs.json | 44 +++ .../derivation/ca/advanced-attributes.drv | 1 + .../derivation/ca/advanced-attributes.json | 50 +++ .../ia/advanced-attributes-defaults.drv | 1 + .../advanced-attributes-defaults.json | 0 ...d-attributes-structured-attrs-defaults.drv | 1 + ...-attributes-structured-attrs-defaults.json | 0 .../advanced-attributes-structured-attrs.drv | 1 + .../advanced-attributes-structured-attrs.json | 0 .../derivation/ia/advanced-attributes.drv | 1 + .../derivation/ia/advanced-attributes.json | 47 +++ .../derivation-advanced-attrs.cc | 333 ++++++++++++++---- src/libstore/derivations.cc | 2 +- .../ca/derivation-advanced-attributes.sh | 6 + tests/functional/ca/meson.build | 3 +- .../derivation-advanced-attributes.sh | 12 +- .../advanced-attributes-defaults.nix | 22 +- ...d-attributes-structured-attrs-defaults.nix | 22 +- .../advanced-attributes-structured-attrs.nix | 23 +- .../derivation/advanced-attributes.nix | 23 +- .../ca/advanced-attributes-defaults.drv | 1 + ...d-attributes-structured-attrs-defaults.drv | 1 + .../advanced-attributes-structured-attrs.drv | 1 + .../derivation/ca/advanced-attributes.drv | 1 + .../{ => ia}/advanced-attributes-defaults.drv | 0 ...d-attributes-structured-attrs-defaults.drv | 0 .../advanced-attributes-structured-attrs.drv | 0 .../{ => ia}/advanced-attributes.drv | 0 37 files changed, 560 insertions(+), 94 deletions(-) delete mode 120000 src/libstore-tests/data/derivation/advanced-attributes-defaults.drv delete mode 120000 src/libstore-tests/data/derivation/advanced-attributes-structured-attrs-defaults.drv delete mode 120000 src/libstore-tests/data/derivation/advanced-attributes-structured-attrs.drv delete mode 120000 src/libstore-tests/data/derivation/advanced-attributes.drv create mode 120000 src/libstore-tests/data/derivation/ca/advanced-attributes-defaults.drv create mode 100644 src/libstore-tests/data/derivation/ca/advanced-attributes-defaults.json create mode 120000 src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs-defaults.drv create mode 100644 src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs-defaults.json create mode 120000 src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.drv create mode 100644 src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json create mode 120000 src/libstore-tests/data/derivation/ca/advanced-attributes.drv create mode 100644 src/libstore-tests/data/derivation/ca/advanced-attributes.json create mode 120000 src/libstore-tests/data/derivation/ia/advanced-attributes-defaults.drv rename src/libstore-tests/data/derivation/{ => ia}/advanced-attributes-defaults.json (100%) create mode 120000 src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs-defaults.drv rename src/libstore-tests/data/derivation/{ => ia}/advanced-attributes-structured-attrs-defaults.json (100%) create mode 120000 src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.drv rename 
src/libstore-tests/data/derivation/{ => ia}/advanced-attributes-structured-attrs.json (100%) create mode 120000 src/libstore-tests/data/derivation/ia/advanced-attributes.drv create mode 100644 src/libstore-tests/data/derivation/ia/advanced-attributes.json create mode 100755 tests/functional/ca/derivation-advanced-attributes.sh create mode 100644 tests/functional/derivation/ca/advanced-attributes-defaults.drv create mode 100644 tests/functional/derivation/ca/advanced-attributes-structured-attrs-defaults.drv create mode 100644 tests/functional/derivation/ca/advanced-attributes-structured-attrs.drv create mode 100644 tests/functional/derivation/ca/advanced-attributes.drv rename tests/functional/derivation/{ => ia}/advanced-attributes-defaults.drv (100%) rename tests/functional/derivation/{ => ia}/advanced-attributes-structured-attrs-defaults.drv (100%) rename tests/functional/derivation/{ => ia}/advanced-attributes-structured-attrs.drv (100%) rename tests/functional/derivation/{ => ia}/advanced-attributes.drv (100%) diff --git a/src/libstore-tests/data/derivation/advanced-attributes-defaults.drv b/src/libstore-tests/data/derivation/advanced-attributes-defaults.drv deleted file mode 120000 index f8f30ac321c..00000000000 --- a/src/libstore-tests/data/derivation/advanced-attributes-defaults.drv +++ /dev/null @@ -1 +0,0 @@ -../../../../tests/functional/derivation/advanced-attributes-defaults.drv \ No newline at end of file diff --git a/src/libstore-tests/data/derivation/advanced-attributes-structured-attrs-defaults.drv b/src/libstore-tests/data/derivation/advanced-attributes-structured-attrs-defaults.drv deleted file mode 120000 index 837e9a0e437..00000000000 --- a/src/libstore-tests/data/derivation/advanced-attributes-structured-attrs-defaults.drv +++ /dev/null @@ -1 +0,0 @@ -../../../../tests/functional/derivation/advanced-attributes-structured-attrs-defaults.drv \ No newline at end of file diff --git a/src/libstore-tests/data/derivation/advanced-attributes-structured-attrs.drv b/src/libstore-tests/data/derivation/advanced-attributes-structured-attrs.drv deleted file mode 120000 index e08bb573791..00000000000 --- a/src/libstore-tests/data/derivation/advanced-attributes-structured-attrs.drv +++ /dev/null @@ -1 +0,0 @@ -../../../../tests/functional/derivation/advanced-attributes-structured-attrs.drv \ No newline at end of file diff --git a/src/libstore-tests/data/derivation/advanced-attributes.drv b/src/libstore-tests/data/derivation/advanced-attributes.drv deleted file mode 120000 index 1dc394a0a4f..00000000000 --- a/src/libstore-tests/data/derivation/advanced-attributes.drv +++ /dev/null @@ -1 +0,0 @@ -../../../../tests/functional/derivation/advanced-attributes.drv \ No newline at end of file diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes-defaults.drv b/src/libstore-tests/data/derivation/ca/advanced-attributes-defaults.drv new file mode 120000 index 00000000000..a9b4f7fa745 --- /dev/null +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes-defaults.drv @@ -0,0 +1 @@ +../../../../../tests/functional/derivation/ca/advanced-attributes-defaults.drv \ No newline at end of file diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes-defaults.json b/src/libstore-tests/data/derivation/ca/advanced-attributes-defaults.json new file mode 100644 index 00000000000..bc67236b54f --- /dev/null +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes-defaults.json @@ -0,0 +1,25 @@ +{ + "args": [ + "-c", + "echo hello > $out" + ], + "builder": "/bin/bash", 
+ "env": { + "builder": "/bin/bash", + "name": "advanced-attributes-defaults", + "out": "/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9", + "outputHashAlgo": "sha256", + "outputHashMode": "recursive", + "system": "my-system" + }, + "inputDrvs": {}, + "inputSrcs": [], + "name": "advanced-attributes-defaults", + "outputs": { + "out": { + "hashAlgo": "sha256", + "method": "nar" + } + }, + "system": "my-system" +} diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs-defaults.drv b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs-defaults.drv new file mode 120000 index 00000000000..61da0470a77 --- /dev/null +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs-defaults.drv @@ -0,0 +1 @@ +../../../../../tests/functional/derivation/ca/advanced-attributes-structured-attrs-defaults.drv \ No newline at end of file diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs-defaults.json b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs-defaults.json new file mode 100644 index 00000000000..7d3c932b213 --- /dev/null +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs-defaults.json @@ -0,0 +1,26 @@ +{ + "args": [ + "-c", + "echo hello > $out" + ], + "builder": "/bin/bash", + "env": { + "__json": "{\"builder\":\"/bin/bash\",\"name\":\"advanced-attributes-structured-attrs-defaults\",\"outputHashAlgo\":\"sha256\",\"outputHashMode\":\"recursive\",\"outputs\":[\"out\",\"dev\"],\"system\":\"my-system\"}", + "dev": "/02qcpld1y6xhs5gz9bchpxaw0xdhmsp5dv88lh25r2ss44kh8dxz", + "out": "/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9" + }, + "inputDrvs": {}, + "inputSrcs": [], + "name": "advanced-attributes-structured-attrs-defaults", + "outputs": { + "dev": { + "hashAlgo": "sha256", + "method": "nar" + }, + "out": { + "hashAlgo": "sha256", + "method": "nar" + } + }, + "system": "my-system" +} diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.drv b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.drv new file mode 120000 index 00000000000..c396ee85363 --- /dev/null +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.drv @@ -0,0 +1 @@ +../../../../../tests/functional/derivation/ca/advanced-attributes-structured-attrs.drv \ No newline at end of file diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json new file mode 100644 index 00000000000..584fd211385 --- /dev/null +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json @@ -0,0 +1,44 @@ +{ + "args": [ + "-c", + "echo hello > $out" + ], + "builder": "/bin/bash", + "env": { + "__json": 
"{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99\"],\"disallowedRequisites\":[\"/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8\"],\"allowedRequisites\":[\"/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8\"]}},\"outputHashAlgo\":\"sha256\",\"outputHashMode\":\"recursive\",\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}", + "bin": "/04f3da1kmbr67m3gzxikmsl4vjz5zf777sv6m14ahv22r65aac9m", + "dev": "/02qcpld1y6xhs5gz9bchpxaw0xdhmsp5dv88lh25r2ss44kh8dxz", + "out": "/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9" + }, + "inputDrvs": { + "/nix/store/spfzlnkwb1v8s62yvh8vj1apd1kwjr5f-foo.drv": { + "dynamicOutputs": {}, + "outputs": [ + "out" + ] + }, + "/nix/store/x1vpzav565aqr7ccmkn0wv0svkm1qrbl-bar.drv": { + "dynamicOutputs": {}, + "outputs": [ + "out" + ] + } + }, + "inputSrcs": [], + "name": "advanced-attributes-structured-attrs", + "outputs": { + "bin": { + "hashAlgo": "sha256", + "method": "nar" + }, + "dev": { + "hashAlgo": "sha256", + "method": "nar" + }, + "out": { + "hashAlgo": "sha256", + "method": "nar" + } + }, + "system": "my-system" +} diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes.drv b/src/libstore-tests/data/derivation/ca/advanced-attributes.drv new file mode 120000 index 00000000000..acba9064d10 --- /dev/null +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes.drv @@ -0,0 +1 @@ +../../../../../tests/functional/derivation/ca/advanced-attributes.drv \ No newline at end of file diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes.json b/src/libstore-tests/data/derivation/ca/advanced-attributes.json new file mode 100644 index 00000000000..69d40b135a6 --- /dev/null +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes.json @@ -0,0 +1,50 @@ +{ + "args": [ + "-c", + "echo hello > $out" + ], + "builder": "/bin/bash", + "env": { + "__darwinAllowLocalNetworking": "1", + "__impureHostDeps": "/usr/bin/ditto", + "__noChroot": "1", + "__sandboxProfile": "sandcastle", + "allowSubstitutes": "", + "allowedReferences": "/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8", + "allowedRequisites": "/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8", + "builder": "/bin/bash", + "disallowedReferences": "/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99", + "disallowedRequisites": "/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99", + "impureEnvVars": "UNICORN", + "name": "advanced-attributes", + "out": "/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9", + "outputHashAlgo": "sha256", + "outputHashMode": "recursive", + "preferLocalBuild": "1", + "requiredSystemFeatures": "rainbow uid-range", + "system": "my-system" + }, + "inputDrvs": { + "/nix/store/spfzlnkwb1v8s62yvh8vj1apd1kwjr5f-foo.drv": { + "dynamicOutputs": {}, + "outputs": [ + "out" + ] + }, + "/nix/store/x1vpzav565aqr7ccmkn0wv0svkm1qrbl-bar.drv": { + "dynamicOutputs": {}, + "outputs": [ + "out" + ] + } + }, + "inputSrcs": [], + "name": "advanced-attributes", + "outputs": { + "out": { 
+ "hashAlgo": "sha256", + "method": "nar" + } + }, + "system": "my-system" +} diff --git a/src/libstore-tests/data/derivation/ia/advanced-attributes-defaults.drv b/src/libstore-tests/data/derivation/ia/advanced-attributes-defaults.drv new file mode 120000 index 00000000000..7f1aa367ed2 --- /dev/null +++ b/src/libstore-tests/data/derivation/ia/advanced-attributes-defaults.drv @@ -0,0 +1 @@ +../../../../../tests/functional/derivation/ia/advanced-attributes-defaults.drv \ No newline at end of file diff --git a/src/libstore-tests/data/derivation/advanced-attributes-defaults.json b/src/libstore-tests/data/derivation/ia/advanced-attributes-defaults.json similarity index 100% rename from src/libstore-tests/data/derivation/advanced-attributes-defaults.json rename to src/libstore-tests/data/derivation/ia/advanced-attributes-defaults.json diff --git a/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs-defaults.drv b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs-defaults.drv new file mode 120000 index 00000000000..77aa67353a3 --- /dev/null +++ b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs-defaults.drv @@ -0,0 +1 @@ +../../../../../tests/functional/derivation/ia/advanced-attributes-structured-attrs-defaults.drv \ No newline at end of file diff --git a/src/libstore-tests/data/derivation/advanced-attributes-structured-attrs-defaults.json b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs-defaults.json similarity index 100% rename from src/libstore-tests/data/derivation/advanced-attributes-structured-attrs-defaults.json rename to src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs-defaults.json diff --git a/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.drv b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.drv new file mode 120000 index 00000000000..a4e25feba34 --- /dev/null +++ b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.drv @@ -0,0 +1 @@ +../../../../../tests/functional/derivation/ia/advanced-attributes-structured-attrs.drv \ No newline at end of file diff --git a/src/libstore-tests/data/derivation/advanced-attributes-structured-attrs.json b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json similarity index 100% rename from src/libstore-tests/data/derivation/advanced-attributes-structured-attrs.json rename to src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json diff --git a/src/libstore-tests/data/derivation/ia/advanced-attributes.drv b/src/libstore-tests/data/derivation/ia/advanced-attributes.drv new file mode 120000 index 00000000000..ecc2f5f3822 --- /dev/null +++ b/src/libstore-tests/data/derivation/ia/advanced-attributes.drv @@ -0,0 +1 @@ +../../../../../tests/functional/derivation/ia/advanced-attributes.drv \ No newline at end of file diff --git a/src/libstore-tests/data/derivation/ia/advanced-attributes.json b/src/libstore-tests/data/derivation/ia/advanced-attributes.json new file mode 100644 index 00000000000..d51524e2056 --- /dev/null +++ b/src/libstore-tests/data/derivation/ia/advanced-attributes.json @@ -0,0 +1,47 @@ +{ + "args": [ + "-c", + "echo hello > $out" + ], + "builder": "/bin/bash", + "env": { + "__darwinAllowLocalNetworking": "1", + "__impureHostDeps": "/usr/bin/ditto", + "__noChroot": "1", + "__sandboxProfile": "sandcastle", + "allowSubstitutes": "", + "allowedReferences": 
"/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo", + "allowedRequisites": "/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo", + "builder": "/bin/bash", + "disallowedReferences": "/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar", + "disallowedRequisites": "/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar", + "impureEnvVars": "UNICORN", + "name": "advanced-attributes", + "out": "/nix/store/33a6fdmn8q9ih9d7npbnrxn2q56a4l8q-advanced-attributes", + "preferLocalBuild": "1", + "requiredSystemFeatures": "rainbow uid-range", + "system": "my-system" + }, + "inputDrvs": { + "/nix/store/4xm4wccqsvagz9gjksn24s7rip2fdy7v-foo.drv": { + "dynamicOutputs": {}, + "outputs": [ + "out" + ] + }, + "/nix/store/plsq5jbr5nhgqwcgb2qxw7jchc09dnl8-bar.drv": { + "dynamicOutputs": {}, + "outputs": [ + "out" + ] + } + }, + "inputSrcs": [], + "name": "advanced-attributes", + "outputs": { + "out": { + "path": "/nix/store/33a6fdmn8q9ih9d7npbnrxn2q56a4l8q-advanced-attributes" + } + }, + "system": "my-system" +} diff --git a/src/libstore-tests/derivation-advanced-attrs.cc b/src/libstore-tests/derivation-advanced-attrs.cc index 57b2268262f..e135b8106d2 100644 --- a/src/libstore-tests/derivation-advanced-attrs.cc +++ b/src/libstore-tests/derivation-advanced-attrs.cc @@ -18,68 +18,93 @@ using nlohmann::json; class DerivationAdvancedAttrsTest : public CharacterizationTest, public LibStoreTest { - std::filesystem::path unitTestData = getUnitTestData() / "derivation"; +protected: + std::filesystem::path unitTestData = getUnitTestData() / "derivation" / "ia"; public: std::filesystem::path goldenMaster(std::string_view testStem) const override { return unitTestData / testStem; } + + /** + * We set these in tests rather than the regular globals so we don't have + * to worry about race conditions if the tests run concurrently. + */ + ExperimentalFeatureSettings mockXpSettings; +}; + +class CaDerivationAdvancedAttrsTest : public DerivationAdvancedAttrsTest +{ + void SetUp() override + { + unitTestData = getUnitTestData() / "derivation" / "ca"; + mockXpSettings.set("experimental-features", "ca-derivations"); + } }; -#define TEST_ATERM_JSON(STEM, NAME) \ - TEST_F(DerivationAdvancedAttrsTest, Derivation_##STEM##_from_json) \ - { \ - readTest(NAME ".json", [&](const auto & encoded_) { \ - auto encoded = json::parse(encoded_); \ - /* Use DRV file instead of C++ literal as source of truth. */ \ - auto aterm = readFile(goldenMaster(NAME ".drv")); \ - auto expected = parseDerivation(*store, std::move(aterm), NAME); \ - Derivation got = Derivation::fromJSON(*store, encoded); \ - EXPECT_EQ(got, expected); \ - }); \ - } \ - \ - TEST_F(DerivationAdvancedAttrsTest, Derivation_##STEM##_to_json) \ - { \ - writeTest( \ - NAME ".json", \ - [&]() -> json { \ - /* Use DRV file instead of C++ literal as source of truth. */ \ - auto aterm = readFile(goldenMaster(NAME ".drv")); \ - return parseDerivation(*store, std::move(aterm), NAME).toJSON(*store); \ - }, \ - [](const auto & file) { return json::parse(readFile(file)); }, \ - [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ - } \ - \ - TEST_F(DerivationAdvancedAttrsTest, Derivation_##STEM##_from_aterm) \ - { \ - readTest(NAME ".drv", [&](auto encoded) { \ - /* Use JSON file instead of C++ literal as source of truth. 
*/ \ - auto json = json::parse(readFile(goldenMaster(NAME ".json"))); \ - auto expected = Derivation::fromJSON(*store, json); \ - auto got = parseDerivation(*store, std::move(encoded), NAME); \ - EXPECT_EQ(got.toJSON(*store), expected.toJSON(*store)); \ - EXPECT_EQ(got, expected); \ - }); \ - } \ - \ +template +class DerivationAdvancedAttrsBothTest : public Fixture +{}; + +using BothFixtures = ::testing::Types; + +TYPED_TEST_SUITE(DerivationAdvancedAttrsBothTest, BothFixtures); + +#define TEST_ATERM_JSON(STEM, NAME) \ + TYPED_TEST(DerivationAdvancedAttrsBothTest, Derivation_##STEM##_from_json) \ + { \ + this->readTest(NAME ".json", [&](const auto & encoded_) { \ + auto encoded = json::parse(encoded_); \ + /* Use DRV file instead of C++ literal as source of truth. */ \ + auto aterm = readFile(this->goldenMaster(NAME ".drv")); \ + auto expected = parseDerivation(*this->store, std::move(aterm), NAME, this->mockXpSettings); \ + Derivation got = Derivation::fromJSON(*this->store, encoded, this->mockXpSettings); \ + EXPECT_EQ(got, expected); \ + }); \ + } \ + \ + TYPED_TEST(DerivationAdvancedAttrsBothTest, Derivation_##STEM##_to_json) \ + { \ + this->writeTest( \ + NAME ".json", \ + [&]() -> json { \ + /* Use DRV file instead of C++ literal as source of truth. */ \ + auto aterm = readFile(this->goldenMaster(NAME ".drv")); \ + return parseDerivation(*this->store, std::move(aterm), NAME, this->mockXpSettings) \ + .toJSON(*this->store); \ + }, \ + [](const auto & file) { return json::parse(readFile(file)); }, \ + [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ + } \ + \ + TYPED_TEST(DerivationAdvancedAttrsBothTest, Derivation_##STEM##_from_aterm) \ + { \ + this->readTest(NAME ".drv", [&](auto encoded) { \ + /* Use JSON file instead of C++ literal as source of truth. 
*/ \ + auto json = json::parse(readFile(this->goldenMaster(NAME ".json"))); \ + auto expected = Derivation::fromJSON(*this->store, json, this->mockXpSettings); \ + auto got = parseDerivation(*this->store, std::move(encoded), NAME, this->mockXpSettings); \ + EXPECT_EQ(got.toJSON(*this->store), expected.toJSON(*this->store)); \ + EXPECT_EQ(got, expected); \ + }); \ + } \ + \ /* No corresponding write test, because we need to read the drv to write the json file */ -TEST_ATERM_JSON(advancedAttributes_defaults, "advanced-attributes-defaults"); TEST_ATERM_JSON(advancedAttributes, "advanced-attributes-defaults"); -TEST_ATERM_JSON(advancedAttributes_structuredAttrs_defaults, "advanced-attributes-structured-attrs"); +TEST_ATERM_JSON(advancedAttributes_defaults, "advanced-attributes"); TEST_ATERM_JSON(advancedAttributes_structuredAttrs, "advanced-attributes-structured-attrs-defaults"); +TEST_ATERM_JSON(advancedAttributes_structuredAttrs_defaults, "advanced-attributes-structured-attrs"); #undef TEST_ATERM_JSON -TEST_F(DerivationAdvancedAttrsTest, Derivation_advancedAttributes_defaults) +TYPED_TEST(DerivationAdvancedAttrsBothTest, advancedAttributes_defaults) { - readTest("advanced-attributes-defaults.drv", [&](auto encoded) { - auto got = parseDerivation(*store, std::move(encoded), "foo"); + this->readTest("advanced-attributes-defaults.drv", [&](auto encoded) { + auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); - auto drvPath = writeDerivation(*store, got, NoRepair, true); + auto drvPath = writeDerivation(*this->store, got, NoRepair, true); ParsedDerivation parsedDrv(drvPath, got); DerivationOptions options = DerivationOptions::fromParsedDerivation(parsedDrv); @@ -101,25 +126,50 @@ TEST_F(DerivationAdvancedAttrsTest, Derivation_advancedAttributes_defaults) EXPECT_EQ(checksForAllOutputs.disallowedReferences, StringSet{}); EXPECT_EQ(checksForAllOutputs.disallowedRequisites, StringSet{}); } - EXPECT_EQ(options.getRequiredSystemFeatures(got), StringSet()); - EXPECT_EQ(options.canBuildLocally(*store, got), false); - EXPECT_EQ(options.willBuildLocally(*store, got), false); + EXPECT_EQ(options.canBuildLocally(*this->store, got), false); + EXPECT_EQ(options.willBuildLocally(*this->store, got), false); EXPECT_EQ(options.substitutesAllowed(), true); EXPECT_EQ(options.useUidRange(got), false); }); }; -TEST_F(DerivationAdvancedAttrsTest, Derivation_advancedAttributes) +TEST_F(DerivationAdvancedAttrsTest, advancedAttributes_defaults) { - readTest("advanced-attributes.drv", [&](auto encoded) { - auto got = parseDerivation(*store, std::move(encoded), "foo"); + this->readTest("advanced-attributes-defaults.drv", [&](auto encoded) { + auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); - auto drvPath = writeDerivation(*store, got, NoRepair, true); + auto drvPath = writeDerivation(*this->store, got, NoRepair, true); ParsedDerivation parsedDrv(drvPath, got); DerivationOptions options = DerivationOptions::fromParsedDerivation(parsedDrv); - StringSet systemFeatures{"rainbow", "uid-range"}; + EXPECT_EQ(options.getRequiredSystemFeatures(got), StringSet{}); + }); +}; + +TEST_F(CaDerivationAdvancedAttrsTest, advancedAttributes_defaults) +{ + this->readTest("advanced-attributes-defaults.drv", [&](auto encoded) { + auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); + + auto drvPath = writeDerivation(*this->store, got, NoRepair, true); + + ParsedDerivation parsedDrv(drvPath, got); + DerivationOptions options = 
DerivationOptions::fromParsedDerivation(parsedDrv); + + EXPECT_EQ(options.getRequiredSystemFeatures(got), StringSet{"ca-derivations"}); + }); +}; + +TYPED_TEST(DerivationAdvancedAttrsBothTest, advancedAttributes) +{ + this->readTest("advanced-attributes.drv", [&](auto encoded) { + auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); + + auto drvPath = writeDerivation(*this->store, got, NoRepair, true); + + ParsedDerivation parsedDrv(drvPath, got); + DerivationOptions options = DerivationOptions::fromParsedDerivation(parsedDrv); EXPECT_TRUE(!parsedDrv.hasStructuredAttrs()); @@ -128,6 +178,23 @@ TEST_F(DerivationAdvancedAttrsTest, Derivation_advancedAttributes) EXPECT_EQ(options.impureHostDeps, StringSet{"/usr/bin/ditto"}); EXPECT_EQ(options.impureEnvVars, StringSet{"UNICORN"}); EXPECT_EQ(options.allowLocalNetworking, true); + EXPECT_EQ(options.canBuildLocally(*this->store, got), false); + EXPECT_EQ(options.willBuildLocally(*this->store, got), false); + EXPECT_EQ(options.substitutesAllowed(), false); + EXPECT_EQ(options.useUidRange(got), true); + }); +}; + +TEST_F(DerivationAdvancedAttrsTest, advancedAttributes) +{ + this->readTest("advanced-attributes.drv", [&](auto encoded) { + auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); + + auto drvPath = writeDerivation(*this->store, got, NoRepair, true); + + ParsedDerivation parsedDrv(drvPath, got); + DerivationOptions options = DerivationOptions::fromParsedDerivation(parsedDrv); + { auto * checksForAllOutputs_ = std::get_if<0>(&options.outputChecks); ASSERT_TRUE(checksForAllOutputs_ != nullptr); @@ -142,20 +209,55 @@ TEST_F(DerivationAdvancedAttrsTest, Derivation_advancedAttributes) EXPECT_EQ( checksForAllOutputs.disallowedRequisites, StringSet{"/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar"}); } + + StringSet systemFeatures{"rainbow", "uid-range"}; + + EXPECT_EQ(options.getRequiredSystemFeatures(got), systemFeatures); + }); +}; + +TEST_F(CaDerivationAdvancedAttrsTest, advancedAttributes) +{ + this->readTest("advanced-attributes.drv", [&](auto encoded) { + auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); + + auto drvPath = writeDerivation(*this->store, got, NoRepair, true); + + ParsedDerivation parsedDrv(drvPath, got); + DerivationOptions options = DerivationOptions::fromParsedDerivation(parsedDrv); + + { + auto * checksForAllOutputs_ = std::get_if<0>(&options.outputChecks); + ASSERT_TRUE(checksForAllOutputs_ != nullptr); + auto & checksForAllOutputs = *checksForAllOutputs_; + + EXPECT_EQ( + checksForAllOutputs.allowedReferences, + StringSet{"/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8"}); + EXPECT_EQ( + checksForAllOutputs.allowedRequisites, + StringSet{"/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8"}); + EXPECT_EQ( + checksForAllOutputs.disallowedReferences, + StringSet{"/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99"}); + EXPECT_EQ( + checksForAllOutputs.disallowedRequisites, + StringSet{"/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99"}); + } + + StringSet systemFeatures{"rainbow", "uid-range"}; + systemFeatures.insert("ca-derivations"); + EXPECT_EQ(options.getRequiredSystemFeatures(got), systemFeatures); - EXPECT_EQ(options.canBuildLocally(*store, got), false); - EXPECT_EQ(options.willBuildLocally(*store, got), false); - EXPECT_EQ(options.substitutesAllowed(), false); - EXPECT_EQ(options.useUidRange(got), true); }); }; -TEST_F(DerivationAdvancedAttrsTest, 
Derivation_advancedAttributes_structuredAttrs_defaults) +TYPED_TEST(DerivationAdvancedAttrsBothTest, advancedAttributes_structuredAttrs_defaults) { - readTest("advanced-attributes-structured-attrs-defaults.drv", [&](auto encoded) { - auto got = parseDerivation(*store, std::move(encoded), "foo"); + this->readTest("advanced-attributes-structured-attrs-defaults.drv", [&](auto encoded) { + auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); - auto drvPath = writeDerivation(*store, got, NoRepair, true); + auto drvPath = writeDerivation(*this->store, got, NoRepair, true); ParsedDerivation parsedDrv(drvPath, got); DerivationOptions options = DerivationOptions::fromParsedDerivation(parsedDrv); @@ -176,25 +278,50 @@ TEST_F(DerivationAdvancedAttrsTest, Derivation_advancedAttributes_structuredAttr EXPECT_EQ(checksPerOutput.size(), 0); } - EXPECT_EQ(options.getRequiredSystemFeatures(got), StringSet()); - EXPECT_EQ(options.canBuildLocally(*store, got), false); - EXPECT_EQ(options.willBuildLocally(*store, got), false); + EXPECT_EQ(options.canBuildLocally(*this->store, got), false); + EXPECT_EQ(options.willBuildLocally(*this->store, got), false); EXPECT_EQ(options.substitutesAllowed(), true); EXPECT_EQ(options.useUidRange(got), false); }); }; -TEST_F(DerivationAdvancedAttrsTest, Derivation_advancedAttributes_structuredAttrs) +TEST_F(DerivationAdvancedAttrsTest, advancedAttributes_structuredAttrs_defaults) { - readTest("advanced-attributes-structured-attrs.drv", [&](auto encoded) { - auto got = parseDerivation(*store, std::move(encoded), "foo"); + this->readTest("advanced-attributes-structured-attrs-defaults.drv", [&](auto encoded) { + auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); - auto drvPath = writeDerivation(*store, got, NoRepair, true); + auto drvPath = writeDerivation(*this->store, got, NoRepair, true); ParsedDerivation parsedDrv(drvPath, got); DerivationOptions options = DerivationOptions::fromParsedDerivation(parsedDrv); - StringSet systemFeatures{"rainbow", "uid-range"}; + EXPECT_EQ(options.getRequiredSystemFeatures(got), StringSet{}); + }); +}; + +TEST_F(CaDerivationAdvancedAttrsTest, advancedAttributes_structuredAttrs_defaults) +{ + this->readTest("advanced-attributes-structured-attrs-defaults.drv", [&](auto encoded) { + auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); + + auto drvPath = writeDerivation(*this->store, got, NoRepair, true); + + ParsedDerivation parsedDrv(drvPath, got); + DerivationOptions options = DerivationOptions::fromParsedDerivation(parsedDrv); + + EXPECT_EQ(options.getRequiredSystemFeatures(got), StringSet{"ca-derivations"}); + }); +}; + +TYPED_TEST(DerivationAdvancedAttrsBothTest, advancedAttributes_structuredAttrs) +{ + this->readTest("advanced-attributes-structured-attrs.drv", [&](auto encoded) { + auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); + + auto drvPath = writeDerivation(*this->store, got, NoRepair, true); + + ParsedDerivation parsedDrv(drvPath, got); + DerivationOptions options = DerivationOptions::fromParsedDerivation(parsedDrv); EXPECT_TRUE(parsedDrv.hasStructuredAttrs()); @@ -204,6 +331,32 @@ TEST_F(DerivationAdvancedAttrsTest, Derivation_advancedAttributes_structuredAttr EXPECT_EQ(options.impureEnvVars, StringSet{"UNICORN"}); EXPECT_EQ(options.allowLocalNetworking, true); + { + auto output_ = get(std::get<1>(options.outputChecks), "dev"); + ASSERT_TRUE(output_); + auto & output = *output_; 
+ + EXPECT_EQ(output.maxSize, 789); + EXPECT_EQ(output.maxClosureSize, 5909); + } + + EXPECT_EQ(options.canBuildLocally(*this->store, got), false); + EXPECT_EQ(options.willBuildLocally(*this->store, got), false); + EXPECT_EQ(options.substitutesAllowed(), false); + EXPECT_EQ(options.useUidRange(got), true); + }); +}; + +TEST_F(DerivationAdvancedAttrsTest, advancedAttributes_structuredAttrs) +{ + this->readTest("advanced-attributes-structured-attrs.drv", [&](auto encoded) { + auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); + + auto drvPath = writeDerivation(*this->store, got, NoRepair, true); + + ParsedDerivation parsedDrv(drvPath, got); + DerivationOptions options = DerivationOptions::fromParsedDerivation(parsedDrv); + { { auto output_ = get(std::get<1>(options.outputChecks), "out"); @@ -222,22 +375,50 @@ TEST_F(DerivationAdvancedAttrsTest, Derivation_advancedAttributes_structuredAttr EXPECT_EQ(output.disallowedReferences, StringSet{"/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar"}); EXPECT_EQ(output.disallowedRequisites, StringSet{"/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar"}); } + } + + StringSet systemFeatures{"rainbow", "uid-range"}; + EXPECT_EQ(options.getRequiredSystemFeatures(got), systemFeatures); + }); +}; + +TEST_F(CaDerivationAdvancedAttrsTest, advancedAttributes_structuredAttrs) +{ + this->readTest("advanced-attributes-structured-attrs.drv", [&](auto encoded) { + auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); + + auto drvPath = writeDerivation(*this->store, got, NoRepair, true); + + ParsedDerivation parsedDrv(drvPath, got); + DerivationOptions options = DerivationOptions::fromParsedDerivation(parsedDrv); + + { { - auto output_ = get(std::get<1>(options.outputChecks), "dev"); + auto output_ = get(std::get<1>(options.outputChecks), "out"); ASSERT_TRUE(output_); auto & output = *output_; - EXPECT_EQ(output.maxSize, 789); - EXPECT_EQ(output.maxClosureSize, 5909); + EXPECT_EQ(output.allowedReferences, StringSet{"/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8"}); + EXPECT_EQ(output.allowedRequisites, StringSet{"/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8"}); + } + + { + auto output_ = get(std::get<1>(options.outputChecks), "bin"); + ASSERT_TRUE(output_); + auto & output = *output_; + + EXPECT_EQ( + output.disallowedReferences, StringSet{"/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99"}); + EXPECT_EQ( + output.disallowedRequisites, StringSet{"/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99"}); } } + StringSet systemFeatures{"rainbow", "uid-range"}; + systemFeatures.insert("ca-derivations"); + EXPECT_EQ(options.getRequiredSystemFeatures(got), systemFeatures); - EXPECT_EQ(options.canBuildLocally(*store, got), false); - EXPECT_EQ(options.willBuildLocally(*store, got), false); - EXPECT_EQ(options.substitutesAllowed(), false); - EXPECT_EQ(options.useUidRange(got), true); }); }; diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 360d19afee2..fdfdc37b41f 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -1368,7 +1368,7 @@ Derivation Derivation::fromJSON( for (auto & [outputName, output] : getObject(valueAt(json, "outputs"))) { res.outputs.insert_or_assign( outputName, - DerivationOutput::fromJSON(store, res.name, outputName, output)); + DerivationOutput::fromJSON(store, res.name, outputName, output, xpSettings)); } } catch (Error & e) { e.addTrace({}, "while reading key 'outputs'"); diff --git 
a/tests/functional/ca/derivation-advanced-attributes.sh b/tests/functional/ca/derivation-advanced-attributes.sh new file mode 100755 index 00000000000..b70463e5c48 --- /dev/null +++ b/tests/functional/ca/derivation-advanced-attributes.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +export NIX_TESTS_CA_BY_DEFAULT=1 + +cd .. +source derivation-advanced-attributes.sh diff --git a/tests/functional/ca/meson.build b/tests/functional/ca/meson.build index 7a7fcc5cf6f..a4611ca4200 100644 --- a/tests/functional/ca/meson.build +++ b/tests/functional/ca/meson.build @@ -8,10 +8,11 @@ suites += { 'name': 'ca', 'deps': [], 'tests': [ + 'build-cache.sh', 'build-with-garbage-path.sh', 'build.sh', - 'build-cache.sh', 'concurrent-builds.sh', + 'derivation-advanced-attributes.sh', 'derivation-json.sh', 'duplicate-realisation-in-closure.sh', 'eval-store.sh', diff --git a/tests/functional/derivation-advanced-attributes.sh b/tests/functional/derivation-advanced-attributes.sh index 6707b345cc3..a7530e11c67 100755 --- a/tests/functional/derivation-advanced-attributes.sh +++ b/tests/functional/derivation-advanced-attributes.sh @@ -12,11 +12,19 @@ badExitCode=0 store="$TEST_ROOT/store" +if [[ -z "${NIX_TESTS_CA_BY_DEFAULT:-}" ]]; then + drvDir=ia + flags=(--arg contentAddress false) +else + drvDir=ca + flags=(--arg contentAddress true --extra-experimental-features ca-derivations) +fi + for nixFile in derivation/*.nix; do - drvPath=$(env -u NIX_STORE nix-instantiate --store "$store" --pure-eval --expr "$(< "$nixFile")") + drvPath=$(env -u NIX_STORE nix-instantiate --store "$store" --pure-eval "${flags[@]}" --expr "$(< "$nixFile")") testName=$(basename "$nixFile" .nix) got="${store}${drvPath}" - expected="derivation/$testName.drv" + expected="derivation/${drvDir}/${testName}.drv" diffAndAcceptInner "$testName" "$got" "$expected" done diff --git a/tests/functional/derivation/advanced-attributes-defaults.nix b/tests/functional/derivation/advanced-attributes-defaults.nix index d466003b00d..51f359cf042 100644 --- a/tests/functional/derivation/advanced-attributes-defaults.nix +++ b/tests/functional/derivation/advanced-attributes-defaults.nix @@ -1,6 +1,24 @@ -derivation { - name = "advanced-attributes-defaults"; +{ contentAddress }: + +let + caArgs = + if contentAddress then + { + __contentAddressed = true; + outputHashMode = "recursive"; + outputHashAlgo = "sha256"; + } + else + { }; + + derivation' = args: derivation (caArgs // args); + system = "my-system"; + +in +derivation' { + inherit system; + name = "advanced-attributes-defaults"; builder = "/bin/bash"; args = [ "-c" diff --git a/tests/functional/derivation/advanced-attributes-structured-attrs-defaults.nix b/tests/functional/derivation/advanced-attributes-structured-attrs-defaults.nix index 3c6ad4900d6..ec51f0e288f 100644 --- a/tests/functional/derivation/advanced-attributes-structured-attrs-defaults.nix +++ b/tests/functional/derivation/advanced-attributes-structured-attrs-defaults.nix @@ -1,6 +1,24 @@ -derivation { - name = "advanced-attributes-structured-attrs-defaults"; +{ contentAddress }: + +let + caArgs = + if contentAddress then + { + __contentAddressed = true; + outputHashMode = "recursive"; + outputHashAlgo = "sha256"; + } + else + { }; + + derivation' = args: derivation (caArgs // args); + system = "my-system"; + +in +derivation' { + inherit system; + name = "advanced-attributes-structured-attrs-defaults"; builder = "/bin/bash"; args = [ "-c" diff --git a/tests/functional/derivation/advanced-attributes-structured-attrs.nix 
b/tests/functional/derivation/advanced-attributes-structured-attrs.nix index 4c596be45e9..b789cdaa720 100644 --- a/tests/functional/derivation/advanced-attributes-structured-attrs.nix +++ b/tests/functional/derivation/advanced-attributes-structured-attrs.nix @@ -1,6 +1,21 @@ +{ contentAddress }: + let + caArgs = + if contentAddress then + { + __contentAddressed = true; + outputHashMode = "recursive"; + outputHashAlgo = "sha256"; + } + else + { }; + + derivation' = args: derivation (caArgs // args); + system = "my-system"; - foo = derivation { + + foo = derivation' { inherit system; name = "foo"; builder = "/bin/bash"; @@ -9,7 +24,8 @@ let "echo foo > $out" ]; }; - bar = derivation { + + bar = derivation' { inherit system; name = "bar"; builder = "/bin/bash"; @@ -18,8 +34,9 @@ let "echo bar > $out" ]; }; + in -derivation { +derivation' { inherit system; name = "advanced-attributes-structured-attrs"; builder = "/bin/bash"; diff --git a/tests/functional/derivation/advanced-attributes.nix b/tests/functional/derivation/advanced-attributes.nix index 7f365ce65e2..52786783faa 100644 --- a/tests/functional/derivation/advanced-attributes.nix +++ b/tests/functional/derivation/advanced-attributes.nix @@ -1,6 +1,21 @@ +{ contentAddress }: + let + caArgs = + if contentAddress then + { + __contentAddressed = true; + outputHashMode = "recursive"; + outputHashAlgo = "sha256"; + } + else + { }; + + derivation' = args: derivation (caArgs // args); + system = "my-system"; - foo = derivation { + + foo = derivation' { inherit system; name = "foo"; builder = "/bin/bash"; @@ -9,7 +24,8 @@ let "echo foo > $out" ]; }; - bar = derivation { + + bar = derivation' { inherit system; name = "bar"; builder = "/bin/bash"; @@ -18,8 +34,9 @@ let "echo bar > $out" ]; }; + in -derivation { +derivation' { inherit system; name = "advanced-attributes"; builder = "/bin/bash"; diff --git a/tests/functional/derivation/ca/advanced-attributes-defaults.drv b/tests/functional/derivation/ca/advanced-attributes-defaults.drv new file mode 100644 index 00000000000..2c81609639b --- /dev/null +++ b/tests/functional/derivation/ca/advanced-attributes-defaults.drv @@ -0,0 +1 @@ +Derive([("out","","r:sha256","")],[],[],"my-system","/bin/bash",["-c","echo hello > $out"],[("builder","/bin/bash"),("name","advanced-attributes-defaults"),("out","/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9"),("outputHashAlgo","sha256"),("outputHashMode","recursive"),("system","my-system")]) \ No newline at end of file diff --git a/tests/functional/derivation/ca/advanced-attributes-structured-attrs-defaults.drv b/tests/functional/derivation/ca/advanced-attributes-structured-attrs-defaults.drv new file mode 100644 index 00000000000..bf56e05d600 --- /dev/null +++ b/tests/functional/derivation/ca/advanced-attributes-structured-attrs-defaults.drv @@ -0,0 +1 @@ +Derive([("dev","","r:sha256",""),("out","","r:sha256","")],[],[],"my-system","/bin/bash",["-c","echo hello > $out"],[("__json","{\"builder\":\"/bin/bash\",\"name\":\"advanced-attributes-structured-attrs-defaults\",\"outputHashAlgo\":\"sha256\",\"outputHashMode\":\"recursive\",\"outputs\":[\"out\",\"dev\"],\"system\":\"my-system\"}"),("dev","/02qcpld1y6xhs5gz9bchpxaw0xdhmsp5dv88lh25r2ss44kh8dxz"),("out","/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9")]) \ No newline at end of file diff --git a/tests/functional/derivation/ca/advanced-attributes-structured-attrs.drv b/tests/functional/derivation/ca/advanced-attributes-structured-attrs.drv new file mode 100644 index 00000000000..307beb53e62 --- 
/dev/null +++ b/tests/functional/derivation/ca/advanced-attributes-structured-attrs.drv @@ -0,0 +1 @@ +Derive([("bin","","r:sha256",""),("dev","","r:sha256",""),("out","","r:sha256","")],[("/nix/store/spfzlnkwb1v8s62yvh8vj1apd1kwjr5f-foo.drv",["out"]),("/nix/store/x1vpzav565aqr7ccmkn0wv0svkm1qrbl-bar.drv",["out"])],[],"my-system","/bin/bash",["-c","echo hello > $out"],[("__json","{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99\"],\"disallowedRequisites\":[\"/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8\"],\"allowedRequisites\":[\"/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8\"]}},\"outputHashAlgo\":\"sha256\",\"outputHashMode\":\"recursive\",\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}"),("bin","/04f3da1kmbr67m3gzxikmsl4vjz5zf777sv6m14ahv22r65aac9m"),("dev","/02qcpld1y6xhs5gz9bchpxaw0xdhmsp5dv88lh25r2ss44kh8dxz"),("out","/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9")]) \ No newline at end of file diff --git a/tests/functional/derivation/ca/advanced-attributes.drv b/tests/functional/derivation/ca/advanced-attributes.drv new file mode 100644 index 00000000000..343f895ca7a --- /dev/null +++ b/tests/functional/derivation/ca/advanced-attributes.drv @@ -0,0 +1 @@ +Derive([("out","","r:sha256","")],[("/nix/store/spfzlnkwb1v8s62yvh8vj1apd1kwjr5f-foo.drv",["out"]),("/nix/store/x1vpzav565aqr7ccmkn0wv0svkm1qrbl-bar.drv",["out"])],[],"my-system","/bin/bash",["-c","echo hello > $out"],[("__darwinAllowLocalNetworking","1"),("__impureHostDeps","/usr/bin/ditto"),("__noChroot","1"),("__sandboxProfile","sandcastle"),("allowSubstitutes",""),("allowedReferences","/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8"),("allowedRequisites","/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8"),("builder","/bin/bash"),("disallowedReferences","/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99"),("disallowedRequisites","/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99"),("impureEnvVars","UNICORN"),("name","advanced-attributes"),("out","/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9"),("outputHashAlgo","sha256"),("outputHashMode","recursive"),("preferLocalBuild","1"),("requiredSystemFeatures","rainbow uid-range"),("system","my-system")]) \ No newline at end of file diff --git a/tests/functional/derivation/advanced-attributes-defaults.drv b/tests/functional/derivation/ia/advanced-attributes-defaults.drv similarity index 100% rename from tests/functional/derivation/advanced-attributes-defaults.drv rename to tests/functional/derivation/ia/advanced-attributes-defaults.drv diff --git a/tests/functional/derivation/advanced-attributes-structured-attrs-defaults.drv b/tests/functional/derivation/ia/advanced-attributes-structured-attrs-defaults.drv similarity index 100% rename from tests/functional/derivation/advanced-attributes-structured-attrs-defaults.drv rename to tests/functional/derivation/ia/advanced-attributes-structured-attrs-defaults.drv diff --git 
a/tests/functional/derivation/advanced-attributes-structured-attrs.drv b/tests/functional/derivation/ia/advanced-attributes-structured-attrs.drv similarity index 100% rename from tests/functional/derivation/advanced-attributes-structured-attrs.drv rename to tests/functional/derivation/ia/advanced-attributes-structured-attrs.drv diff --git a/tests/functional/derivation/advanced-attributes.drv b/tests/functional/derivation/ia/advanced-attributes.drv similarity index 100% rename from tests/functional/derivation/advanced-attributes.drv rename to tests/functional/derivation/ia/advanced-attributes.drv From 30d900b313b9dad3b78ec05d07368c8e83811dc5 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 15 Apr 2025 11:53:17 -0400 Subject: [PATCH 350/361] Derivation "advanced attrs" test: Ensure fields are set to distinct values We had fields set to the same values before in our test data. This is not a problem per-se, but does mean we wouldn't catch certain mixups. Now, the fields are set to distinct values (where possible), which makes the test more robust. (cherry picked from commit a0b2b75f59496ff4e199dd28eb932f181659c1f0) --- .../advanced-attributes-structured-attrs.json | 8 +++-- .../derivation/ca/advanced-attributes.json | 14 ++++---- .../advanced-attributes-structured-attrs.json | 20 ++++++----- .../derivation/ia/advanced-attributes.json | 18 +++++----- .../derivation-advanced-attrs.cc | 35 ++++++++++--------- .../advanced-attributes-structured-attrs.nix | 12 +++++-- .../derivation/advanced-attributes.nix | 12 +++++-- .../advanced-attributes-structured-attrs.drv | 2 +- .../derivation/ca/advanced-attributes.drv | 2 +- .../advanced-attributes-structured-attrs.drv | 2 +- .../derivation/ia/advanced-attributes.drv | 2 +- 11 files changed, 77 insertions(+), 50 deletions(-) diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json index 584fd211385..f6cdc1f1602 100644 --- a/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json @@ -5,21 +5,23 @@ ], "builder": "/bin/bash", "env": { - "__json": "{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99\"],\"disallowedRequisites\":[\"/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8\"],\"allowedRequisites\":[\"/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8\"]}},\"outputHashAlgo\":\"sha256\",\"outputHashMode\":\"recursive\",\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}", + "__json": 
"{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g\"],\"disallowedRequisites\":[\"/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9\"],\"allowedRequisites\":[\"/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z\"]}},\"outputHashAlgo\":\"sha256\",\"outputHashMode\":\"recursive\",\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}", "bin": "/04f3da1kmbr67m3gzxikmsl4vjz5zf777sv6m14ahv22r65aac9m", "dev": "/02qcpld1y6xhs5gz9bchpxaw0xdhmsp5dv88lh25r2ss44kh8dxz", "out": "/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9" }, "inputDrvs": { - "/nix/store/spfzlnkwb1v8s62yvh8vj1apd1kwjr5f-foo.drv": { + "/nix/store/j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv": { "dynamicOutputs": {}, "outputs": [ + "dev", "out" ] }, - "/nix/store/x1vpzav565aqr7ccmkn0wv0svkm1qrbl-bar.drv": { + "/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv": { "dynamicOutputs": {}, "outputs": [ + "dev", "out" ] } diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes.json b/src/libstore-tests/data/derivation/ca/advanced-attributes.json index 69d40b135a6..2105c6256c0 100644 --- a/src/libstore-tests/data/derivation/ca/advanced-attributes.json +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes.json @@ -10,11 +10,11 @@ "__noChroot": "1", "__sandboxProfile": "sandcastle", "allowSubstitutes": "", - "allowedReferences": "/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8", - "allowedRequisites": "/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8", + "allowedReferences": "/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9", + "allowedRequisites": "/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z", "builder": "/bin/bash", - "disallowedReferences": "/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99", - "disallowedRequisites": "/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99", + "disallowedReferences": "/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g", + "disallowedRequisites": "/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8", "impureEnvVars": "UNICORN", "name": "advanced-attributes", "out": "/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9", @@ -25,15 +25,17 @@ "system": "my-system" }, "inputDrvs": { - "/nix/store/spfzlnkwb1v8s62yvh8vj1apd1kwjr5f-foo.drv": { + "/nix/store/j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv": { "dynamicOutputs": {}, "outputs": [ + "dev", "out" ] }, - "/nix/store/x1vpzav565aqr7ccmkn0wv0svkm1qrbl-bar.drv": { + "/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv": { "dynamicOutputs": {}, "outputs": [ + "dev", "out" ] } diff --git a/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json index 32442812467..b45a0d62453 100644 --- a/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json +++ b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json @@ -5,21 +5,23 @@ ], "builder": "/bin/bash", "env": { - "__json": 
"{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar\"],\"disallowedRequisites\":[\"/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo\"],\"allowedRequisites\":[\"/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo\"]}},\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}", - "bin": "/nix/store/pbzb48v0ycf80jgligcp4n8z0rblna4n-advanced-attributes-structured-attrs-bin", - "dev": "/nix/store/7xapi8jv7flcz1qq8jhw55ar8ag8hldh-advanced-attributes-structured-attrs-dev", - "out": "/nix/store/mpq3l1l1qc2yr50q520g08kprprwv79f-advanced-attributes-structured-attrs" + "__json": "{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar\"],\"disallowedRequisites\":[\"/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo\"],\"allowedRequisites\":[\"/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev\"]}},\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}", + "bin": "/nix/store/qjjj3zrlimpjbkk686m052b3ks9iz2sl-advanced-attributes-structured-attrs-bin", + "dev": "/nix/store/lpz5grl48v93pdadavyg5is1rqvfdipf-advanced-attributes-structured-attrs-dev", + "out": "/nix/store/nzvz1bmh1g89a5dkpqcqan0av7q3hgv3-advanced-attributes-structured-attrs" }, "inputDrvs": { - "/nix/store/4xm4wccqsvagz9gjksn24s7rip2fdy7v-foo.drv": { + "/nix/store/afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv": { "dynamicOutputs": {}, "outputs": [ + "dev", "out" ] }, - "/nix/store/plsq5jbr5nhgqwcgb2qxw7jchc09dnl8-bar.drv": { + "/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv": { "dynamicOutputs": {}, "outputs": [ + "dev", "out" ] } @@ -28,13 +30,13 @@ "name": "advanced-attributes-structured-attrs", "outputs": { "bin": { - "path": "/nix/store/pbzb48v0ycf80jgligcp4n8z0rblna4n-advanced-attributes-structured-attrs-bin" + "path": "/nix/store/qjjj3zrlimpjbkk686m052b3ks9iz2sl-advanced-attributes-structured-attrs-bin" }, "dev": { - "path": "/nix/store/7xapi8jv7flcz1qq8jhw55ar8ag8hldh-advanced-attributes-structured-attrs-dev" + "path": "/nix/store/lpz5grl48v93pdadavyg5is1rqvfdipf-advanced-attributes-structured-attrs-dev" }, "out": { - "path": "/nix/store/mpq3l1l1qc2yr50q520g08kprprwv79f-advanced-attributes-structured-attrs" + "path": "/nix/store/nzvz1bmh1g89a5dkpqcqan0av7q3hgv3-advanced-attributes-structured-attrs" } }, "system": "my-system" diff --git a/src/libstore-tests/data/derivation/ia/advanced-attributes.json b/src/libstore-tests/data/derivation/ia/advanced-attributes.json index d51524e2056..1eb8de86e7c 100644 --- 
a/src/libstore-tests/data/derivation/ia/advanced-attributes.json +++ b/src/libstore-tests/data/derivation/ia/advanced-attributes.json @@ -10,28 +10,30 @@ "__noChroot": "1", "__sandboxProfile": "sandcastle", "allowSubstitutes": "", - "allowedReferences": "/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo", - "allowedRequisites": "/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo", + "allowedReferences": "/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo", + "allowedRequisites": "/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev", "builder": "/bin/bash", - "disallowedReferences": "/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar", - "disallowedRequisites": "/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar", + "disallowedReferences": "/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar", + "disallowedRequisites": "/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev", "impureEnvVars": "UNICORN", "name": "advanced-attributes", - "out": "/nix/store/33a6fdmn8q9ih9d7npbnrxn2q56a4l8q-advanced-attributes", + "out": "/nix/store/swkj0mrq0cq3dfli95v4am0427mi2hxf-advanced-attributes", "preferLocalBuild": "1", "requiredSystemFeatures": "rainbow uid-range", "system": "my-system" }, "inputDrvs": { - "/nix/store/4xm4wccqsvagz9gjksn24s7rip2fdy7v-foo.drv": { + "/nix/store/afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv": { "dynamicOutputs": {}, "outputs": [ + "dev", "out" ] }, - "/nix/store/plsq5jbr5nhgqwcgb2qxw7jchc09dnl8-bar.drv": { + "/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv": { "dynamicOutputs": {}, "outputs": [ + "dev", "out" ] } @@ -40,7 +42,7 @@ "name": "advanced-attributes", "outputs": { "out": { - "path": "/nix/store/33a6fdmn8q9ih9d7npbnrxn2q56a4l8q-advanced-attributes" + "path": "/nix/store/swkj0mrq0cq3dfli95v4am0427mi2hxf-advanced-attributes" } }, "system": "my-system" diff --git a/src/libstore-tests/derivation-advanced-attrs.cc b/src/libstore-tests/derivation-advanced-attrs.cc index e135b8106d2..f82cea026b6 100644 --- a/src/libstore-tests/derivation-advanced-attrs.cc +++ b/src/libstore-tests/derivation-advanced-attrs.cc @@ -201,13 +201,15 @@ TEST_F(DerivationAdvancedAttrsTest, advancedAttributes) auto & checksForAllOutputs = *checksForAllOutputs_; EXPECT_EQ( - checksForAllOutputs.allowedReferences, StringSet{"/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo"}); + checksForAllOutputs.allowedReferences, StringSet{"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo"}); EXPECT_EQ( - checksForAllOutputs.allowedRequisites, StringSet{"/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo"}); + checksForAllOutputs.allowedRequisites, + StringSet{"/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev"}); EXPECT_EQ( - checksForAllOutputs.disallowedReferences, StringSet{"/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar"}); + checksForAllOutputs.disallowedReferences, StringSet{"/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar"}); EXPECT_EQ( - checksForAllOutputs.disallowedRequisites, StringSet{"/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar"}); + checksForAllOutputs.disallowedRequisites, + StringSet{"/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev"}); } StringSet systemFeatures{"rainbow", "uid-range"}; @@ -233,16 +235,16 @@ TEST_F(CaDerivationAdvancedAttrsTest, advancedAttributes) EXPECT_EQ( checksForAllOutputs.allowedReferences, - StringSet{"/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8"}); + StringSet{"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9"}); EXPECT_EQ( checksForAllOutputs.allowedRequisites, - StringSet{"/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8"}); + 
StringSet{"/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z"}); EXPECT_EQ( checksForAllOutputs.disallowedReferences, - StringSet{"/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99"}); + StringSet{"/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g"}); EXPECT_EQ( checksForAllOutputs.disallowedRequisites, - StringSet{"/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99"}); + StringSet{"/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8"}); } StringSet systemFeatures{"rainbow", "uid-range"}; @@ -363,8 +365,8 @@ TEST_F(DerivationAdvancedAttrsTest, advancedAttributes_structuredAttrs) ASSERT_TRUE(output_); auto & output = *output_; - EXPECT_EQ(output.allowedReferences, StringSet{"/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo"}); - EXPECT_EQ(output.allowedRequisites, StringSet{"/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo"}); + EXPECT_EQ(output.allowedReferences, StringSet{"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo"}); + EXPECT_EQ(output.allowedRequisites, StringSet{"/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev"}); } { @@ -372,8 +374,9 @@ TEST_F(DerivationAdvancedAttrsTest, advancedAttributes_structuredAttrs) ASSERT_TRUE(output_); auto & output = *output_; - EXPECT_EQ(output.disallowedReferences, StringSet{"/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar"}); - EXPECT_EQ(output.disallowedRequisites, StringSet{"/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar"}); + EXPECT_EQ(output.disallowedReferences, StringSet{"/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar"}); + EXPECT_EQ( + output.disallowedRequisites, StringSet{"/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev"}); } } @@ -399,8 +402,8 @@ TEST_F(CaDerivationAdvancedAttrsTest, advancedAttributes_structuredAttrs) ASSERT_TRUE(output_); auto & output = *output_; - EXPECT_EQ(output.allowedReferences, StringSet{"/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8"}); - EXPECT_EQ(output.allowedRequisites, StringSet{"/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8"}); + EXPECT_EQ(output.allowedReferences, StringSet{"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9"}); + EXPECT_EQ(output.allowedRequisites, StringSet{"/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z"}); } { @@ -409,9 +412,9 @@ TEST_F(CaDerivationAdvancedAttrsTest, advancedAttributes_structuredAttrs) auto & output = *output_; EXPECT_EQ( - output.disallowedReferences, StringSet{"/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99"}); + output.disallowedReferences, StringSet{"/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g"}); EXPECT_EQ( - output.disallowedRequisites, StringSet{"/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99"}); + output.disallowedRequisites, StringSet{"/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8"}); } } diff --git a/tests/functional/derivation/advanced-attributes-structured-attrs.nix b/tests/functional/derivation/advanced-attributes-structured-attrs.nix index b789cdaa720..27d9e7cf938 100644 --- a/tests/functional/derivation/advanced-attributes-structured-attrs.nix +++ b/tests/functional/derivation/advanced-attributes-structured-attrs.nix @@ -23,6 +23,10 @@ let "-c" "echo foo > $out" ]; + outputs = [ + "out" + "dev" + ]; }; bar = derivation' { @@ -33,6 +37,10 @@ let "-c" "echo bar > $out" ]; + outputs = [ + "out" + "dev" + ]; }; in @@ -58,11 +66,11 @@ derivation' { outputChecks = { out = { allowedReferences = [ foo ]; - allowedRequisites = [ foo ]; + allowedRequisites = [ foo.dev ]; }; bin = { disallowedReferences = [ bar ]; - disallowedRequisites = [ bar ]; + disallowedRequisites = [ bar.dev 
]; }; dev = { maxSize = 789; diff --git a/tests/functional/derivation/advanced-attributes.nix b/tests/functional/derivation/advanced-attributes.nix index 52786783faa..e988e0a70c1 100644 --- a/tests/functional/derivation/advanced-attributes.nix +++ b/tests/functional/derivation/advanced-attributes.nix @@ -23,6 +23,10 @@ let "-c" "echo foo > $out" ]; + outputs = [ + "out" + "dev" + ]; }; bar = derivation' { @@ -33,6 +37,10 @@ let "-c" "echo bar > $out" ]; + outputs = [ + "out" + "dev" + ]; }; in @@ -50,9 +58,9 @@ derivation' { impureEnvVars = [ "UNICORN" ]; __darwinAllowLocalNetworking = true; allowedReferences = [ foo ]; - allowedRequisites = [ foo ]; + allowedRequisites = [ foo.dev ]; disallowedReferences = [ bar ]; - disallowedRequisites = [ bar ]; + disallowedRequisites = [ bar.dev ]; requiredSystemFeatures = [ "rainbow" "uid-range" diff --git a/tests/functional/derivation/ca/advanced-attributes-structured-attrs.drv b/tests/functional/derivation/ca/advanced-attributes-structured-attrs.drv index 307beb53e62..a81e74d4195 100644 --- a/tests/functional/derivation/ca/advanced-attributes-structured-attrs.drv +++ b/tests/functional/derivation/ca/advanced-attributes-structured-attrs.drv @@ -1 +1 @@ -Derive([("bin","","r:sha256",""),("dev","","r:sha256",""),("out","","r:sha256","")],[("/nix/store/spfzlnkwb1v8s62yvh8vj1apd1kwjr5f-foo.drv",["out"]),("/nix/store/x1vpzav565aqr7ccmkn0wv0svkm1qrbl-bar.drv",["out"])],[],"my-system","/bin/bash",["-c","echo hello > $out"],[("__json","{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99\"],\"disallowedRequisites\":[\"/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8\"],\"allowedRequisites\":[\"/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8\"]}},\"outputHashAlgo\":\"sha256\",\"outputHashMode\":\"recursive\",\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}"),("bin","/04f3da1kmbr67m3gzxikmsl4vjz5zf777sv6m14ahv22r65aac9m"),("dev","/02qcpld1y6xhs5gz9bchpxaw0xdhmsp5dv88lh25r2ss44kh8dxz"),("out","/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9")]) \ No newline at end of file +Derive([("bin","","r:sha256",""),("dev","","r:sha256",""),("out","","r:sha256","")],[("/nix/store/j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv",["dev","out"]),("/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv",["dev","out"])],[],"my-system","/bin/bash",["-c","echo hello > 
$out"],[("__json","{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g\"],\"disallowedRequisites\":[\"/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9\"],\"allowedRequisites\":[\"/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z\"]}},\"outputHashAlgo\":\"sha256\",\"outputHashMode\":\"recursive\",\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}"),("bin","/04f3da1kmbr67m3gzxikmsl4vjz5zf777sv6m14ahv22r65aac9m"),("dev","/02qcpld1y6xhs5gz9bchpxaw0xdhmsp5dv88lh25r2ss44kh8dxz"),("out","/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9")]) \ No newline at end of file diff --git a/tests/functional/derivation/ca/advanced-attributes.drv b/tests/functional/derivation/ca/advanced-attributes.drv index 343f895ca7a..dded6c62086 100644 --- a/tests/functional/derivation/ca/advanced-attributes.drv +++ b/tests/functional/derivation/ca/advanced-attributes.drv @@ -1 +1 @@ -Derive([("out","","r:sha256","")],[("/nix/store/spfzlnkwb1v8s62yvh8vj1apd1kwjr5f-foo.drv",["out"]),("/nix/store/x1vpzav565aqr7ccmkn0wv0svkm1qrbl-bar.drv",["out"])],[],"my-system","/bin/bash",["-c","echo hello > $out"],[("__darwinAllowLocalNetworking","1"),("__impureHostDeps","/usr/bin/ditto"),("__noChroot","1"),("__sandboxProfile","sandcastle"),("allowSubstitutes",""),("allowedReferences","/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8"),("allowedRequisites","/08cr1k2yfw44g21w1h850285vqhsciy7y3siqjdzz1m9yvwlqfm8"),("builder","/bin/bash"),("disallowedReferences","/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99"),("disallowedRequisites","/05pdic30acaypbz73ivw4wlsi9whq08jxsimml2h0inwqya2hn99"),("impureEnvVars","UNICORN"),("name","advanced-attributes"),("out","/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9"),("outputHashAlgo","sha256"),("outputHashMode","recursive"),("preferLocalBuild","1"),("requiredSystemFeatures","rainbow uid-range"),("system","my-system")]) \ No newline at end of file +Derive([("out","","r:sha256","")],[("/nix/store/j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv",["dev","out"]),("/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv",["dev","out"])],[],"my-system","/bin/bash",["-c","echo hello > $out"],[("__darwinAllowLocalNetworking","1"),("__impureHostDeps","/usr/bin/ditto"),("__noChroot","1"),("__sandboxProfile","sandcastle"),("allowSubstitutes",""),("allowedReferences","/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9"),("allowedRequisites","/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z"),("builder","/bin/bash"),("disallowedReferences","/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g"),("disallowedRequisites","/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8"),("impureEnvVars","UNICORN"),("name","advanced-attributes"),("out","/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9"),("outputHashAlgo","sha256"),("outputHashMode","recursive"),("preferLocalBuild","1"),("requiredSystemFeatures","rainbow uid-range"),("system","my-system")]) \ No newline at end of file diff --git 
a/tests/functional/derivation/ia/advanced-attributes-structured-attrs.drv b/tests/functional/derivation/ia/advanced-attributes-structured-attrs.drv index e47a41ad525..1560bca6645 100644 --- a/tests/functional/derivation/ia/advanced-attributes-structured-attrs.drv +++ b/tests/functional/derivation/ia/advanced-attributes-structured-attrs.drv @@ -1 +1 @@ -Derive([("bin","/nix/store/pbzb48v0ycf80jgligcp4n8z0rblna4n-advanced-attributes-structured-attrs-bin","",""),("dev","/nix/store/7xapi8jv7flcz1qq8jhw55ar8ag8hldh-advanced-attributes-structured-attrs-dev","",""),("out","/nix/store/mpq3l1l1qc2yr50q520g08kprprwv79f-advanced-attributes-structured-attrs","","")],[("/nix/store/4xm4wccqsvagz9gjksn24s7rip2fdy7v-foo.drv",["out"]),("/nix/store/plsq5jbr5nhgqwcgb2qxw7jchc09dnl8-bar.drv",["out"])],[],"my-system","/bin/bash",["-c","echo hello > $out"],[("__json","{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar\"],\"disallowedRequisites\":[\"/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo\"],\"allowedRequisites\":[\"/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo\"]}},\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}"),("bin","/nix/store/pbzb48v0ycf80jgligcp4n8z0rblna4n-advanced-attributes-structured-attrs-bin"),("dev","/nix/store/7xapi8jv7flcz1qq8jhw55ar8ag8hldh-advanced-attributes-structured-attrs-dev"),("out","/nix/store/mpq3l1l1qc2yr50q520g08kprprwv79f-advanced-attributes-structured-attrs")]) \ No newline at end of file +Derive([("bin","/nix/store/qjjj3zrlimpjbkk686m052b3ks9iz2sl-advanced-attributes-structured-attrs-bin","",""),("dev","/nix/store/lpz5grl48v93pdadavyg5is1rqvfdipf-advanced-attributes-structured-attrs-dev","",""),("out","/nix/store/nzvz1bmh1g89a5dkpqcqan0av7q3hgv3-advanced-attributes-structured-attrs","","")],[("/nix/store/afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv",["dev","out"]),("/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv",["dev","out"])],[],"my-system","/bin/bash",["-c","echo hello > 
$out"],[("__json","{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar\"],\"disallowedRequisites\":[\"/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo\"],\"allowedRequisites\":[\"/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev\"]}},\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}"),("bin","/nix/store/qjjj3zrlimpjbkk686m052b3ks9iz2sl-advanced-attributes-structured-attrs-bin"),("dev","/nix/store/lpz5grl48v93pdadavyg5is1rqvfdipf-advanced-attributes-structured-attrs-dev"),("out","/nix/store/nzvz1bmh1g89a5dkpqcqan0av7q3hgv3-advanced-attributes-structured-attrs")]) \ No newline at end of file diff --git a/tests/functional/derivation/ia/advanced-attributes.drv b/tests/functional/derivation/ia/advanced-attributes.drv index ec3112ab2b1..2c5d5a6929c 100644 --- a/tests/functional/derivation/ia/advanced-attributes.drv +++ b/tests/functional/derivation/ia/advanced-attributes.drv @@ -1 +1 @@ -Derive([("out","/nix/store/33a6fdmn8q9ih9d7npbnrxn2q56a4l8q-advanced-attributes","","")],[("/nix/store/4xm4wccqsvagz9gjksn24s7rip2fdy7v-foo.drv",["out"]),("/nix/store/plsq5jbr5nhgqwcgb2qxw7jchc09dnl8-bar.drv",["out"])],[],"my-system","/bin/bash",["-c","echo hello > $out"],[("__darwinAllowLocalNetworking","1"),("__impureHostDeps","/usr/bin/ditto"),("__noChroot","1"),("__sandboxProfile","sandcastle"),("allowSubstitutes",""),("allowedReferences","/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo"),("allowedRequisites","/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo"),("builder","/bin/bash"),("disallowedReferences","/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar"),("disallowedRequisites","/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar"),("impureEnvVars","UNICORN"),("name","advanced-attributes"),("out","/nix/store/33a6fdmn8q9ih9d7npbnrxn2q56a4l8q-advanced-attributes"),("preferLocalBuild","1"),("requiredSystemFeatures","rainbow uid-range"),("system","my-system")]) \ No newline at end of file +Derive([("out","/nix/store/swkj0mrq0cq3dfli95v4am0427mi2hxf-advanced-attributes","","")],[("/nix/store/afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv",["dev","out"]),("/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv",["dev","out"])],[],"my-system","/bin/bash",["-c","echo hello > $out"],[("__darwinAllowLocalNetworking","1"),("__impureHostDeps","/usr/bin/ditto"),("__noChroot","1"),("__sandboxProfile","sandcastle"),("allowSubstitutes",""),("allowedReferences","/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo"),("allowedRequisites","/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev"),("builder","/bin/bash"),("disallowedReferences","/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar"),("disallowedRequisites","/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev"),("impureEnvVars","UNICORN"),("name","advanced-attributes"),("out","/nix/store/swkj0mrq0cq3dfli95v4am0427mi2hxf-advanced-attributes"),("preferLocalBuild","1"),("requiredSystemFeatures","rainbow uid-range"),("system","my-system")]) \ No newline at end of file From 29ae14114e825fc563434e7a2c2e0445d7e2f50b Mon Sep 17 00:00:00 2001 
From: John Ericson Date: Tue, 15 Apr 2025 11:54:11 -0400 Subject: [PATCH 351/361] Remove stray assignment side effect in lambda This was almost a bug! It wasn't, simply because another assignment would clobber it later. (cherry picked from commit 32409dd7d750576153657beb075bb303840c0c3a) --- src/libstore/derivation-options.cc | 1 - 1 file changed, 1 deletion(-) diff --git a/src/libstore/derivation-options.cc b/src/libstore/derivation-options.cc index 962222f6d54..af3a319e978 100644 --- a/src/libstore/derivation-options.cc +++ b/src/libstore/derivation-options.cc @@ -68,7 +68,6 @@ DerivationOptions DerivationOptions::fromParsedDerivation(const ParsedDerivation throw Error("attribute '%s' must be a list of strings", name); res.insert(j->get()); } - checks.disallowedRequisites = res; return res; } return {}; From d74acf195427c9d28a0beaa070d0320b185489d7 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Wed, 23 Apr 2025 20:54:53 -0400 Subject: [PATCH 352/361] Hide the "dirty" notice when running nix develop In the common case, nix develop is running against a dirty checkout of a project. This patch removes the warning about a dirty tree on nix develop only. Close FH-736 --- src/libcmd/include/nix/cmd/command.hh | 2 ++ src/libcmd/installables.cc | 5 +++++ src/nix/develop.cc | 6 ++++++ 3 files changed, 13 insertions(+) diff --git a/src/libcmd/include/nix/cmd/command.hh b/src/libcmd/include/nix/cmd/command.hh index 6b6418f51e5..11981a76995 100644 --- a/src/libcmd/include/nix/cmd/command.hh +++ b/src/libcmd/include/nix/cmd/command.hh @@ -214,6 +214,8 @@ struct InstallableCommand : virtual Args, SourceExprCommand { InstallableCommand(); + virtual void preRun(ref store); + virtual void run(ref store, ref installable) = 0; void run(ref store) override; diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index edfe8c15ad0..1047f94f1f9 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -903,8 +903,13 @@ InstallableCommand::InstallableCommand() }); } +void InstallableCommand::preRun(ref store) +{ +} + void InstallableCommand::run(ref store) { + preRun(store); auto installable = parseInstallable(store, _installable); run(store, std::move(installable)); } diff --git a/src/nix/develop.cc b/src/nix/develop.cc index 00572697aee..02947ff4181 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -1,5 +1,6 @@ #include "nix/util/config-global.hh" #include "nix/expr/eval.hh" +#include "nix/fetchers/fetch-settings.hh" #include "nix/cmd/installable-flake.hh" #include "nix/cmd/command-installable-value.hh" #include "nix/main/common-args.hh" @@ -583,6 +584,11 @@ struct CmdDevelop : Common, MixEnvironment ; } + void preRun(ref store) override + { + fetchSettings.warnDirty = false; + } + void run(ref store, ref installable) override { auto [buildEnvironment, gcroot] = getBuildEnvironment(store, installable); From cecbb2b22c22aaf53251631cb929900c5b24312a Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Wed, 23 Apr 2025 20:38:59 -0400 Subject: [PATCH 353/361] Improve the "dirty" message, by clarifying what the jargon means FH-735 --- src/libfetchers/git.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 9a0b8c65a35..ef74397ff90 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -393,10 +393,10 @@ struct GitInputScheme : InputScheme { if (workdirInfo.isDirty) { if (!settings.allowDirty) - throw Error("Git tree '%s' is dirty", locationToArg()); + throw Error("Git tree '%s' 
has uncommitted changes", locationToArg()); if (settings.warnDirty) - warn("Git tree '%s' is dirty", locationToArg()); + warn("Git tree '%s' has uncommitted changes", locationToArg()); } } From d97d311ddfbb656bc4ccd1e81f9059d0ddea8c8d Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Thu, 24 Apr 2025 20:10:54 -0400 Subject: [PATCH 354/361] Emit a warning about channel deprecation. --- doc/manual/source/command-ref/nix-channel.md | 6 ++++++ src/nix-channel/nix-channel.cc | 5 +++++ 2 files changed, 11 insertions(+) diff --git a/doc/manual/source/command-ref/nix-channel.md b/doc/manual/source/command-ref/nix-channel.md index 8b58392b7b5..bc0a90b11c4 100644 --- a/doc/manual/source/command-ref/nix-channel.md +++ b/doc/manual/source/command-ref/nix-channel.md @@ -8,6 +8,12 @@ # Description +> **Warning** +> +> nix-channel is deprecated in favor of flakes in Determinate Nix. +> For a guide on Nix flakes, see: <https://zero-to-nix.com/>. +> For details and to offer feedback on the deprecation process, see: <https://github.com/DeterminateSystems/nix-src/issues/34>. + Channels are a mechanism for referencing remote Nix expressions and conveniently retrieving their latest version. The moving parts of channels are: diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc index c0baa4aa2a4..a6ca6f711c1 100644 --- a/src/nix-channel/nix-channel.cc +++ b/src/nix-channel/nix-channel.cc @@ -164,6 +164,11 @@ static void update(const StringSet & channelNames) static int main_nix_channel(int argc, char ** argv) { + warn( + "nix-channel is deprecated in favor of flakes in Determinate Nix. \ +For a guide on Nix flakes, see: https://zero-to-nix.com/. \ +For details and to offer feedback on the deprecation process, see: https://github.com/DeterminateSystems/nix-src/issues/34."); + { // Figure out the name of the `.nix-channels' file to use auto home = getHome(); From 17a40e5195705316468fd795ec78b5ec38496911 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 25 Apr 2025 11:22:21 +0200 Subject: [PATCH 355/361] Warn about the use of channel URLs --- src/libexpr/eval-settings.cc | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/src/libexpr/eval-settings.cc b/src/libexpr/eval-settings.cc index 659c01a9e63..85ec9881669 100644 --- a/src/libexpr/eval-settings.cc +++ b/src/libexpr/eval-settings.cc @@ -84,9 +84,14 @@ bool EvalSettings::isPseudoUrl(std::string_view s) std::string EvalSettings::resolvePseudoUrl(std::string_view url) { - if (hasPrefix(url, "channel:")) + if (hasPrefix(url, "channel:")) { + static bool haveWarned = false; + warnOnce(haveWarned, + "Channels are deprecated in favor of flakes in Determinate Nix. " + "For a guide on Nix flakes, see: https://zero-to-nix.com/. 
" + "For details and to offer feedback on the deprecation process, see: https://github.com/DeterminateSystems/nix-src/issues/34."); return "https://nixos.org/channels/" + std::string(url.substr(8)) + "/nixexprs.tar.xz"; - else + } else return std::string(url); } @@ -103,4 +108,4 @@ Path getNixDefExpr() : getHome() + "/.nix-defexpr"; } -} // namespace nix \ No newline at end of file +} // namespace nix From 797c716f746fe1474600a5836042b598b8e6f20d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 25 Apr 2025 16:05:17 +0200 Subject: [PATCH 356/361] Suggest fix --- src/libexpr/eval-settings.cc | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/libexpr/eval-settings.cc b/src/libexpr/eval-settings.cc index 85ec9881669..8fbe94aef19 100644 --- a/src/libexpr/eval-settings.cc +++ b/src/libexpr/eval-settings.cc @@ -85,12 +85,15 @@ bool EvalSettings::isPseudoUrl(std::string_view s) std::string EvalSettings::resolvePseudoUrl(std::string_view url) { if (hasPrefix(url, "channel:")) { + auto realUrl = "https://nixos.org/channels/" + std::string(url.substr(8)) + "/nixexprs.tar.xz"; static bool haveWarned = false; warnOnce(haveWarned, "Channels are deprecated in favor of flakes in Determinate Nix. " + "Instead of '%s', use '%s'. " "For a guide on Nix flakes, see: https://zero-to-nix.com/. " - "For details and to offer feedback on the deprecation process, see: https://github.com/DeterminateSystems/nix-src/issues/34."); - return "https://nixos.org/channels/" + std::string(url.substr(8)) + "/nixexprs.tar.xz"; + "For details and to offer feedback on the deprecation process, see: https://github.com/DeterminateSystems/nix-src/issues/34.", + url, realUrl); + return realUrl; } else return std::string(url); } From ca1b2dc6179f0a4d04f5ed117df9df1f04b38274 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 25 Apr 2025 21:16:27 +0200 Subject: [PATCH 357/361] Warn against the use of indirect flakerefs in flake inputs --- src/libflake/flake/flake.cc | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/src/libflake/flake/flake.cc b/src/libflake/flake/flake.cc index 6ea9626b900..0c219e26787 100644 --- a/src/libflake/flake/flake.cc +++ b/src/libflake/flake/flake.cc @@ -740,6 +740,27 @@ LockedFlake lockFlake( use --no-write-lock-file. */ auto ref = (input2.ref && explicitCliOverrides.contains(inputAttrPath)) ? *input2.ref : *input.ref; + /* Warn against the use of indirect flakerefs + (but only at top-level since we don't want + to annoy users about flakes that are not + under their control). */ + auto warnRegistry = [&](const FlakeRef & resolvedRef) + { + if (inputAttrPath.size() == 1 && !input.ref->input.isDirect()) { + std::ostringstream s; + printLiteralString(s, resolvedRef.to_string()); + warn( + "Flake input '%1%' uses the flake registry. " + "Using the registry in flake inputs is deprecated. " + "To make your flake future-proof, add the following to '%2%':\n" + "\n" + " inputs.%1%.url = %3%;", + inputAttrPathS, + flake.path, + s.str()); + } + }; + if (input.isFlake) { auto inputFlake = getInputFlake(*input.ref); @@ -771,6 +792,8 @@ LockedFlake lockFlake( oldLock ? followsPrefix : inputAttrPath, inputFlake.path, false); + + warnRegistry(inputFlake.resolvedRef); } else { @@ -783,6 +806,8 @@ LockedFlake lockFlake( auto [accessor, resolvedRef, lockedRef] = fetchOrSubstituteTree( state, *input.ref, useRegistries, flakeCache); + warnRegistry(resolvedRef); + // FIXME: allow input to be lazy. 
auto storePath = copyInputToStore(state, lockedRef.input, input.ref->input, accessor); From a9c1751e2f9d52304db452de86466892aa4fad03 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 25 Apr 2025 21:51:32 +0200 Subject: [PATCH 358/361] Update src/libflake/flake/flake.cc Co-authored-by: Graham Christensen --- src/libflake/flake/flake.cc | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/libflake/flake/flake.cc b/src/libflake/flake/flake.cc index 0c219e26787..3eb1333d5c6 100644 --- a/src/libflake/flake/flake.cc +++ b/src/libflake/flake/flake.cc @@ -751,10 +751,12 @@ LockedFlake lockFlake( printLiteralString(s, resolvedRef.to_string()); warn( "Flake input '%1%' uses the flake registry. " - "Using the registry in flake inputs is deprecated. " + "Using the registry in flake inputs is deprecated in Determinate Nix. " "To make your flake future-proof, add the following to '%2%':\n" "\n" - " inputs.%1%.url = %3%;", + " inputs.%1%.url = %3%;\n" + "\n" + "For more information, see: https://github.com/DeterminateSystems/nix-src/issues/37", inputAttrPathS, flake.path, s.str()); From 050e2e07bc147e7a3f1901569ea16c5278d5c482 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 25 Apr 2025 20:35:32 +0000 Subject: [PATCH 359/361] Prepare release v3.4.0 From 48e976af6314609df071b28847bf2d4d5aa7f4c2 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 25 Apr 2025 20:35:35 +0000 Subject: [PATCH 360/361] Set .version-determinate to 3.4.0 --- .version-determinate | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version-determinate b/.version-determinate index bea438e9ade..18091983f59 100644 --- a/.version-determinate +++ b/.version-determinate @@ -1 +1 @@ -3.3.1 +3.4.0 From 173c742afcd96e621a83abd08480e78a56692ad0 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 25 Apr 2025 16:56:57 -0400 Subject: [PATCH 361/361] Prep 3.4.0 release notes (#40) --- doc/manual/source/SUMMARY.md.in | 1 + .../release-notes-determinate/changes.md | 8 ++- .../release-notes-determinate/rl-3.4.0.md | 50 +++++++++++++++++++ 3 files changed, 58 insertions(+), 1 deletion(-) create mode 100644 doc/manual/source/release-notes-determinate/rl-3.4.0.md diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index 0e1ff7f8455..1492abb62d9 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -128,6 +128,7 @@ - [Contributing](development/contributing.md) - [Determinate Nix Release Notes](release-notes-determinate/index.md) - [Changes between Nix and Determinate Nix](release-notes-determinate/changes.md) + - [Release 3.4.0 (2025-04-25)](release-notes-determinate/rl-3.4.0.md) - [Release 3.3.0 (2025-04-11)](release-notes-determinate/rl-3.3.0.md) - [Release 3.1.0 (2025-03-27)](release-notes-determinate/rl-3.1.0.md) - [Release 3.0.0 (2025-03-04)](release-notes-determinate/rl-3.0.0.md) diff --git a/doc/manual/source/release-notes-determinate/changes.md b/doc/manual/source/release-notes-determinate/changes.md index 4e5316708af..f0cc1af5463 100644 --- a/doc/manual/source/release-notes-determinate/changes.md +++ b/doc/manual/source/release-notes-determinate/changes.md @@ -1,6 +1,6 @@ # Changes between Nix and Determinate Nix -This section lists the differences between upstream Nix 2.24 and Determinate Nix 3.3.0. +This section lists the differences between upstream Nix 2.24 and Determinate Nix 3.4.0. 
* In Determinate Nix, flakes are stable. You no longer need to enable the `flakes` experimental feature. @@ -9,3 +9,9 @@ This section lists the differences between upstream Nix 2.24 and Determinate Nix * Determinate Nix has a setting [`json-log-path`](@docroot@/command-ref/conf-file.md#conf-json-log-path) to send a copy of all Nix log messages (in JSON format) to a file or Unix domain socket. * Determinate Nix has made `nix profile install` an alias to `nix profile add`, a more symmetrical antonym of `nix profile remove`. + +* `nix-channel` and `channel:` url syntax (like `channel:nixos-24.11`) is deprecated, see: https://github.com/DeterminateSystems/nix-src/issues/34 + +* Using indirect flake references and implicit inputs is deprecated, see: https://github.com/DeterminateSystems/nix-src/issues/37 + +* Warnings around "dirty trees" are updated to reduce "dirty" jargon, and now refers to "uncommitted changes". diff --git a/doc/manual/source/release-notes-determinate/rl-3.4.0.md b/doc/manual/source/release-notes-determinate/rl-3.4.0.md new file mode 100644 index 00000000000..24ae03ca554 --- /dev/null +++ b/doc/manual/source/release-notes-determinate/rl-3.4.0.md @@ -0,0 +1,50 @@ +# Release 3.4.0 (2025-04-25) + +* Based on [upstream Nix 2.28.2](../release-notes/rl-2.28.md). + +* **Warn users that `nix-channel` is deprecated.** + +This is the first change accomplishing our roadmap item of deprecating Nix channels: https://github.com/DeterminateSystems/nix-src/issues/34 + +This is due to user confusion and surprising behavior of channels, especially in the context of user vs. root channels. + +The goal of this change is to make the user experience of Nix more predictable. +In particular, these changes are to support users with lower levels of experience who are following guides that focus on channels as the mechanism of distribution. + +Users will now see this message: + +> nix-channel is deprecated in favor of flakes in Determinate Nix. For a guide on Nix flakes, see: https://zero-to-nix.com/. For details and to offer feedback on the deprecation process, see: https://github.com/DeterminateSystems/nix-src/issues/34. + + +* **Warn users that `channel:` URLs are deprecated.** + +This is the second change regarding our deprecation of Nix channels. +Using a `channel:` URL (like `channel:nixos-24.11`) will yield a warning like this: + +> Channels are deprecated in favor of flakes in Determinate Nix. Instead of 'channel:nixos-24.11', use 'https://nixos.org/channels/nixos-24.11/nixexprs.tar.xz'. For a guide on Nix flakes, see: https://zero-to-nix.com/. For details and to offer feedback on the deprecation process, see: https://github.com/DeterminateSystems/nix-src/issues/34. + +* **Warn users against indirect flake references in `flake.nix` inputs** + +This is the first change accomplishing our roadmap item of deprecating implicit and indirect flake inputs: https://github.com/DeterminateSystems/nix-src/issues/37 + +The flake registry provides an important UX affordance for using Nix flakes and remote sources in command line uses. +For that reason, the registry is not being deprecated entirely and will still be used for command-line incantations, like nix run. + +This move will eliminate user confusion and surprising behavior around global and local registries during flake input resolution. + +The goal of this change is to make the user experience of Nix more predictable. +We have seen a pattern of confusion when using automatic flake inputs and local registries. 
+Specifically, users' flake inputs resolve and lock inconsistently depending on the configuration of the host system. + +Users will now see the following warning if their flake.nix uses an implicit or indirect Flake reference input: + +> Flake input 'nixpkgs' uses the flake registry. Using the registry in flake inputs is deprecated in Determinate Nix. To make your flake future-proof, add the following to 'xxx/flake.nix': +> +> inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11"; +> +> For more information, see: https://github.com/DeterminateSystems/nix-src/issues/37 + + +### Other updates: +* Improve the "dirty tree" message. Determinate Nix will now say `Git tree '...' has uncommitted changes` instead of `Git tree '...' is dirty` +* Stop warning about uncommitted changes in a Git repository when using `nix develop`
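
For readers making the migration that the warning above recommends, here is a minimal sketch of a `flake.nix` with an explicitly pinned input (assuming a flake that previously relied on the implicit `nixpkgs` registry entry; the `nixos-24.11` branch comes from the example quoted in the warning, while the `x86_64-linux` system and the `hello` package are illustrative values only):

    {
      description = "Sketch of a flake with an explicit, registry-free input";

      # Spelling out the full URL keeps resolution from going through the
      # flake registry, so every machine locks the same nixpkgs source.
      inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11";

      outputs = { self, nixpkgs }: {
        # Illustrative output; anything built from the pinned input works here.
        packages.x86_64-linux.default = nixpkgs.legacyPackages.x86_64-linux.hello;
      };
    }

After updating `flake.nix` this way, running `nix flake lock` (or any build command) records the resolved revision in `flake.lock`, and the registry deprecation warning for that input no longer appears.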