diff --git a/.coderabbit.yaml b/.coderabbit.yaml new file mode 100644 index 00000000000..2220649caa2 --- /dev/null +++ b/.coderabbit.yaml @@ -0,0 +1,18 @@ +# yaml-language-server: $schema=https://coderabbit.ai/integrations/schema.v2.json +# Disable CodeRabbit auto-review to prevent verbose comments on PRs. +# When enabled: false, CodeRabbit won't attempt reviews and won't post +# "Review skipped" or other automated comments. +reviews: + auto_review: + enabled: false + review_status: false + high_level_summary: false + poem: false + sequence_diagrams: false + changed_files_summary: false + tools: + github-checks: + enabled: false +chat: + art: false + auto_reply: false diff --git a/.github/actions/install-nix-action/action.yaml b/.github/actions/install-nix-action/action.yaml index c299b39565b..00d02d6a2f2 100644 --- a/.github/actions/install-nix-action/action.yaml +++ b/.github/actions/install-nix-action/action.yaml @@ -4,15 +4,29 @@ inputs: dogfood: description: "Whether to use Nix installed from the latest artifact from master branch" required: true # Be explicit about the fact that we are using unreleased artifacts + experimental-installer: + description: "Whether to use the experimental installer to install Nix" + default: false + experimental-installer-version: + description: "Version of the experimental installer to use. If `latest`, the newest artifact from the default branch is used." + # TODO: This should probably be pinned to a release after https://github.com/NixOS/experimental-nix-installer/pull/49 lands in one + default: "latest" extra_nix_config: description: "Gets appended to `/etc/nix/nix.conf` if passed." 
install_url: description: "URL of the Nix installer" required: false - default: "https://releases.nixos.org/nix/nix-2.30.2/install" + default: "https://releases.nixos.org/nix/nix-2.32.1/install" + tarball_url: + description: "URL of the Nix tarball to use with the experimental installer" + required: false github_token: description: "Github token" required: true + use_cache: + description: "Whether to setup magic-nix-cache" + default: true + required: false runs: using: "composite" steps: @@ -37,14 +51,81 @@ runs: gh run download "$RUN_ID" --repo "$DOGFOOD_REPO" -n "$INSTALLER_ARTIFACT" -D "$INSTALLER_DOWNLOAD_DIR" echo "installer-path=file://$INSTALLER_DOWNLOAD_DIR" >> "$GITHUB_OUTPUT" + TARBALL_PATH="$(find "$INSTALLER_DOWNLOAD_DIR" -name 'nix*.tar.xz' -print | head -n 1)" + echo "tarball-path=file://$TARBALL_PATH" >> "$GITHUB_OUTPUT" echo "::notice ::Dogfooding Nix installer from master (https://github.com/$DOGFOOD_REPO/actions/runs/$RUN_ID)" env: GH_TOKEN: ${{ inputs.github_token }} DOGFOOD_REPO: "NixOS/nix" + - name: "Gather system info for experimental installer" + shell: bash + if: ${{ inputs.experimental-installer == 'true' }} + run: | + echo "::notice Using experimental installer from $EXPERIMENTAL_INSTALLER_REPO (https://github.com/$EXPERIMENTAL_INSTALLER_REPO)" + + if [ "$RUNNER_OS" == "Linux" ]; then + EXPERIMENTAL_INSTALLER_SYSTEM="linux" + echo "EXPERIMENTAL_INSTALLER_SYSTEM=$EXPERIMENTAL_INSTALLER_SYSTEM" >> "$GITHUB_ENV" + elif [ "$RUNNER_OS" == "macOS" ]; then + EXPERIMENTAL_INSTALLER_SYSTEM="darwin" + echo "EXPERIMENTAL_INSTALLER_SYSTEM=$EXPERIMENTAL_INSTALLER_SYSTEM" >> "$GITHUB_ENV" + else + echo "::error ::Unsupported RUNNER_OS: $RUNNER_OS" + exit 1 + fi + + if [ "$RUNNER_ARCH" == "X64" ]; then + EXPERIMENTAL_INSTALLER_ARCH=x86_64 + echo "EXPERIMENTAL_INSTALLER_ARCH=$EXPERIMENTAL_INSTALLER_ARCH" >> "$GITHUB_ENV" + elif [ "$RUNNER_ARCH" == "ARM64" ]; then + EXPERIMENTAL_INSTALLER_ARCH=aarch64 + echo 
"EXPERIMENTAL_INSTALLER_ARCH=$EXPERIMENTAL_INSTALLER_ARCH" >> "$GITHUB_ENV" + else + echo "::error ::Unsupported RUNNER_ARCH: $RUNNER_ARCH" + exit 1 + fi + + echo "EXPERIMENTAL_INSTALLER_ARTIFACT=nix-installer-$EXPERIMENTAL_INSTALLER_ARCH-$EXPERIMENTAL_INSTALLER_SYSTEM" >> "$GITHUB_ENV" + env: + EXPERIMENTAL_INSTALLER_REPO: "NixOS/experimental-nix-installer" + - name: "Download latest experimental installer" + shell: bash + id: download-latest-experimental-installer + if: ${{ inputs.experimental-installer == 'true' && inputs.experimental-installer-version == 'latest' }} + run: | + RUN_ID=$(gh run list --repo "$EXPERIMENTAL_INSTALLER_REPO" --workflow ci.yml --branch main --status success --json databaseId --jq ".[0].databaseId") + + EXPERIMENTAL_INSTALLER_DOWNLOAD_DIR="$GITHUB_WORKSPACE/$EXPERIMENTAL_INSTALLER_ARTIFACT" + mkdir -p "$EXPERIMENTAL_INSTALLER_DOWNLOAD_DIR" + + gh run download "$RUN_ID" --repo "$EXPERIMENTAL_INSTALLER_REPO" -n "$EXPERIMENTAL_INSTALLER_ARTIFACT" -D "$EXPERIMENTAL_INSTALLER_DOWNLOAD_DIR" + # Executable permissions are lost in artifacts + find $EXPERIMENTAL_INSTALLER_DOWNLOAD_DIR -type f -exec chmod +x {} + + echo "installer-path=$EXPERIMENTAL_INSTALLER_DOWNLOAD_DIR" >> "$GITHUB_OUTPUT" + env: + GH_TOKEN: ${{ inputs.github_token }} + EXPERIMENTAL_INSTALLER_REPO: "NixOS/experimental-nix-installer" - uses: cachix/install-nix-action@c134e4c9e34bac6cab09cf239815f9339aaaf84e # v31.5.1 + if: ${{ inputs.experimental-installer != 'true' }} with: # Ternary operator in GHA: https://www.github.com/actions/runner/issues/409#issuecomment-752775072 install_url: ${{ inputs.dogfood == 'true' && format('{0}/install', steps.download-nix-installer.outputs.installer-path) || inputs.install_url }} install_options: ${{ inputs.dogfood == 'true' && format('--tarball-url-prefix {0}', steps.download-nix-installer.outputs.installer-path) || '' }} extra_nix_config: ${{ inputs.extra_nix_config }} + - uses: 
DeterminateSystems/nix-installer-action@786fff0690178f1234e4e1fe9b536e94f5433196 # v20 + if: ${{ inputs.experimental-installer == 'true' }} + with: + diagnostic-endpoint: "" + # TODO: It'd be nice to use `artifacts.nixos.org` for both of these, maybe through an `/experimental-installer/latest` endpoint? or `/commit/`? + local-root: ${{ inputs.experimental-installer-version == 'latest' && steps.download-latest-experimental-installer.outputs.installer-path || '' }} + source-url: ${{ inputs.experimental-installer-version != 'latest' && format('https://artifacts.nixos.org/experimental-installer/tag/{0}/{1}', inputs.experimental-installer-version, env.EXPERIMENTAL_INSTALLER_ARTIFACT) || '' }} + nix-package-url: ${{ inputs.dogfood == 'true' && steps.download-nix-installer.outputs.tarball-path || (inputs.tarball_url || '') }} + extra-conf: ${{ inputs.extra_nix_config }} + - uses: DeterminateSystems/magic-nix-cache-action@565684385bcd71bad329742eefe8d12f2e765b39 # v13 + if: ${{ inputs.use_cache == 'true' }} + with: + diagnostic-endpoint: '' + use-flakehub: false + use-gha-cache: true + source-revision: 92d9581367be2233c2d5714a2640e1339f4087d8 # main diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml new file mode 100644 index 00000000000..5ad073785cd --- /dev/null +++ b/.github/workflows/backport.yml @@ -0,0 +1,37 @@ +name: Backport +on: + pull_request_target: + types: [closed, labeled] +permissions: + contents: read +jobs: + backport: + name: Backport Pull Request + permissions: + # for korthout/backport-action + contents: write + pull-requests: write + if: github.repository_owner == 'NixOS' && github.event.pull_request.merged == true && (github.event_name != 'labeled' || startsWith(github.event.label.name, 'backport')) + runs-on: ubuntu-24.04-arm + steps: + - name: Generate GitHub App token + id: generate-token + uses: actions/create-github-app-token@v2 + with: + app-id: ${{ vars.CI_APP_ID }} + private-key: ${{ secrets.CI_APP_PRIVATE_KEY }} + - 
uses: actions/checkout@v6 + with: + ref: ${{ github.event.pull_request.head.sha }} + # required to find all branches + fetch-depth: 0 + - name: Create backport PRs + uses: korthout/backport-action@d07416681cab29bf2661702f925f020aaa962997 # v3.4.1 + id: backport + with: + # Config README: https://github.com/korthout/backport-action#backport-action + github_token: ${{ steps.generate-token.outputs.token }} + github_workspace: ${{ github.workspace }} + auto_merge_enabled: true + pull_description: |- + Automatic backport to `${target_branch}`, triggered by a label in #${pull_number}. diff --git a/.mergify.yml b/.mergify.yml deleted file mode 100644 index 1c220045aba..00000000000 --- a/.mergify.yml +++ /dev/null @@ -1,174 +0,0 @@ -queue_rules: - - name: default - # all required tests need to go here - merge_conditions: - - check-success=tests on macos - - check-success=tests on ubuntu - - check-success=installer test on macos - - check-success=installer test on ubuntu - - check-success=vm_tests - batch_size: 5 - -pull_request_rules: - - name: merge using the merge queue - conditions: - - base~=master|.+-maintenance - - label~=merge-queue|dependencies - actions: - queue: {} - -# The rules below will first create backport pull requests and put those in a merge queue. 
- - - name: backport patches to 2.18 - conditions: - - label=backport 2.18-maintenance - actions: - backport: - branches: - - 2.18-maintenance - labels: - - automatic backport - - merge-queue - - - name: backport patches to 2.19 - conditions: - - label=backport 2.19-maintenance - actions: - backport: - branches: - - 2.19-maintenance - labels: - - automatic backport - - merge-queue - - - name: backport patches to 2.20 - conditions: - - label=backport 2.20-maintenance - actions: - backport: - branches: - - 2.20-maintenance - labels: - - automatic backport - - merge-queue - - - name: backport patches to 2.21 - conditions: - - label=backport 2.21-maintenance - actions: - backport: - branches: - - 2.21-maintenance - labels: - - automatic backport - - merge-queue - - - name: backport patches to 2.22 - conditions: - - label=backport 2.22-maintenance - actions: - backport: - branches: - - 2.22-maintenance - labels: - - automatic backport - - merge-queue - - - name: backport patches to 2.23 - conditions: - - label=backport 2.23-maintenance - actions: - backport: - branches: - - 2.23-maintenance - labels: - - automatic backport - - merge-queue - - - name: backport patches to 2.24 - conditions: - - label=backport 2.24-maintenance - actions: - backport: - branches: - - "2.24-maintenance" - labels: - - automatic backport - - merge-queue - - - name: backport patches to 2.25 - conditions: - - label=backport 2.25-maintenance - actions: - backport: - branches: - - "2.25-maintenance" - labels: - - automatic backport - - merge-queue - - - name: backport patches to 2.26 - conditions: - - label=backport 2.26-maintenance - actions: - backport: - branches: - - "2.26-maintenance" - labels: - - automatic backport - - merge-queue - - - name: backport patches to 2.27 - conditions: - - label=backport 2.27-maintenance - actions: - backport: - branches: - - "2.27-maintenance" - labels: - - automatic backport - - merge-queue - - - name: backport patches to 2.28 - conditions: - - label=backport 
2.28-maintenance - actions: - backport: - branches: - - "2.28-maintenance" - labels: - - automatic backport - - merge-queue - - - name: backport patches to 2.29 - conditions: - - label=backport 2.29-maintenance - actions: - backport: - branches: - - "2.29-maintenance" - labels: - - automatic backport - - merge-queue - - - name: backport patches to 2.30 - conditions: - - label=backport 2.30-maintenance - actions: - backport: - branches: - - "2.30-maintenance" - labels: - - automatic backport - - merge-queue - - - name: backport patches to 2.31 - conditions: - - label=backport 2.31-maintenance - actions: - backport: - branches: - - "2.31-maintenance" - labels: - - automatic backport - - merge-queue diff --git a/.version b/.version index fb3a0677958..3afbaeb2b33 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.32.4 +2.33.0 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 7231730bb75..9c170ae4a77 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -94,6 +94,8 @@ The underlying source files are located in [`doc/manual/source`](./doc/manual/so For small changes you can [use GitHub to edit these files](https://docs.github.com/en/repositories/working-with-files/managing-files/editing-files) For larger changes see the [Nix reference manual](https://nix.dev/manual/nix/development/development/contributing.html). +You're encouraged to add line breaks at semantic boundaries, per [sembr](https://sembr.org). + ## Getting help Whenever you're stuck or do not know how to proceed, you can always ask for help. 
diff --git a/ci/gha/tests/default.nix b/ci/gha/tests/default.nix index e181ee67024..c179174e6e2 100644 --- a/ci/gha/tests/default.nix +++ b/ci/gha/tests/default.nix @@ -21,16 +21,6 @@ let packages' = nixFlake.packages.${system}; stdenv = (getStdenv pkgs); - enableSanitizersLayer = finalAttrs: prevAttrs: { - mesonFlags = - (prevAttrs.mesonFlags or [ ]) - ++ [ (lib.mesonOption "b_sanitize" "address,undefined") ] - ++ (lib.optionals stdenv.cc.isClang [ - # https://www.github.com/mesonbuild/meson/issues/764 - (lib.mesonBool "b_lundef" false) - ]); - }; - collectCoverageLayer = finalAttrs: prevAttrs: { env = let @@ -53,14 +43,15 @@ let ''; }; - componentOverrides = - (lib.optional withSanitizers enableSanitizersLayer) - ++ (lib.optional withCoverage collectCoverageLayer); + componentOverrides = (lib.optional withCoverage collectCoverageLayer); in rec { nixComponentsInstrumented = nixComponents.overrideScope ( final: prev: { + withASan = withSanitizers; + withUBSan = withSanitizers; + nix-store-tests = prev.nix-store-tests.override { withBenchmarks = true; }; # Boehm is incompatible with ASAN. nix-expr = prev.nix-expr.override { enableGC = !withSanitizers; }; @@ -71,6 +62,14 @@ rec { } ); + # Import NixOS tests using the instrumented components + nixosTests = import ../../../tests/nixos { + inherit lib pkgs; + nixComponents = nixComponentsInstrumented; + nixpkgs = nixFlake.inputs.nixpkgs; + inherit (nixFlake.inputs) nixpkgs-23-11; + }; + /** Top-level tests for the flake outputs, as they would be built by hydra. These tests generally can't be overridden to run with sanitizers. 
@@ -107,15 +106,33 @@ rec { }; }; + disable = + let + inherit (pkgs.stdenv) hostPlatform; + in + args@{ + pkgName, + testName, + test, + }: + lib.any (b: b) [ + # FIXME: Nix manual is impure and does not produce all settings on darwin + (hostPlatform.isDarwin && pkgName == "nix-manual" && testName == "linkcheck") + ]; + componentTests = (lib.concatMapAttrs ( pkgName: pkg: - lib.concatMapAttrs (testName: test: { - "${componentTestsPrefix}${pkgName}-${testName}" = test; - }) (pkg.tests or { }) + lib.concatMapAttrs ( + testName: test: + lib.optionalAttrs (!disable { inherit pkgName testName test; }) { + "${componentTestsPrefix}${pkgName}-${testName}" = test; + } + ) (pkg.tests or { }) ) nixComponentsInstrumented) // lib.optionalAttrs (pkgs.stdenv.hostPlatform == pkgs.stdenv.buildPlatform) { "${componentTestsPrefix}nix-functional-tests" = nixComponentsInstrumented.nix-functional-tests; + "${componentTestsPrefix}nix-json-schema-checks" = nixComponentsInstrumented.nix-json-schema-checks; }; codeCoverage = @@ -220,4 +237,20 @@ rec { { inherit coverageProfileDrvs mergedProfdata coverageReports; }; + + vmTests = { + inherit (nixosTests) s3-binary-cache-store; + } + // lib.optionalAttrs (!withSanitizers && !withCoverage) { + # evalNixpkgs uses non-instrumented components from hydraJobs, so only run it + # when not testing with sanitizers to avoid rebuilding nix + inherit (hydraJobs.tests) evalNixpkgs; + # FIXME: CI times out when building vm tests instrumented + inherit (nixosTests) + functional_user + githubFlakes + nix-docker + tarballFlakes + ; + }; } diff --git a/doc/manual/anchors.jq b/doc/manual/anchors.jq index 72309779c34..8a48017c1e7 100755 --- a/doc/manual/anchors.jq +++ b/doc/manual/anchors.jq @@ -3,7 +3,7 @@ def transform_anchors_html: - . | gsub($empty_anchor_regex; "") + . 
| gsub($empty_anchor_regex; "") | gsub($anchor_regex; "" + .text + ""); @@ -24,8 +24,15 @@ def map_contents_recursively(transformer): def process_command: .[0] as $context | .[1] as $body | - $body + { - sections: $body.sections | map(map_contents_recursively(if $context.renderer == "html" then transform_anchors_html else transform_anchors_strip end)), - }; + # mdbook 0.5.x uses 'items' instead of 'sections' + if $body.items then + $body + { + items: $body.items | map(map_contents_recursively(if $context.renderer == "html" then transform_anchors_html else transform_anchors_strip end)), + } + else + $body + { + sections: $body.sections | map(map_contents_recursively(if $context.renderer == "html" then transform_anchors_html else transform_anchors_strip end)), + } + end; process_command diff --git a/doc/manual/book.toml.in b/doc/manual/book.toml.in index f3fd2722f3c..11efca75f11 100644 --- a/doc/manual/book.toml.in +++ b/doc/manual/book.toml.in @@ -7,6 +7,7 @@ additional-css = ["custom.css"] additional-js = ["redirects.js"] edit-url-template = "https://github.com/DeterminateSystems/nix-src/tree/master/doc/manual/{path}" git-repository-url = "https://github.com/DeterminateSystems/nix-src" +mathjax-support = true # Handles replacing @docroot@ with a path to ./source relative to that markdown file, # {{#include handlebars}}, and the @generated@ syntax used within these. it mostly @@ -23,12 +24,3 @@ renderers = ["html"] command = "jq --from-file ./anchors.jq" [output.markdown] - -[output.linkcheck] -# no Internet during the build (in the sandbox) -follow-web-links = false - -# mdbook-linkcheck does not understand [foo]{#bar} style links, resulting in -# excessive "Potential incomplete link" warnings. No other kind of warning was -# produced at the time of writing. 
-warning-policy = "ignore" diff --git a/doc/manual/expand-includes.py b/doc/manual/expand-includes.py new file mode 100644 index 00000000000..59c687f2324 --- /dev/null +++ b/doc/manual/expand-includes.py @@ -0,0 +1,223 @@ +#!/usr/bin/env python3 +""" +Standalone markdown preprocessor for manpage generation. + +Expands {{#include}} directives and handles @docroot@ references +without requiring mdbook. +""" + +from pathlib import Path +import sys +import argparse +import re + + +def expand_includes( + content: str, + current_file: Path, + source_root: Path, + generated_root: Path | None, + visited: set[Path] | None = None, +) -> str: + """ + Recursively expand {{#include path}} directives. + + Args: + content: Markdown content to process + current_file: Path to the current file (for resolving relative includes) + source_root: Root of the source directory + generated_root: Root of generated files (for @generated@/ includes) + visited: Set of already-visited files (for cycle detection) + """ + if visited is None: + visited = set() + + # Track current file to detect cycles + visited.add(current_file.resolve()) + + lines = [] + include_pattern = re.compile(r'^\s*\{\{#include\s+(.+?)\}\}\s*$') + + for line in content.splitlines(keepends=True): + match = include_pattern.match(line) + if not match: + lines.append(line) + continue + + # Found an include directive + include_path_str = match.group(1).strip() + + # Resolve the include path + if include_path_str.startswith("@generated@/"): + # Generated file + if generated_root is None: + raise ValueError( + f"Cannot resolve @generated@ path '{include_path_str}' " + f"without --generated-root" + ) + include_path = generated_root / include_path_str[12:] + else: + # Relative to current file + include_path = (current_file.parent / include_path_str).resolve() + + # Check for cycles + if include_path.resolve() in visited: + raise RuntimeError( + f"Include cycle detected: {include_path} is already being processed" + ) + + # Check that 
file exists + if not include_path.exists(): + raise FileNotFoundError( + f"Include file not found: {include_path_str}\n" + f" Resolved to: {include_path}\n" + f" From: {current_file}" + ) + + # Recursively expand the included file + included_content = include_path.read_text() + expanded = expand_includes( + included_content, + include_path, + source_root, + generated_root, + visited.copy(), # Copy visited set for this branch + ) + lines.append(expanded) + # Add newline if the included content doesn't end with one + if not expanded.endswith('\n'): + lines.append('\n') + + return ''.join(lines) + + +def resolve_docroot(content: str, current_file: Path, source_root: Path, docroot_url: str) -> str: + """ + Replace @docroot@ with nix.dev URL and convert .md to .html. + + For manpages, absolute URLs are more useful than relative paths since + manpages are viewed standalone. lowdown will display these as proper + references in the manpage output. + """ + # Replace @docroot@ with the base URL + content = content.replace("@docroot@", docroot_url) + + # Convert .md extensions to .html for web links + # Use lookahead to ensure that .md occurs before a fragment or a possible URL end. 
+ content = re.sub( + r'(https://nix\.dev/[^)\s]*?)\.md(?=[#)\s]|$)', + r'\1.html', + content + ) + + return content + + +def resolve_at_escapes(content: str) -> str: + """Replace @_at_ with @""" + return content.replace("@_at_", "@") + + +def process_file( + input_file: Path, + source_root: Path, + generated_root: Path | None, + docroot_url: str, +) -> str: + """Process a single markdown file.""" + content = input_file.read_text() + + # Expand includes + content = expand_includes(content, input_file, source_root, generated_root) + + # Resolve @docroot@ references + content = resolve_docroot(content, input_file, source_root, docroot_url) + + # Resolve @_at_ escapes + content = resolve_at_escapes(content) + + return content + + +def main(): + parser = argparse.ArgumentParser( + description="Expand markdown includes for manpage generation", + formatter_class=argparse.RawDescriptionHelpFormatter, + epilog=""" +Examples: + # Expand a manpage source file + %(prog)s \\ + --source-root doc/manual/source \\ + --generated-root build/doc/manual/source \\ + doc/manual/source/command-ref/nix-store/query.md + + # Pipe to lowdown for manpage generation + %(prog)s -s doc/manual/source -g build/doc/manual/source \\ + doc/manual/source/command-ref/nix-env.md | \\ + lowdown -sT man -M section=1 -o nix-env.1 + """, + ) + parser.add_argument( + "input_file", + type=Path, + help="Input markdown file to process", + ) + parser.add_argument( + "-s", "--source-root", + type=Path, + required=True, + help="Root directory of markdown sources", + ) + parser.add_argument( + "-g", "--generated-root", + type=Path, + help="Root directory of generated files (for @generated@/ includes)", + ) + parser.add_argument( + "-o", "--output", + type=Path, + help="Output file (default: stdout)", + ) + parser.add_argument( + "-u", "--doc-url", + type=str, + default="https://nix.dev/manual/nix/latest", + help="Base URL for documentation links (default: https://nix.dev/manual/nix/latest)", + ) + + args = 
parser.parse_args() + + # Validate paths + if not args.input_file.exists(): + print(f"Error: Input file not found: {args.input_file}", file=sys.stderr) + return 1 + + if not args.source_root.is_dir(): + print(f"Error: Source root is not a directory: {args.source_root}", file=sys.stderr) + return 1 + + if args.generated_root and not args.generated_root.is_dir(): + print(f"Error: Generated root is not a directory: {args.generated_root}", file=sys.stderr) + return 1 + + try: + # Process the file + output = process_file(args.input_file, args.source_root, args.generated_root, args.doc_url) + + # Write output + if args.output: + args.output.write_text(output) + else: + print(output, end='') + + return 0 + + except Exception as e: + print(f"Error processing {args.input_file}: {e}", file=sys.stderr) + import traceback + traceback.print_exc(file=sys.stderr) + return 1 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/doc/manual/generate-redirects.py b/doc/manual/generate-redirects.py new file mode 100644 index 00000000000..c93bb03ef4d --- /dev/null +++ b/doc/manual/generate-redirects.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python3 +"""Generate redirects.js from template and JSON data.""" + +import sys + +template_path, json_path, output_path = sys.argv[1:] + +with open(json_path) as f: + json_content = f.read().rstrip() + +with open(template_path) as f: + template = f.read() + +with open(output_path, 'w') as f: + f.write(template.replace('@REDIRECTS_JSON@', json_content)) diff --git a/doc/manual/meson.build b/doc/manual/meson.build index 7991c8e9993..1b9a325df2a 100644 --- a/doc/manual/meson.build +++ b/doc/manual/meson.build @@ -7,17 +7,22 @@ project( fs = import('fs') +doc_url = 'https://manual.determinate.systems/' + nix = find_program('nix', native : true) -mdbook = find_program('mdbook', native : true) bash = find_program('bash', native : true) -rsync = find_program('rsync', required : true, native : true) + +# HTML manual dependencies (conditional) +if 
get_option('html-manual') + mdbook = find_program('mdbook', native : true) + rsync = find_program('rsync', required : true, native : true) +endif pymod = import('python') python = pymod.find_installation('python3') nix_env_for_docs = { - 'ASAN_OPTIONS' : 'abort_on_error=1:print_summary=1:detect_leaks=0', 'HOME' : '/dummy', 'NIX_CONF_DIR' : '/dummy', 'NIX_SSL_CERT_FILE' : '/dummy/no-ca-bundle.crt', @@ -60,6 +65,24 @@ generate_manual_deps = files( 'generate-deps.py', ) +# Generate redirects.js from template and JSON data +redirects_js = custom_target( + 'redirects.js', + command : [ + python, + '@INPUT0@', + '@INPUT1@', + '@INPUT2@', + '@OUTPUT@', + ], + input : [ + 'generate-redirects.py', + 'redirects.js.in', + 'redirects.json', + ], + output : 'redirects.js', +) + # Generates types subdir('source/store') # Generates builtins.md and builtin-constants.md. @@ -80,63 +103,71 @@ else nix_input = [] endif -manual = custom_target( - 'manual', - command : [ - bash, - '-euo', - 'pipefail', - '-c', - ''' - @0@ @INPUT0@ @CURRENT_SOURCE_DIR@ > @DEPFILE@ - @0@ @INPUT1@ summary @2@ < @CURRENT_SOURCE_DIR@/source/SUMMARY.md.in > @2@/source/SUMMARY.md - sed -e 's|@version@|@3@|g' < @INPUT2@ > @2@/book.toml - @4@ -r --include='*.md' @CURRENT_SOURCE_DIR@/ @2@/ - (cd @2@; RUST_LOG=warn @1@ build -d @2@ 3>&2 2>&1 1>&3) | { grep -Fv "because fragment resolution isn't implemented" || :; } 3>&2 2>&1 1>&3 - rm -rf @2@/manual - mv @2@/html @2@/manual - find @2@/manual -iname meson.build -delete - '''.format( - python.full_path(), - mdbook.full_path(), - meson.current_build_dir(), - fs.read('../../.version-determinate').strip(), - rsync.full_path(), - ), - ], - input : [ - generate_manual_deps, - 'substitute.py', - 'book.toml.in', - 'anchors.jq', - 'custom.css', - nix3_cli_files, - experimental_features_shortlist_md, - experimental_feature_descriptions_md, - types_dir, - conf_file_md, - builtins_md, - rl_next_generated, - summary_rl_next, - nix_input, - ], - output : [ +# HTML manual build 
(conditional) +if get_option('html-manual') + manual = custom_target( 'manual', - 'markdown', - ], - depfile : 'manual.d', - env : { - 'RUST_LOG' : 'info', - 'MDBOOK_SUBSTITUTE_SEARCH' : meson.current_build_dir() / 'source', - }, -) -manual_html = manual[0] -manual_md = manual[1] + command : [ + bash, + '-euo', + 'pipefail', + '-c', + ''' + @0@ @INPUT0@ @CURRENT_SOURCE_DIR@ > @DEPFILE@ + @0@ @INPUT1@ summary @2@ < @CURRENT_SOURCE_DIR@/source/SUMMARY.md.in > @2@/source/SUMMARY.md + sed -e 's|@version@|@3@|g' < @INPUT2@ > @2@/book.toml + @4@ -r -L --exclude='*.drv' --include='*.md' @CURRENT_SOURCE_DIR@/ @2@/ + (cd @2@; RUST_LOG=warn @1@ build -d @2@ 3>&2 2>&1 1>&3) | { grep -Fv "because fragment resolution isn't implemented" || :; } 3>&2 2>&1 1>&3 + rm -rf @2@/manual + mv @2@/html @2@/manual + # Remove Mathjax 2.7, because we will actually use MathJax 3.x + find @2@/manual | grep .html | xargs sed -i -e '/2.7.1.MathJax.js/d' + find @2@/manual -iname meson.build -delete + '''.format( + python.full_path(), + mdbook.full_path(), + meson.current_build_dir(), + fs.read('../../.version-determinate').strip(), + rsync.full_path(), + ), + ], + input : [ + generate_manual_deps, + 'substitute.py', + 'book.toml.in', + 'anchors.jq', + 'custom.css', + redirects_js, + nix3_cli_files, + experimental_features_shortlist_md, + experimental_feature_descriptions_md, + types_dir, + conf_file_md, + builtins_md, + rl_next_generated, + summary_rl_next, + json_schema_generated_files, + nix_input, + ], + output : [ + 'manual', + 'markdown', + ], + depfile : 'manual.d', + build_by_default : true, + env : { + 'RUST_LOG' : 'info', + 'MDBOOK_SUBSTITUTE_SEARCH' : meson.current_build_dir() / 'source', + }, + ) + manual_html = manual[0] + manual_md = manual[1] -install_subdir( - manual_html.full_path(), - install_dir : get_option('datadir') / 'doc/nix', -) + install_subdir( + manual_html.full_path(), + install_dir : get_option('datadir') / 'doc/nix', + ) +endif nix_nested_manpages = [ [ @@ -182,6 
+213,7 @@ nix_nested_manpages = [ ], ] +# Manpage generation (standalone, no mdbook dependency) foreach command : nix_nested_manpages foreach page : command[1] title = command[0] + ' --' + page @@ -189,15 +221,19 @@ foreach command : nix_nested_manpages custom_target( command : [ bash, - files('./render-manpage.sh'), + '@INPUT0@', '--out-no-smarty', title, section, - '@INPUT0@/command-ref' / command[0] / (page + '.md'), + meson.current_source_dir() / 'source', + meson.current_build_dir() / 'source', + doc_url, + meson.current_source_dir() / 'source/command-ref' / command[0] / (page + '.md'), '@OUTPUT0@', ], input : [ - manual_md, + files('./render-manpage.sh'), + files('./expand-includes.py'), nix_input, ], output : command[0] + '-' + page + '.1', @@ -306,14 +342,21 @@ foreach page : nix3_manpages command : [ bash, '@INPUT0@', + # Note: no --out-no-smarty flag (original behavior) page, section, - '@INPUT1@/command-ref/new-cli/@0@.md'.format(page), + meson.current_source_dir() / 'source', + meson.current_build_dir() / 'source', + doc_url, + meson.current_build_dir() / 'source/command-ref/new-cli/@0@.md'.format( + page, + ), '@OUTPUT@', ], input : [ files('./render-manpage.sh'), - manual_md, + files('./expand-includes.py'), + nix3_cli_files, nix_input, ], output : page + '.1', @@ -333,7 +376,12 @@ nix_manpages = [ [ 'nix-channel', 1 ], [ 'nix-hash', 1 ], [ 'nix-copy-closure', 1 ], - [ 'nix.conf', 5, conf_file_md.full_path() ], + [ + 'nix.conf', + 5, + conf_file_md.full_path(), + [ conf_file_md, experimental_features_shortlist_md ], + ], [ 'nix-daemon', 8 ], [ 'nix-profiles', 5, 'files/profiles.md' ], ] @@ -345,19 +393,24 @@ foreach entry : nix_manpages # Therefore we use an optional third element of this array to override the name pattern md_file = entry.get(2, title + '.md') section = entry[1].to_string() - md_file_resolved = join_paths('@INPUT1@/command-ref/', md_file) + input_file = meson.current_source_dir() / 'source/command-ref' / md_file + custom_target( 
command : [ bash, '@INPUT0@', + # Note: no --out-no-smarty flag (original behavior) title, section, - md_file_resolved, + meson.current_source_dir() / 'source', + meson.current_build_dir() / 'source', + doc_url, + input_file, '@OUTPUT@', ], input : [ files('./render-manpage.sh'), - manual_md, + files('./expand-includes.py'), entry.get(3, []), nix_input, ], diff --git a/doc/manual/meson.options b/doc/manual/meson.options new file mode 100644 index 00000000000..1d6b7c73a81 --- /dev/null +++ b/doc/manual/meson.options @@ -0,0 +1,13 @@ +option( + 'official-release', + type : 'boolean', + value : true, + description : 'Whether this is an official release build (affects documentation URLs)', +) + +option( + 'html-manual', + type : 'boolean', + value : true, + description : 'Whether to build the HTML manual (requires mdbook)', +) diff --git a/doc/manual/package.nix b/doc/manual/package.nix index a74ee3f57b2..0b3d8ca940a 100644 --- a/doc/manual/package.nix +++ b/doc/manual/package.nix @@ -1,22 +1,32 @@ { lib, + callPackage, mkMesonDerivation, + runCommand, meson, ninja, lowdown-unsandboxed, mdbook, - mdbook-linkcheck, jq, python3, rsync, nix-cli, changelog-d, + json-schema-for-humans, officialRelease, # Configuration Options version, + /** + Whether to build the HTML manual. + When false, only manpages are built, avoiding the mdbook dependency. + */ + buildHtmlManual ? 
true, + + # `tests` attribute + testers, }: let @@ -33,6 +43,18 @@ mkMesonDerivation (finalAttrs: { (fileset.unions [ ../../.version ../../.version-determinate + # For example JSON + ../../src/libutil-tests/data/memory-source-accessor + ../../src/libutil-tests/data/hash + ../../src/libstore-tests/data/content-address + ../../src/libstore-tests/data/store-path + ../../src/libstore-tests/data/realisation + ../../src/libstore-tests/data/derivation + ../../src/libstore-tests/data/derived-path + ../../src/libstore-tests/data/path-info + ../../src/libstore-tests/data/nar-info + ../../src/libstore-tests/data/build-result + ../../src/libstore-tests/data/dummy-store # Too many different types of files to filter for now ../../doc/manual ./. @@ -41,44 +63,92 @@ mkMesonDerivation (finalAttrs: { ../../doc/manual/package.nix; # TODO the man pages should probably be separate - outputs = [ - "out" - "man" + outputs = + if buildHtmlManual then + [ + "out" + "man" + ] + else + [ "out" ]; # Only one output when HTML manual is disabled; use "out" for manpages + + # When HTML manual is disabled, install manpages to "out" instead of "man" + mesonFlags = [ + (lib.mesonBool "official-release" officialRelease) + (lib.mesonBool "html-manual" buildHtmlManual) + ] + ++ lib.optionals (!buildHtmlManual) [ + "--mandir=${placeholder "out"}/share/man" ]; - # Hack for sake of the dev shell - passthru.externalNativeBuildInputs = [ + nativeBuildInputs = [ + nix-cli meson ninja (lib.getBin lowdown-unsandboxed) - mdbook - mdbook-linkcheck jq python3 + ] + ++ lib.optionals buildHtmlManual [ + mdbook rsync - changelog-d + json-schema-for-humans ] - ++ lib.optionals (!officialRelease) [ + ++ lib.optionals (!officialRelease && buildHtmlManual) [ # When not an official release, we likely have changelog entries that have # yet to be rendered. # When released, these are rendered into a committed file to save a dependency. 
changelog-d ]; - nativeBuildInputs = finalAttrs.passthru.externalNativeBuildInputs ++ [ - nix-cli - ]; - preConfigure = '' chmod u+w ./.version echo ${finalAttrs.version} > ./.version ''; - postInstall = '' + postInstall = lib.optionalString buildHtmlManual '' mkdir -p ''$out/nix-support echo "doc manual ''$out/share/doc/nix/manual" >> ''$out/nix-support/hydra-build-products ''; + passthru = lib.optionalAttrs buildHtmlManual { + /** + The root of the HTML manual. + E.g. "${nix-manual.site}/index.html" exists. + */ + + site = finalAttrs.finalPackage + "/share/doc/nix/manual"; + + tests = + let + redirect-targets = callPackage ./redirect-targets-html.nix { }; + in + { + # https://nixos.org/manual/nixpkgs/stable/index.html#tester-lycheeLinkCheck + linkcheck = testers.lycheeLinkCheck { + site = + let + plain = finalAttrs.finalPackage.site; + in + runCommand "nix-manual-with-redirect-targets" { } '' + cp -r ${plain} $out + chmod -R u+w $out + cp ${redirect-targets}/redirect-targets.html $out/redirect-targets.html + ''; + extraConfig = { + exclude = [ + # Exclude auto-generated JSON schema documentation which has + # auto-generated fragment IDs that don't match the link references + ".*/protocols/json/.*\\.html" + # Exclude undocumented builtins + ".*/language/builtins\\.html#builtins-addErrorContext" + ".*/language/builtins\\.html#builtins-appendContext" + ]; + }; + }; + }; + }; + meta = { platforms = lib.platforms.all; }; diff --git a/doc/manual/redirect-targets-html.nix b/doc/manual/redirect-targets-html.nix new file mode 100644 index 00000000000..b456895b259 --- /dev/null +++ b/doc/manual/redirect-targets-html.nix @@ -0,0 +1,62 @@ +# Generates redirect-targets.html containing all redirect targets for link checking. 
+# Used by: doc/manual/package.nix (passthru.tests.linkcheck) + +{ + stdenv, + lib, + jq, +}: + +stdenv.mkDerivation { + name = "redirect-targets-html"; + + src = lib.fileset.toSource { + root = ./.; + fileset = ./redirects.json; + }; + + nativeBuildInputs = [ jq ]; + + installPhase = '' + mkdir -p $out + + { + echo '' + echo 'Nix Manual Redirect Targets' + echo '

Redirect Targets to Check

' + echo '

This document contains all redirect targets from the Nix manual.

' + + echo '

Client-side redirects (from redirects.json)

' + echo '' + echo '' + } > $out/redirect-targets.html + + echo "Generated redirect targets document with $(grep -c '
  • ' $out/redirect-targets.html) links" + ''; + + meta = { + description = "HTML document listing all Nix manual redirect targets for link checking"; + }; +} diff --git a/doc/manual/redirects.js b/doc/manual/redirects.js deleted file mode 100644 index b2295cf4fc5..00000000000 --- a/doc/manual/redirects.js +++ /dev/null @@ -1,456 +0,0 @@ -// redirect rules for URL fragments (client-side) to prevent link rot. -// this must be done on the client side, as web servers do not see the fragment part of the URL. -// it will only work with JavaScript enabled in the browser, but this is the best we can do here. -// see source/_redirects for path redirects (server-side) - -// redirects are declared as follows: -// each entry has as its key a path matching the requested URL path, relative to the mdBook document root. -// -// IMPORTANT: it must specify the full path with file name and suffix -// -// each entry is itself a set of key-value pairs, where -// - keys are anchors on the matched path. -// - values are redirection targets relative to the current path. 
- -const redirects = { - "index.html": { - "part-advanced-topics": "advanced-topics/index.html", - "chap-tuning-cores-and-jobs": "advanced-topics/cores-vs-jobs.html", - "chap-diff-hook": "advanced-topics/diff-hook.html", - "check-dirs-are-unregistered": "advanced-topics/diff-hook.html#check-dirs-are-unregistered", - "chap-distributed-builds": "command-ref/conf-file.html#conf-builders", - "chap-post-build-hook": "advanced-topics/post-build-hook.html", - "chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats", - "chap-writing-nix-expressions": "language/index.html", - "part-command-ref": "command-ref/index.html", - "conf-allow-import-from-derivation": "command-ref/conf-file.html#conf-allow-import-from-derivation", - "conf-allow-new-privileges": "command-ref/conf-file.html#conf-allow-new-privileges", - "conf-allowed-uris": "command-ref/conf-file.html#conf-allowed-uris", - "conf-allowed-users": "command-ref/conf-file.html#conf-allowed-users", - "conf-auto-optimise-store": "command-ref/conf-file.html#conf-auto-optimise-store", - "conf-binary-cache-public-keys": "command-ref/conf-file.html#conf-binary-cache-public-keys", - "conf-binary-caches": "command-ref/conf-file.html#conf-binary-caches", - "conf-build-compress-log": "command-ref/conf-file.html#conf-build-compress-log", - "conf-build-cores": "command-ref/conf-file.html#conf-build-cores", - "conf-build-extra-chroot-dirs": "command-ref/conf-file.html#conf-build-extra-chroot-dirs", - "conf-build-extra-sandbox-paths": "command-ref/conf-file.html#conf-build-extra-sandbox-paths", - "conf-build-fallback": "command-ref/conf-file.html#conf-build-fallback", - "conf-build-max-jobs": "command-ref/conf-file.html#conf-build-max-jobs", - "conf-build-max-log-size": "command-ref/conf-file.html#conf-build-max-log-size", - "conf-build-max-silent-time": "command-ref/conf-file.html#conf-build-max-silent-time", - "conf-build-timeout": "command-ref/conf-file.html#conf-build-timeout", - 
"conf-build-use-chroot": "command-ref/conf-file.html#conf-build-use-chroot", - "conf-build-use-sandbox": "command-ref/conf-file.html#conf-build-use-sandbox", - "conf-build-use-substitutes": "command-ref/conf-file.html#conf-build-use-substitutes", - "conf-build-users-group": "command-ref/conf-file.html#conf-build-users-group", - "conf-builders": "command-ref/conf-file.html#conf-builders", - "conf-builders-use-substitutes": "command-ref/conf-file.html#conf-builders-use-substitutes", - "conf-compress-build-log": "command-ref/conf-file.html#conf-compress-build-log", - "conf-connect-timeout": "command-ref/conf-file.html#conf-connect-timeout", - "conf-cores": "command-ref/conf-file.html#conf-cores", - "conf-diff-hook": "command-ref/conf-file.html#conf-diff-hook", - "conf-env-keep-derivations": "command-ref/conf-file.html#conf-env-keep-derivations", - "conf-extra-binary-caches": "command-ref/conf-file.html#conf-extra-binary-caches", - "conf-extra-platforms": "command-ref/conf-file.html#conf-extra-platforms", - "conf-extra-sandbox-paths": "command-ref/conf-file.html#conf-extra-sandbox-paths", - "conf-extra-substituters": "command-ref/conf-file.html#conf-extra-substituters", - "conf-fallback": "command-ref/conf-file.html#conf-fallback", - "conf-fsync-metadata": "command-ref/conf-file.html#conf-fsync-metadata", - "conf-gc-keep-derivations": "command-ref/conf-file.html#conf-gc-keep-derivations", - "conf-gc-keep-outputs": "command-ref/conf-file.html#conf-gc-keep-outputs", - "conf-hashed-mirrors": "command-ref/conf-file.html#conf-hashed-mirrors", - "conf-http-connections": "command-ref/conf-file.html#conf-http-connections", - "conf-keep-build-log": "command-ref/conf-file.html#conf-keep-build-log", - "conf-keep-derivations": "command-ref/conf-file.html#conf-keep-derivations", - "conf-keep-env-derivations": "command-ref/conf-file.html#conf-keep-env-derivations", - "conf-keep-outputs": "command-ref/conf-file.html#conf-keep-outputs", - "conf-max-build-log-size": 
"command-ref/conf-file.html#conf-max-build-log-size", - "conf-max-free": "command-ref/conf-file.html#conf-max-free", - "conf-max-jobs": "command-ref/conf-file.html#conf-max-jobs", - "conf-max-silent-time": "command-ref/conf-file.html#conf-max-silent-time", - "conf-min-free": "command-ref/conf-file.html#conf-min-free", - "conf-narinfo-cache-negative-ttl": "command-ref/conf-file.html#conf-narinfo-cache-negative-ttl", - "conf-narinfo-cache-positive-ttl": "command-ref/conf-file.html#conf-narinfo-cache-positive-ttl", - "conf-netrc-file": "command-ref/conf-file.html#conf-netrc-file", - "conf-plugin-files": "command-ref/conf-file.html#conf-plugin-files", - "conf-post-build-hook": "command-ref/conf-file.html#conf-post-build-hook", - "conf-pre-build-hook": "command-ref/conf-file.html#conf-pre-build-hook", - "conf-require-sigs": "command-ref/conf-file.html#conf-require-sigs", - "conf-restrict-eval": "command-ref/conf-file.html#conf-restrict-eval", - "conf-run-diff-hook": "command-ref/conf-file.html#conf-run-diff-hook", - "conf-sandbox": "command-ref/conf-file.html#conf-sandbox", - "conf-sandbox-dev-shm-size": "command-ref/conf-file.html#conf-sandbox-dev-shm-size", - "conf-sandbox-paths": "command-ref/conf-file.html#conf-sandbox-paths", - "conf-secret-key-files": "command-ref/conf-file.html#conf-secret-key-files", - "conf-show-trace": "command-ref/conf-file.html#conf-show-trace", - "conf-stalled-download-timeout": "command-ref/conf-file.html#conf-stalled-download-timeout", - "conf-substitute": "command-ref/conf-file.html#conf-substitute", - "conf-substituters": "command-ref/conf-file.html#conf-substituters", - "conf-system": "command-ref/conf-file.html#conf-system", - "conf-system-features": "command-ref/conf-file.html#conf-system-features", - "conf-tarball-ttl": "command-ref/conf-file.html#conf-tarball-ttl", - "conf-timeout": "command-ref/conf-file.html#conf-timeout", - "conf-trace-function-calls": "command-ref/conf-file.html#conf-trace-function-calls", - 
"conf-trusted-binary-caches": "command-ref/conf-file.html#conf-trusted-binary-caches", - "conf-trusted-public-keys": "command-ref/conf-file.html#conf-trusted-public-keys", - "conf-trusted-substituters": "command-ref/conf-file.html#conf-trusted-substituters", - "conf-trusted-users": "command-ref/conf-file.html#conf-trusted-users", - "extra-sandbox-paths": "command-ref/conf-file.html#extra-sandbox-paths", - "sec-conf-file": "command-ref/conf-file.html", - "env-NIX_PATH": "command-ref/env-common.html#env-NIX_PATH", - "env-common": "command-ref/env-common.html", - "envar-remote": "command-ref/env-common.html#env-NIX_REMOTE", - "sec-common-env": "command-ref/env-common.html", - "ch-files": "command-ref/files.html", - "ch-main-commands": "command-ref/main-commands.html", - "opt-out-link": "command-ref/nix-build.html#opt-out-link", - "sec-nix-build": "command-ref/nix-build.html", - "sec-nix-channel": "command-ref/nix-channel.html", - "sec-nix-collect-garbage": "command-ref/nix-collect-garbage.html", - "sec-nix-copy-closure": "command-ref/nix-copy-closure.html", - "sec-nix-daemon": "command-ref/nix-daemon.html", - "refsec-nix-env-install-examples": "command-ref/nix-env.html#examples", - "rsec-nix-env-install": "command-ref/nix-env.html#operation---install", - "rsec-nix-env-set": "command-ref/nix-env.html#operation---set", - "rsec-nix-env-set-flag": "command-ref/nix-env.html#operation---set-flag", - "rsec-nix-env-upgrade": "command-ref/nix-env.html#operation---upgrade", - "sec-nix-env": "command-ref/nix-env.html", - "ssec-version-comparisons": "command-ref/nix-env.html#versions", - "sec-nix-hash": "command-ref/nix-hash.html", - "sec-nix-instantiate": "command-ref/nix-instantiate.html", - "sec-nix-prefetch-url": "command-ref/nix-prefetch-url.html", - "sec-nix-shell": "command-ref/nix-shell.html", - "ssec-nix-shell-shebang": "command-ref/nix-shell.html#use-as-a--interpreter", - "nixref-queries": "command-ref/nix-store.html#queries", - "opt-add-root": 
"command-ref/nix-store.html#opt-add-root", - "refsec-nix-store-dump": "command-ref/nix-store.html#operation---dump", - "refsec-nix-store-export": "command-ref/nix-store.html#operation---export", - "refsec-nix-store-import": "command-ref/nix-store.html#operation---import", - "refsec-nix-store-query": "command-ref/nix-store.html#operation---query", - "refsec-nix-store-verify": "command-ref/nix-store.html#operation---verify", - "rsec-nix-store-gc": "command-ref/nix-store.html#operation---gc", - "rsec-nix-store-generate-binary-cache-key": "command-ref/nix-store.html#operation---generate-binary-cache-key", - "rsec-nix-store-realise": "command-ref/nix-store.html#operation---realise", - "rsec-nix-store-serve": "command-ref/nix-store.html#operation---serve", - "sec-nix-store": "command-ref/nix-store.html", - "opt-I": "command-ref/opt-common.html#opt-I", - "opt-attr": "command-ref/opt-common.html#opt-attr", - "opt-common": "command-ref/opt-common.html", - "opt-cores": "command-ref/opt-common.html#opt-cores", - "opt-log-format": "command-ref/opt-common.html#opt-log-format", - "opt-max-jobs": "command-ref/opt-common.html#opt-max-jobs", - "opt-max-silent-time": "command-ref/opt-common.html#opt-max-silent-time", - "opt-timeout": "command-ref/opt-common.html#opt-timeout", - "sec-common-options": "command-ref/opt-common.html", - "ch-utilities": "command-ref/utilities.html", - "chap-hacking": "development/building.html", - "adv-attr-allowSubstitutes": "language/advanced-attributes.html#adv-attr-allowSubstitutes", - "adv-attr-allowedReferences": "language/advanced-attributes.html#adv-attr-allowedReferences", - "adv-attr-allowedRequisites": "language/advanced-attributes.html#adv-attr-allowedRequisites", - "adv-attr-disallowedReferences": "language/advanced-attributes.html#adv-attr-disallowedReferences", - "adv-attr-disallowedRequisites": "language/advanced-attributes.html#adv-attr-disallowedRequisites", - "adv-attr-exportReferencesGraph": 
"language/advanced-attributes.html#adv-attr-exportReferencesGraph", - "adv-attr-impureEnvVars": "language/advanced-attributes.html#adv-attr-impureEnvVars", - "adv-attr-outputHash": "language/advanced-attributes.html#adv-attr-outputHash", - "adv-attr-outputHashAlgo": "language/advanced-attributes.html#adv-attr-outputHashAlgo", - "adv-attr-outputHashMode": "language/advanced-attributes.html#adv-attr-outputHashMode", - "adv-attr-passAsFile": "language/advanced-attributes.html#adv-attr-passAsFile", - "adv-attr-preferLocalBuild": "language/advanced-attributes.html#adv-attr-preferLocalBuild", - "fixed-output-drvs": "language/advanced-attributes.html#adv-attr-outputHash", - "sec-advanced-attributes": "language/advanced-attributes.html", - "builtin-abort": "language/builtins.html#builtins-abort", - "builtin-add": "language/builtins.html#builtins-add", - "builtin-all": "language/builtins.html#builtins-all", - "builtin-any": "language/builtins.html#builtins-any", - "builtin-attrNames": "language/builtins.html#builtins-attrNames", - "builtin-attrValues": "language/builtins.html#builtins-attrValues", - "builtin-baseNameOf": "language/builtins.html#builtins-baseNameOf", - "builtin-bitAnd": "language/builtins.html#builtins-bitAnd", - "builtin-bitOr": "language/builtins.html#builtins-bitOr", - "builtin-bitXor": "language/builtins.html#builtins-bitXor", - "builtin-builtins": "language/builtins.html#builtins-builtins", - "builtin-compareVersions": "language/builtins.html#builtins-compareVersions", - "builtin-concatLists": "language/builtins.html#builtins-concatLists", - "builtin-concatStringsSep": "language/builtins.html#builtins-concatStringsSep", - "builtin-currentSystem": "language/builtins.html#builtins-currentSystem", - "builtin-deepSeq": "language/builtins.html#builtins-deepSeq", - "builtin-derivation": "language/builtins.html#builtins-derivation", - "builtin-dirOf": "language/builtins.html#builtins-dirOf", - "builtin-div": "language/builtins.html#builtins-div", - 
"builtin-elem": "language/builtins.html#builtins-elem", - "builtin-elemAt": "language/builtins.html#builtins-elemAt", - "builtin-fetchGit": "language/builtins.html#builtins-fetchGit", - "builtin-fetchTarball": "language/builtins.html#builtins-fetchTarball", - "builtin-fetchurl": "language/builtins.html#builtins-fetchurl", - "builtin-filterSource": "language/builtins.html#builtins-filterSource", - "builtin-foldl-prime": "language/builtins.html#builtins-foldl-prime", - "builtin-fromJSON": "language/builtins.html#builtins-fromJSON", - "builtin-functionArgs": "language/builtins.html#builtins-functionArgs", - "builtin-genList": "language/builtins.html#builtins-genList", - "builtin-getAttr": "language/builtins.html#builtins-getAttr", - "builtin-getEnv": "language/builtins.html#builtins-getEnv", - "builtin-hasAttr": "language/builtins.html#builtins-hasAttr", - "builtin-hashFile": "language/builtins.html#builtins-hashFile", - "builtin-hashString": "language/builtins.html#builtins-hashString", - "builtin-head": "language/builtins.html#builtins-head", - "builtin-import": "language/builtins.html#builtins-import", - "builtin-intersectAttrs": "language/builtins.html#builtins-intersectAttrs", - "builtin-isAttrs": "language/builtins.html#builtins-isAttrs", - "builtin-isBool": "language/builtins.html#builtins-isBool", - "builtin-isFloat": "language/builtins.html#builtins-isFloat", - "builtin-isFunction": "language/builtins.html#builtins-isFunction", - "builtin-isInt": "language/builtins.html#builtins-isInt", - "builtin-isList": "language/builtins.html#builtins-isList", - "builtin-isNull": "language/builtins.html#builtins-isNull", - "builtin-isString": "language/builtins.html#builtins-isString", - "builtin-length": "language/builtins.html#builtins-length", - "builtin-lessThan": "language/builtins.html#builtins-lessThan", - "builtin-listToAttrs": "language/builtins.html#builtins-listToAttrs", - "builtin-map": "language/builtins.html#builtins-map", - "builtin-match": 
"language/builtins.html#builtins-match", - "builtin-mul": "language/builtins.html#builtins-mul", - "builtin-parseDrvName": "language/builtins.html#builtins-parseDrvName", - "builtin-path": "language/builtins.html#builtins-path", - "builtin-pathExists": "language/builtins.html#builtins-pathExists", - "builtin-placeholder": "language/builtins.html#builtins-placeholder", - "builtin-readDir": "language/builtins.html#builtins-readDir", - "builtin-readFile": "language/builtins.html#builtins-readFile", - "builtin-removeAttrs": "language/builtins.html#builtins-removeAttrs", - "builtin-replaceStrings": "language/builtins.html#builtins-replaceStrings", - "builtin-seq": "language/builtins.html#builtins-seq", - "builtin-sort": "language/builtins.html#builtins-sort", - "builtin-split": "language/builtins.html#builtins-split", - "builtin-splitVersion": "language/builtins.html#builtins-splitVersion", - "builtin-stringLength": "language/builtins.html#builtins-stringLength", - "builtin-sub": "language/builtins.html#builtins-sub", - "builtin-substring": "language/builtins.html#builtins-substring", - "builtin-tail": "language/builtins.html#builtins-tail", - "builtin-throw": "language/builtins.html#builtins-throw", - "builtin-toFile": "language/builtins.html#builtins-toFile", - "builtin-toJSON": "language/builtins.html#builtins-toJSON", - "builtin-toPath": "language/builtins.html#builtins-toPath", - "builtin-toString": "language/builtins.html#builtins-toString", - "builtin-toXML": "language/builtins.html#builtins-toXML", - "builtin-trace": "language/builtins.html#builtins-trace", - "builtin-tryEval": "language/builtins.html#builtins-tryEval", - "builtin-typeOf": "language/builtins.html#builtins-typeOf", - "ssec-builtins": "language/builtins.html", - "attr-system": "language/derivations.html#attr-system", - "ssec-derivation": "language/derivations.html", - "ch-expression-language": "language/index.html", - "sec-constructs": "language/syntax.html", - "sect-let-language": 
"language/syntax.html#let-expressions", - "ss-functions": "language/syntax.html#functions", - "sec-language-operators": "language/operators.html", - "table-operators": "language/operators.html", - "ssec-values": "language/types.html", - "gloss-closure": "glossary.html#gloss-closure", - "gloss-derivation": "glossary.html#gloss-derivation", - "gloss-deriver": "glossary.html#gloss-deriver", - "gloss-nar": "glossary.html#gloss-nar", - "gloss-output-path": "glossary.html#gloss-output-path", - "gloss-profile": "glossary.html#gloss-profile", - "gloss-reachable": "glossary.html#gloss-reachable", - "gloss-reference": "glossary.html#gloss-reference", - "gloss-substitute": "glossary.html#gloss-substitute", - "gloss-user-env": "glossary.html#gloss-user-env", - "gloss-validity": "glossary.html#gloss-validity", - "part-glossary": "glossary.html", - "sec-building-source": "installation/building-source.html", - "ch-env-variables": "installation/env-variables.html", - "sec-installer-proxy-settings": "installation/env-variables.html#proxy-environment-variables", - "sec-nix-ssl-cert-file": "installation/env-variables.html#nix_ssl_cert_file", - "sec-nix-ssl-cert-file-with-nix-daemon-and-macos": "installation/env-variables.html#nix_ssl_cert_file-with-macos-and-the-nix-daemon", - "chap-installation": "installation/index.html", - "ch-installing-binary": "installation/installing-binary.html", - "sect-macos-installation": "installation/installing-binary.html#macos-installation", - "sect-macos-installation-change-store-prefix": "installation/installing-binary.html#macos-installation", - "sect-macos-installation-encrypted-volume": "installation/installing-binary.html#macos-installation", - "sect-macos-installation-recommended-notes": "installation/installing-binary.html#macos-installation", - "sect-macos-installation-symlink": "installation/installing-binary.html#macos-installation", - "sect-multi-user-installation": "installation/installing-binary.html#multi-user-installation", - 
"sect-nix-install-binary-tarball": "installation/installing-binary.html#installing-from-a-binary-tarball", - "sect-nix-install-pinned-version-url": "installation/installing-binary.html#installing-a-pinned-nix-version-from-a-url", - "ch-installing-source": "installation/installing-source.html", - "ssec-multi-user": "installation/multi-user.html", - "sec-obtaining-source": "installation/obtaining-source.html", - "sec-prerequisites-source": "installation/prerequisites-source.html", - "ch-upgrading-nix": "installation/upgrading.html", - "ch-about-nix": "introduction.html", - "chap-introduction": "introduction.html", - "ch-basic-package-mgmt": "package-management/basic-package-mgmt.html", - "ssec-binary-cache-substituter": "package-management/binary-cache-substituter.html", - "sec-channels": "command-ref/nix-channel.html", - "ssec-copy-closure": "command-ref/nix-copy-closure.html", - "sec-garbage-collection": "package-management/garbage-collection.html", - "ssec-gc-roots": "package-management/garbage-collector-roots.html", - "chap-package-management": "package-management/index.html", - "sec-profiles": "package-management/profiles.html", - "ssec-s3-substituter": "store/types/s3-substituter.html", - "ssec-s3-substituter-anonymous-reads": "store/types/s3-substituter.html#anonymous-reads-to-your-s3-compatible-binary-cache", - "ssec-s3-substituter-authenticated-reads": "store/types/s3-substituter.html#authenticated-reads-to-your-s3-binary-cache", - "ssec-s3-substituter-authenticated-writes": "store/types/s3-substituter.html#authenticated-writes-to-your-s3-compatible-binary-cache", - "sec-sharing-packages": "package-management/sharing-packages.html", - "ssec-ssh-substituter": "package-management/ssh-substituter.html", - "chap-quick-start": "quick-start.html", - "sec-relnotes": "release-notes/index.html", - "ch-relnotes-0.10.1": "release-notes/rl-0.10.1.html", - "ch-relnotes-0.10": "release-notes/rl-0.10.html", - "ssec-relnotes-0.11": "release-notes/rl-0.11.html", - 
"ssec-relnotes-0.12": "release-notes/rl-0.12.html", - "ssec-relnotes-0.13": "release-notes/rl-0.13.html", - "ssec-relnotes-0.14": "release-notes/rl-0.14.html", - "ssec-relnotes-0.15": "release-notes/rl-0.15.html", - "ssec-relnotes-0.16": "release-notes/rl-0.16.html", - "ch-relnotes-0.5": "release-notes/rl-0.5.html", - "ch-relnotes-0.6": "release-notes/rl-0.6.html", - "ch-relnotes-0.7": "release-notes/rl-0.7.html", - "ch-relnotes-0.8.1": "release-notes/rl-0.8.1.html", - "ch-relnotes-0.8": "release-notes/rl-0.8.html", - "ch-relnotes-0.9.1": "release-notes/rl-0.9.1.html", - "ch-relnotes-0.9.2": "release-notes/rl-0.9.2.html", - "ch-relnotes-0.9": "release-notes/rl-0.9.html", - "ssec-relnotes-1.0": "release-notes/rl-1.0.html", - "ssec-relnotes-1.1": "release-notes/rl-1.1.html", - "ssec-relnotes-1.10": "release-notes/rl-1.10.html", - "ssec-relnotes-1.11.10": "release-notes/rl-1.11.10.html", - "ssec-relnotes-1.11": "release-notes/rl-1.11.html", - "ssec-relnotes-1.2": "release-notes/rl-1.2.html", - "ssec-relnotes-1.3": "release-notes/rl-1.3.html", - "ssec-relnotes-1.4": "release-notes/rl-1.4.html", - "ssec-relnotes-1.5.1": "release-notes/rl-1.5.1.html", - "ssec-relnotes-1.5.2": "release-notes/rl-1.5.2.html", - "ssec-relnotes-1.5": "release-notes/rl-1.5.html", - "ssec-relnotes-1.6.1": "release-notes/rl-1.6.1.html", - "ssec-relnotes-1.6.0": "release-notes/rl-1.6.html", - "ssec-relnotes-1.7": "release-notes/rl-1.7.html", - "ssec-relnotes-1.8": "release-notes/rl-1.8.html", - "ssec-relnotes-1.9": "release-notes/rl-1.9.html", - "ssec-relnotes-2.0": "release-notes/rl-2.0.html", - "ssec-relnotes-2.1": "release-notes/rl-2.1.html", - "ssec-relnotes-2.2": "release-notes/rl-2.2.html", - "ssec-relnotes-2.3": "release-notes/rl-2.3.html", - }, - "language/types.html": { - "simple-values": "#primitives", - "lists": "#list", - "strings": "#string", - "attribute-sets": "#attribute-set", - "type-number": "#type-int", - }, - "language/syntax.html": { - "scoping-rules": "scoping.html", - 
"string-literal": "string-literals.html", - }, - "language/derivations.md": { - "builder-execution": "store/drv/building.md#builder-execution", - }, - "installation/installing-binary.html": { - "linux": "uninstall.html#linux", - "macos": "uninstall.html#macos", - "uninstalling": "uninstall.html", - }, - "development/building.html": { - "nix-with-flakes": "#building-nix-with-flakes", - "classic-nix": "#building-nix", - "running-tests": "testing.html#running-tests", - "unit-tests": "testing.html#unit-tests", - "functional-tests": "testing.html#functional-tests", - "debugging-failing-functional-tests": "testing.html#debugging-failing-functional-tests", - "integration-tests": "testing.html#integration-tests", - "installer-tests": "testing.html#installer-tests", - "one-time-setup": "testing.html#one-time-setup", - "using-the-ci-generated-installer-for-manual-testing": "testing.html#using-the-ci-generated-installer-for-manual-testing", - "characterization-testing": "testing.html#characterisation-testing-unit", - "add-a-release-note": "contributing.html#add-a-release-note", - "add-an-entry": "contributing.html#add-an-entry", - "build-process": "contributing.html#build-process", - "reverting": "contributing.html#reverting", - "branches": "contributing.html#branches", - }, - "glossary.html": { - "gloss-local-store": "store/types/local-store.html", - "package-attribute-set": "#package", - "gloss-chroot-store": "store/types/local-store.html", - "gloss-content-addressed-derivation": "#gloss-content-addressing-derivation", - }, -}; - -// the following code matches the current page's URL against the set of redirects. -// -// it is written to minimize the latency between page load and redirect. -// therefore we avoid function calls, copying data, and unnecessary loops. -// IMPORTANT: we use stateful array operations and their order matters! -// -// matching URLs is more involved than it should be: -// -// 1. `document.location.pathname` can have an arbitrary prefix. -// -// 2. 
`path_to_root` is set by mdBook. it consists only of `../`s and -// determines the depth of `` relative to the prefix: -// -// `document.location.pathname` -// |------------------------------| -// ///[[.html]][#] -// |----| -// `path_to_root` has same number of path segments -// -// source: https://phaiax.github.io/mdBook/format/theme/index-hbs.html#data -// -// 3. the following paths are equivalent: -// -// /foo/bar/ -// /foo/bar/index.html -// /foo/bar/index -// -// 4. the following paths are also equivalent: -// -// /foo/bar/baz -// /foo/bar/baz.html -// - -let segments = document.location.pathname.split('/'); - -let file = segments.pop(); - -// normalize file name -if (file === '') { file = "index.html"; } -else if (!file.endsWith('.html')) { file = file + '.html'; } - -segments.push(file); - -// use `path_to_root` to discern prefix from path. -const depth = path_to_root.split('/').length; - -// remove segments containing prefix. the following works because -// 1. the original `document.location.pathname` is absolute, -// hence first element of `segments` is always empty. -// 2. last element of splitting `path_to_root` is also always empty. -// 3. last element of `segments` is the file name. -// -// visual example: -// -// '/foo/bar/baz.html'.split('/') -> [ '', 'foo', 'bar', 'baz.html' ] -// '../'.split('/') -> [ '..', '' ] -// -// the following operations will then result in -// -// path = 'bar/baz.html' -// -segments.splice(0, segments.length - depth); -const path = segments.join('/'); - -// anchor starts with the hash character (`#`), -// but our redirect declarations don't, so we strip it. 
-// example: -// document.location.hash -> '#foo' -// document.location.hash.substring(1) -> 'foo' -const anchor = document.location.hash.substring(1); - -const redirect = redirects[path]; -if (redirect) { - const target = redirect[anchor]; - if (target) { - document.location.href = target; - } -} diff --git a/doc/manual/redirects.js.in b/doc/manual/redirects.js.in new file mode 100644 index 00000000000..36ebe0560d4 --- /dev/null +++ b/doc/manual/redirects.js.in @@ -0,0 +1,94 @@ +// redirect rules for URL fragments (client-side) to prevent link rot. +// this must be done on the client side, as web servers do not see the fragment part of the URL. +// it will only work with JavaScript enabled in the browser, but this is the best we can do here. +// see source/_redirects for path redirects (server-side) + +// redirects are declared as follows: +// each entry has as its key a path matching the requested URL path, relative to the mdBook document root. +// +// IMPORTANT: it must specify the full path with file name and suffix +// +// each entry is itself a set of key-value pairs, where +// - keys are anchors on the matched path. +// - values are redirection targets relative to the current path. + +const redirects = @REDIRECTS_JSON@; + +// the following code matches the current page's URL against the set of redirects. +// +// it is written to minimize the latency between page load and redirect. +// therefore we avoid function calls, copying data, and unnecessary loops. +// IMPORTANT: we use stateful array operations and their order matters! +// +// matching URLs is more involved than it should be: +// +// 1. `document.location.pathname` can have an arbitrary prefix. +// +// 2. `path_to_root` is set by mdBook. 
it consists only of `../`s and +// determines the depth of `` relative to the prefix: +// +// `document.location.pathname` +// |------------------------------| +// ///[[.html]][#] +// |----| +// `path_to_root` has same number of path segments +// +// source: https://phaiax.github.io/mdBook/format/theme/index-hbs.html#data +// +// 3. the following paths are equivalent: +// +// /foo/bar/ +// /foo/bar/index.html +// /foo/bar/index +// +// 4. the following paths are also equivalent: +// +// /foo/bar/baz +// /foo/bar/baz.html +// + +let segments = document.location.pathname.split('/'); + +let file = segments.pop(); + +// normalize file name +if (file === '') { file = "index.html"; } +else if (!file.endsWith('.html')) { file = file + '.html'; } + +segments.push(file); + +// use `path_to_root` to discern prefix from path. +const depth = path_to_root.split('/').length; + +// remove segments containing prefix. the following works because +// 1. the original `document.location.pathname` is absolute, +// hence first element of `segments` is always empty. +// 2. last element of splitting `path_to_root` is also always empty. +// 3. last element of `segments` is the file name. +// +// visual example: +// +// '/foo/bar/baz.html'.split('/') -> [ '', 'foo', 'bar', 'baz.html' ] +// '../'.split('/') -> [ '..', '' ] +// +// the following operations will then result in +// +// path = 'bar/baz.html' +// +segments.splice(0, segments.length - depth); +const path = segments.join('/'); + +// anchor starts with the hash character (`#`), +// but our redirect declarations don't, so we strip it. 
+// example: +// document.location.hash -> '#foo' +// document.location.hash.substring(1) -> 'foo' +const anchor = document.location.hash.substring(1); + +const redirect = redirects[path]; +if (redirect) { + const target = redirect[anchor]; + if (target) { + document.location.href = target; + } +} diff --git a/doc/manual/redirects.json b/doc/manual/redirects.json new file mode 100644 index 00000000000..07a6f36627f --- /dev/null +++ b/doc/manual/redirects.json @@ -0,0 +1,316 @@ +{ + "index.html": { + "part-advanced-topics": "advanced-topics/index.html", + "chap-tuning-cores-and-jobs": "advanced-topics/cores-vs-jobs.html", + "chap-diff-hook": "advanced-topics/diff-hook.html", + "check-dirs-are-unregistered": "advanced-topics/diff-hook.html#check-dirs-are-unregistered", + "chap-distributed-builds": "command-ref/conf-file.html#conf-builders", + "chap-post-build-hook": "advanced-topics/post-build-hook.html", + "chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats", + "chap-writing-nix-expressions": "language/index.html", + "part-command-ref": "command-ref/index.html", + "conf-allow-import-from-derivation": "command-ref/conf-file.html#conf-allow-import-from-derivation", + "conf-allow-new-privileges": "command-ref/conf-file.html#conf-allow-new-privileges", + "conf-allowed-uris": "command-ref/conf-file.html#conf-allowed-uris", + "conf-allowed-users": "command-ref/conf-file.html#conf-allowed-users", + "conf-auto-optimise-store": "command-ref/conf-file.html#conf-auto-optimise-store", + "conf-binary-cache-public-keys": "command-ref/conf-file.html#conf-trusted-public-keys", + "conf-binary-caches": "command-ref/conf-file.html#conf-substituters", + "conf-build-compress-log": "command-ref/conf-file.html#conf-compress-build-log", + "conf-build-cores": "command-ref/conf-file.html#conf-cores", + "conf-build-extra-chroot-dirs": "command-ref/conf-file.html#conf-sandbox-paths", + "conf-build-extra-sandbox-paths": 
"command-ref/conf-file.html#conf-sandbox-paths", + "conf-build-fallback": "command-ref/conf-file.html#conf-fallback", + "conf-build-max-jobs": "command-ref/conf-file.html#conf-max-jobs", + "conf-build-max-log-size": "command-ref/conf-file.html#conf-max-build-log-size", + "conf-build-max-silent-time": "command-ref/conf-file.html#conf-max-silent-time", + "conf-build-timeout": "command-ref/conf-file.html#conf-timeout", + "conf-build-use-chroot": "command-ref/conf-file.html#conf-sandbox", + "conf-build-use-sandbox": "command-ref/conf-file.html#conf-sandbox", + "conf-build-use-substitutes": "command-ref/conf-file.html#conf-substitute", + "conf-build-users-group": "command-ref/conf-file.html#conf-build-users-group", + "conf-builders": "command-ref/conf-file.html#conf-builders", + "conf-builders-use-substitutes": "command-ref/conf-file.html#conf-builders-use-substitutes", + "conf-compress-build-log": "command-ref/conf-file.html#conf-compress-build-log", + "conf-connect-timeout": "command-ref/conf-file.html#conf-connect-timeout", + "conf-cores": "command-ref/conf-file.html#conf-cores", + "conf-diff-hook": "command-ref/conf-file.html#conf-diff-hook", + "conf-env-keep-derivations": "command-ref/conf-file.html#conf-keep-env-derivations", + "conf-extra-binary-caches": "command-ref/conf-file.html#conf-substituters", + "conf-extra-platforms": "command-ref/conf-file.html#conf-extra-platforms", + "conf-extra-sandbox-paths": "command-ref/conf-file.html#conf-sandbox-paths", + "conf-extra-substituters": "command-ref/conf-file.html#conf-substituters", + "conf-fallback": "command-ref/conf-file.html#conf-fallback", + "conf-fsync-metadata": "command-ref/conf-file.html#conf-fsync-metadata", + "conf-gc-keep-derivations": "command-ref/conf-file.html#conf-keep-derivations", + "conf-gc-keep-outputs": "command-ref/conf-file.html#conf-keep-outputs", + "conf-hashed-mirrors": "command-ref/conf-file.html#conf-hashed-mirrors", + "conf-http-connections": 
"command-ref/conf-file.html#conf-http-connections", + "conf-keep-build-log": "command-ref/conf-file.html#conf-keep-build-log", + "conf-keep-derivations": "command-ref/conf-file.html#conf-keep-derivations", + "conf-keep-env-derivations": "command-ref/conf-file.html#conf-keep-env-derivations", + "conf-keep-outputs": "command-ref/conf-file.html#conf-keep-outputs", + "conf-max-build-log-size": "command-ref/conf-file.html#conf-max-build-log-size", + "conf-max-free": "command-ref/conf-file.html#conf-max-free", + "conf-max-jobs": "command-ref/conf-file.html#conf-max-jobs", + "conf-max-silent-time": "command-ref/conf-file.html#conf-max-silent-time", + "conf-min-free": "command-ref/conf-file.html#conf-min-free", + "conf-narinfo-cache-negative-ttl": "command-ref/conf-file.html#conf-narinfo-cache-negative-ttl", + "conf-narinfo-cache-positive-ttl": "command-ref/conf-file.html#conf-narinfo-cache-positive-ttl", + "conf-netrc-file": "command-ref/conf-file.html#conf-netrc-file", + "conf-plugin-files": "command-ref/conf-file.html#conf-plugin-files", + "conf-post-build-hook": "command-ref/conf-file.html#conf-post-build-hook", + "conf-pre-build-hook": "command-ref/conf-file.html#conf-pre-build-hook", + "conf-require-sigs": "command-ref/conf-file.html#conf-require-sigs", + "conf-restrict-eval": "command-ref/conf-file.html#conf-restrict-eval", + "conf-run-diff-hook": "command-ref/conf-file.html#conf-run-diff-hook", + "conf-sandbox": "command-ref/conf-file.html#conf-sandbox", + "conf-sandbox-dev-shm-size": "command-ref/conf-file.html#conf-sandbox-dev-shm-size", + "conf-sandbox-paths": "command-ref/conf-file.html#conf-sandbox-paths", + "conf-secret-key-files": "command-ref/conf-file.html#conf-secret-key-files", + "conf-show-trace": "command-ref/conf-file.html#conf-show-trace", + "conf-stalled-download-timeout": "command-ref/conf-file.html#conf-stalled-download-timeout", + "conf-substitute": "command-ref/conf-file.html#conf-substitute", + "conf-substituters": 
"command-ref/conf-file.html#conf-substituters", + "conf-system": "command-ref/conf-file.html#conf-system", + "conf-system-features": "command-ref/conf-file.html#conf-system-features", + "conf-tarball-ttl": "command-ref/conf-file.html#conf-tarball-ttl", + "conf-timeout": "command-ref/conf-file.html#conf-timeout", + "conf-trace-function-calls": "command-ref/conf-file.html#conf-trace-function-calls", + "conf-trusted-binary-caches": "command-ref/conf-file.html#conf-trusted-substituters", + "conf-trusted-public-keys": "command-ref/conf-file.html#conf-trusted-public-keys", + "conf-trusted-substituters": "command-ref/conf-file.html#conf-trusted-substituters", + "conf-trusted-users": "command-ref/conf-file.html#conf-trusted-users", + "extra-sandbox-paths": "command-ref/conf-file.html#conf-sandbox-paths", + "sec-conf-file": "command-ref/conf-file.html", + "env-NIX_PATH": "command-ref/env-common.html#env-NIX_PATH", + "env-common": "command-ref/env-common.html", + "envar-remote": "command-ref/env-common.html#env-NIX_REMOTE", + "sec-common-env": "command-ref/env-common.html", + "ch-files": "command-ref/files.html", + "ch-main-commands": "command-ref/main-commands.html", + "opt-out-link": "command-ref/nix-build.html#opt-out-link", + "sec-nix-build": "command-ref/nix-build.html", + "sec-nix-channel": "command-ref/nix-channel.html", + "sec-nix-collect-garbage": "command-ref/nix-collect-garbage.html", + "sec-nix-copy-closure": "command-ref/nix-copy-closure.html", + "sec-nix-daemon": "command-ref/nix-daemon.html", + "refsec-nix-env-install-examples": "command-ref/nix-env/install.html#examples", + "rsec-nix-env-install": "command-ref/nix-env/install.html", + "rsec-nix-env-set": "command-ref/nix-env/set.html", + "rsec-nix-env-set-flag": "command-ref/nix-env/set-flag.html", + "rsec-nix-env-upgrade": "command-ref/nix-env/upgrade.html", + "sec-nix-env": "command-ref/nix-env.html", + "ssec-version-comparisons": "command-ref/nix-env.html#selectors", + "sec-nix-hash": 
"command-ref/nix-hash.html", + "sec-nix-instantiate": "command-ref/nix-instantiate.html", + "sec-nix-prefetch-url": "command-ref/nix-prefetch-url.html", + "sec-nix-shell": "command-ref/nix-shell.html", + "ssec-nix-shell-shebang": "command-ref/nix-shell.html#use-as-a--interpreter", + "nixref-queries": "command-ref/nix-store/query.html#queries", + "opt-add-root": "command-ref/nix-store/query.html#opt-add-root", + "refsec-nix-store-dump": "command-ref/nix-store/dump.html", + "refsec-nix-store-export": "command-ref/nix-store/export.html", + "refsec-nix-store-import": "command-ref/nix-store/import.html", + "refsec-nix-store-query": "command-ref/nix-store/query.html", + "refsec-nix-store-verify": "command-ref/nix-store/verify.html", + "rsec-nix-store-gc": "command-ref/nix-store/gc.html", + "rsec-nix-store-generate-binary-cache-key": "command-ref/nix-store/generate-binary-cache-key.html", + "rsec-nix-store-realise": "command-ref/nix-store/realise.html", + "rsec-nix-store-serve": "command-ref/nix-store/serve.html", + "sec-nix-store": "command-ref/nix-store.html", + "opt-I": "command-ref/opt-common.html#opt-I", + "opt-attr": "command-ref/opt-common.html#opt-attr", + "opt-common": "command-ref/opt-common.html", + "opt-cores": "command-ref/opt-common.html#opt-cores", + "opt-log-format": "command-ref/opt-common.html#opt-log-format", + "opt-max-jobs": "command-ref/opt-common.html#opt-max-jobs", + "opt-max-silent-time": "command-ref/opt-common.html#opt-max-silent-time", + "opt-timeout": "command-ref/opt-common.html#opt-timeout", + "sec-common-options": "command-ref/opt-common.html", + "ch-utilities": "command-ref/utilities.html", + "chap-hacking": "development/building.html", + "adv-attr-allowSubstitutes": "language/advanced-attributes.html#adv-attr-allowSubstitutes", + "adv-attr-allowedReferences": "language/advanced-attributes.html#adv-attr-allowedReferences", + "adv-attr-allowedRequisites": "language/advanced-attributes.html#adv-attr-allowedRequisites", + 
"adv-attr-disallowedReferences": "language/advanced-attributes.html#adv-attr-disallowedReferences", + "adv-attr-disallowedRequisites": "language/advanced-attributes.html#adv-attr-disallowedRequisites", + "adv-attr-exportReferencesGraph": "language/advanced-attributes.html#adv-attr-exportReferencesGraph", + "adv-attr-impureEnvVars": "language/advanced-attributes.html#adv-attr-impureEnvVars", + "adv-attr-outputHash": "language/advanced-attributes.html#adv-attr-outputHash", + "adv-attr-outputHashAlgo": "language/advanced-attributes.html#adv-attr-outputHashAlgo", + "adv-attr-outputHashMode": "language/advanced-attributes.html#adv-attr-outputHashMode", + "adv-attr-passAsFile": "language/advanced-attributes.html#adv-attr-passAsFile", + "adv-attr-preferLocalBuild": "language/advanced-attributes.html#adv-attr-preferLocalBuild", + "fixed-output-drvs": "language/advanced-attributes.html#adv-attr-outputHash", + "sec-advanced-attributes": "language/advanced-attributes.html", + "builtin-abort": "language/builtins.html#builtins-abort", + "builtin-add": "language/builtins.html#builtins-add", + "builtin-all": "language/builtins.html#builtins-all", + "builtin-any": "language/builtins.html#builtins-any", + "builtin-attrNames": "language/builtins.html#builtins-attrNames", + "builtin-attrValues": "language/builtins.html#builtins-attrValues", + "builtin-baseNameOf": "language/builtins.html#builtins-baseNameOf", + "builtin-bitAnd": "language/builtins.html#builtins-bitAnd", + "builtin-bitOr": "language/builtins.html#builtins-bitOr", + "builtin-bitXor": "language/builtins.html#builtins-bitXor", + "builtin-builtins": "language/builtins.html#builtins-builtins", + "builtin-compareVersions": "language/builtins.html#builtins-compareVersions", + "builtin-concatLists": "language/builtins.html#builtins-concatLists", + "builtin-concatStringsSep": "language/builtins.html#builtins-concatStringsSep", + "builtin-currentSystem": "language/builtins.html#builtins-currentSystem", + "builtin-deepSeq": 
"language/builtins.html#builtins-deepSeq", + "builtin-derivation": "language/builtins.html#builtins-derivation", + "builtin-dirOf": "language/builtins.html#builtins-dirOf", + "builtin-div": "language/builtins.html#builtins-div", + "builtin-elem": "language/builtins.html#builtins-elem", + "builtin-elemAt": "language/builtins.html#builtins-elemAt", + "builtin-fetchGit": "language/builtins.html#builtins-fetchGit", + "builtin-fetchTarball": "language/builtins.html#builtins-fetchTarball", + "builtin-fetchurl": "language/builtins.html#builtins-fetchurl", + "builtin-filterSource": "language/builtins.html#builtins-filterSource", + "builtin-foldl-prime": "language/builtins.html#builtins-foldl'", + "builtin-fromJSON": "language/builtins.html#builtins-fromJSON", + "builtin-functionArgs": "language/builtins.html#builtins-functionArgs", + "builtin-genList": "language/builtins.html#builtins-genList", + "builtin-getAttr": "language/builtins.html#builtins-getAttr", + "builtin-getEnv": "language/builtins.html#builtins-getEnv", + "builtin-hasAttr": "language/builtins.html#builtins-hasAttr", + "builtin-hashFile": "language/builtins.html#builtins-hashFile", + "builtin-hashString": "language/builtins.html#builtins-hashString", + "builtin-head": "language/builtins.html#builtins-head", + "builtin-import": "language/builtins.html#builtins-import", + "builtin-intersectAttrs": "language/builtins.html#builtins-intersectAttrs", + "builtin-isAttrs": "language/builtins.html#builtins-isAttrs", + "builtin-isBool": "language/builtins.html#builtins-isBool", + "builtin-isFloat": "language/builtins.html#builtins-isFloat", + "builtin-isFunction": "language/builtins.html#builtins-isFunction", + "builtin-isInt": "language/builtins.html#builtins-isInt", + "builtin-isList": "language/builtins.html#builtins-isList", + "builtin-isNull": "language/builtins.html#builtins-isNull", + "builtin-isString": "language/builtins.html#builtins-isString", + "builtin-length": "language/builtins.html#builtins-length", + 
"builtin-lessThan": "language/builtins.html#builtins-lessThan", + "builtin-listToAttrs": "language/builtins.html#builtins-listToAttrs", + "builtin-map": "language/builtins.html#builtins-map", + "builtin-match": "language/builtins.html#builtins-match", + "builtin-mul": "language/builtins.html#builtins-mul", + "builtin-parseDrvName": "language/builtins.html#builtins-parseDrvName", + "builtin-path": "language/builtins.html#builtins-path", + "builtin-pathExists": "language/builtins.html#builtins-pathExists", + "builtin-placeholder": "language/builtins.html#builtins-placeholder", + "builtin-readDir": "language/builtins.html#builtins-readDir", + "builtin-readFile": "language/builtins.html#builtins-readFile", + "builtin-removeAttrs": "language/builtins.html#builtins-removeAttrs", + "builtin-replaceStrings": "language/builtins.html#builtins-replaceStrings", + "builtin-seq": "language/builtins.html#builtins-seq", + "builtin-sort": "language/builtins.html#builtins-sort", + "builtin-split": "language/builtins.html#builtins-split", + "builtin-splitVersion": "language/builtins.html#builtins-splitVersion", + "builtin-stringLength": "language/builtins.html#builtins-stringLength", + "builtin-sub": "language/builtins.html#builtins-sub", + "builtin-substring": "language/builtins.html#builtins-substring", + "builtin-tail": "language/builtins.html#builtins-tail", + "builtin-throw": "language/builtins.html#builtins-throw", + "builtin-toFile": "language/builtins.html#builtins-toFile", + "builtin-toJSON": "language/builtins.html#builtins-toJSON", + "builtin-toPath": "language/builtins.html#builtins-toPath", + "builtin-toString": "language/builtins.html#builtins-toString", + "builtin-toXML": "language/builtins.html#builtins-toXML", + "builtin-trace": "language/builtins.html#builtins-trace", + "builtin-tryEval": "language/builtins.html#builtins-tryEval", + "builtin-typeOf": "language/builtins.html#builtins-typeOf", + "ssec-builtins": "language/builtins.html", + "attr-system": 
"language/derivations.html#attr-system", + "ssec-derivation": "language/derivations.html", + "ch-expression-language": "language/index.html", + "sec-constructs": "language/syntax.html", + "sect-let-language": "language/syntax.html#let-expressions", + "ss-functions": "language/syntax.html#functions", + "sec-language-operators": "language/operators.html", + "table-operators": "language/operators.html", + "ssec-values": "language/types.html", + "gloss-closure": "glossary.html#gloss-closure", + "gloss-derivation": "glossary.html#gloss-derivation", + "gloss-deriver": "glossary.html#gloss-deriver", + "gloss-nar": "glossary.html#gloss-nar", + "gloss-output-path": "glossary.html#gloss-output-path", + "gloss-profile": "glossary.html#gloss-profile", + "gloss-reachable": "glossary.html#gloss-reachable", + "gloss-reference": "glossary.html#gloss-reference", + "gloss-substitute": "glossary.html#gloss-substitute", + "gloss-user-env": "glossary.html#gloss-user-env", + "gloss-validity": "glossary.html#gloss-validity", + "part-glossary": "glossary.html", + "sec-building-source": "installation/building-source.html", + "chap-installation": "installation/index.html", + "ch-installing-source": "installation/installing-source.html", + "ch-nix-security": "installation/nix-security.html", + "sec-obtaining-source": "installation/obtaining-source.html", + "sec-prerequisites-source": "installation/prerequisites-source.html", + "ch-upgrading-nix": "installation/upgrading.html", + "ch-about-nix": "introduction.html", + "chap-introduction": "introduction.html", + "ch-basic-package-mgmt": "package-management/index.html", + "ssec-binary-cache-substituter": "package-management/binary-cache-substituter.html", + "sec-channels": "command-ref/nix-channel.html", + "ssec-copy-closure": "command-ref/nix-copy-closure.html", + "sec-garbage-collection": "package-management/garbage-collection.html", + "ssec-gc-roots": "package-management/garbage-collector-roots.html", + "chap-package-management": 
"package-management/index.html", + "sec-profiles": "package-management/profiles.html", + "ssec-s3-substituter": "store/types/s3-binary-cache-store.html", + "ssec-s3-substituter-anonymous-reads": + "store/types/s3-binary-cache-store.html#anonymous-reads-to-your-s3-compatible-binary-cache", + "ssec-s3-substituter-authenticated-reads": + "store/types/s3-binary-cache-store.html#authenticated-reads-to-your-s3-binary-cache", + "ssec-s3-substituter-authenticated-writes": + "store/types/s3-binary-cache-store.html#authenticated-writes-to-your-s3-compatible-binary-cache", + "sec-sharing-packages": "package-management/sharing-packages.html", + "ssec-ssh-substituter": "package-management/ssh-substituter.html", + "chap-quick-start": "quick-start.html", + "sec-relnotes": "release-notes/index.html" + }, + "language/types.html": { + "simple-values": "#primitives", + "lists": "#type-list", + "strings": "#type-string", + "attribute-sets": "#type-attrs", + "type-number": "#type-int" + }, + "language/syntax.html": { + "scoping-rules": "scope.html", + "string-literal": "string-literals.html" + }, + "language/derivations.html": { + "builder-execution": "../store/building.html#builder-execution" + }, + "installation/installing-binary.html": { + "uninstalling": "uninstall.html" + }, + "development/building.html": { + "nix-with-flakes": "#building-nix", + "classic-nix": "#building-nix", + "running-tests": "testing.html#running-tests", + "unit-tests": "testing.html#unit-tests", + "functional-tests": "testing.html#functional-tests", + "debugging-failing-functional-tests": "testing.html#debugging-failing-functional-tests", + "integration-tests": "testing.html#integration-tests", + "installer-tests": "testing.html#installer-tests", + "one-time-setup": "testing.html#one-time-setup", + "using-the-ci-generated-installer-for-manual-testing": + "testing.html#using-the-ci-generated-installer-for-manual-testing", + "characterization-testing": "testing.html#characterisation-testing-unit", + 
"add-a-release-note": "contributing.html#add-a-release-note", + "add-an-entry": "contributing.html#add-an-entry", + "build-process": "contributing.html#build-process", + "reverting": "contributing.html#reverting", + "branches": "contributing.html#branches" + }, + "glossary.html": { + "gloss-local-store": "store/types/local-store.html", + "package-attribute-set": "#package", + "gloss-chroot-store": "store/types/local-store.html", + "gloss-content-addressed-derivation": "#gloss-content-addressing-derivation" + } +} diff --git a/doc/manual/render-manpage.sh b/doc/manual/render-manpage.sh old mode 100755 new mode 100644 index 65a9c124e6b..6577809b00c --- a/doc/manual/render-manpage.sh +++ b/doc/manual/render-manpage.sh @@ -1,25 +1,55 @@ #!/usr/bin/env bash +# +# Standalone manpage renderer that doesn't require mdbook. +# Uses expand-includes.py to preprocess markdown, then lowdown to generate manpages. set -euo pipefail +script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + lowdown_args= +# Optional --out-no-smarty flag for compatibility with nix_nested_manpages if [ "$1" = --out-no-smarty ]; then lowdown_args=--out-no-smarty shift fi -[ "$#" = 4 ] || { - echo "wrong number of args passed" >&2 +[ "$#" = 7 ] || { + cat >&2 <
    + +Arguments: + title - Manpage title (e.g., "nix-env --install") + section - Manpage section number (1, 5, 8, etc.) + source-root - Root directory of markdown sources + generated-root - Root directory of generated markdown files + doc-url - Base URL for documentation links + infile - Input markdown file (relative to build directory) + outfile - Output manpage file + +Examples: + $0 "nix-store --query" 1 doc/manual/source build/doc/manual/source \\ + https://nix.dev/manual/nix/latest \\ + build/doc/manual/source/command-ref/nix-store/query.md nix-store-query.1 +EOF exit 1 } title="$1" section="$2" -infile="$3" -outfile="$4" +source_root="$3" +generated_root="$4" +doc_url="$5" +infile="$6" +outfile="$7" +# Expand includes and pipe to lowdown ( printf "Title: %s\n\n" "$title" - cat "$infile" + python3 "$script_dir/expand-includes.py" \ + --source-root "$source_root" \ + --generated-root "$generated_root" \ + --doc-url "$doc_url" \ + "$infile" ) | lowdown -sT man --nroff-nolinks $lowdown_args -M section="$section" -o "$outfile" diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index 39747286ff7..bf5ae1ea499 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -21,9 +21,13 @@ - [Derivation Outputs and Types of Derivations](store/derivation/outputs/index.md) - [Content-addressing derivation outputs](store/derivation/outputs/content-address.md) - [Input-addressing derivation outputs](store/derivation/outputs/input-address.md) + - [Build Trace](store/build-trace.md) + - [Derivation Resolution](store/resolution.md) - [Building](store/building.md) + - [Secrets](store/secrets.md) - [Store Types](store/types/index.md) {{#include ./store/types/SUMMARY.md}} + - [Appendix: Math notation](store/math-notation.md) - [Nix Language](language/index.md) - [Data Types](language/types.md) - [String context](language/string-context.md) @@ -110,11 +114,20 @@ - [Architecture and Design](architecture/architecture.md) - 
[Formats and Protocols](protocols/index.md) - [JSON Formats](protocols/json/index.md) + - [File System Object](protocols/json/file-system-object.md) + - [Hash](protocols/json/hash.md) + - [Content Address](protocols/json/content-address.md) + - [Store Path](protocols/json/store-path.md) - [Store Object Info](protocols/json/store-object-info.md) - - [Derivation](protocols/json/derivation.md) + - [Derivation](protocols/json/derivation/index.md) + - [Derivation Options](protocols/json/derivation/options.md) + - [Deriving Path](protocols/json/deriving-path.md) + - [Build Trace Entry](protocols/json/build-trace-entry.md) + - [Build Result](protocols/json/build-result.md) + - [Store](protocols/json/store.md) - [Serving Tarball Flakes](protocols/tarball-fetcher.md) - [Store Path Specification](protocols/store-path.md) - - [Nix Archive (NAR) Format](protocols/nix-archive.md) + - [Nix Archive (NAR) Format](protocols/nix-archive/index.md) - [Derivation "ATerm" file format](protocols/derivation-aterm.md) - [C API](c-api.md) - [Glossary](glossary.md) @@ -171,6 +184,7 @@ - [Release 3.0.0 (2025-03-04)](release-notes-determinate/rl-3.0.0.md) - [Nix Release Notes](release-notes/index.md) {{#include ./SUMMARY-rl-next.md}} + - [Release 2.33 (2025-12-09)](release-notes/rl-2.33.md) - [Release 2.32 (2025-10-06)](release-notes/rl-2.32.md) - [Release 2.31 (2025-08-21)](release-notes/rl-2.31.md) - [Release 2.30 (2025-07-07)](release-notes/rl-2.30.md) diff --git a/doc/manual/source/command-ref/nix-build.md b/doc/manual/source/command-ref/nix-build.md index 3bb59cbed12..860a1fc9745 100644 --- a/doc/manual/source/command-ref/nix-build.md +++ b/doc/manual/source/command-ref/nix-build.md @@ -36,7 +36,7 @@ to a temporary location. The tarball must include a single top-level directory containing at least a file named `default.nix`. 
`nix-build` is essentially a wrapper around -[`nix-instantiate`](nix-instantiate.md) (to translate a high-level Nix +[`nix-instantiate`](./nix-instantiate.md) (to translate a high-level Nix expression to a low-level [store derivation]) and [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md) (to build the store derivation). @@ -52,8 +52,8 @@ derivation). # Options All options not listed here are passed to -[`nix-store --realise`](nix-store/realise.md), -except for `--arg` and `--attr` / `-A` which are passed to [`nix-instantiate`](nix-instantiate.md). +[`nix-store --realise`](./nix-store/realise.md), +except for `--arg` and `--attr` / `-A` which are passed to [`nix-instantiate`](./nix-instantiate.md). - [`--no-out-link`](#opt-no-out-link) diff --git a/doc/manual/source/command-ref/nix-channel.md b/doc/manual/source/command-ref/nix-channel.md index a65ec97c558..59817be974b 100644 --- a/doc/manual/source/command-ref/nix-channel.md +++ b/doc/manual/source/command-ref/nix-channel.md @@ -17,10 +17,10 @@ Channels are a mechanism for referencing remote Nix expressions and conveniently retrieving their latest version. 
The moving parts of channels are: -- The official channels listed at +- The official channels listed at - The user-specific list of [subscribed channels](#subscribed-channels) - The [downloaded channel contents](#channels) -- The [Nix expression search path](@docroot@/command-ref/conf-file.md#conf-nix-path), set with the [`-I` option](#opt-i) or the [`NIX_PATH` environment variable](#env-NIX_PATH) +- The [Nix expression search path](@docroot@/command-ref/conf-file.md#conf-nix-path), set with the [`-I` option](#opt-I) or the [`NIX_PATH` environment variable](#env-NIX_PATH) > **Note** > @@ -94,9 +94,9 @@ This command has the following operations: Subscribe to the Nixpkgs channel and run `hello` from the GNU Hello package: ```console -$ nix-channel --add https://nixos.org/channels/nixpkgs-unstable +$ nix-channel --add https://channels.nixos.org/nixpkgs-unstable $ nix-channel --list -nixpkgs https://nixos.org/channels/nixpkgs +nixpkgs https://channels.nixos.org/nixpkgs $ nix-channel --update $ nix-shell -p hello --run hello hello diff --git a/doc/manual/source/command-ref/nix-env/upgrade.md b/doc/manual/source/command-ref/nix-env/upgrade.md index 2779363c34a..bf4c1a8ed3c 100644 --- a/doc/manual/source/command-ref/nix-env/upgrade.md +++ b/doc/manual/source/command-ref/nix-env/upgrade.md @@ -22,7 +22,7 @@ left untouched; this is not an error. It is also not an error if an element of *args* matches no installed derivations. For a description of how *args* is mapped to a set of store paths, see -[`--install`](#operation---install). If *args* describes multiple +[`--install`](./install.md). If *args* describes multiple store paths with the same symbolic name, only the one with the highest version is installed. diff --git a/doc/manual/source/command-ref/nix-hash.md b/doc/manual/source/command-ref/nix-hash.md index 0860f312d94..7c17ce9095b 100644 --- a/doc/manual/source/command-ref/nix-hash.md +++ b/doc/manual/source/command-ref/nix-hash.md @@ -34,7 +34,7 @@ md5sum`. 
Print the cryptographic hash of the contents of each regular file *path*. That is, instead of computing the hash of the [Nix Archive (NAR)](@docroot@/store/file-system-object/content-address.md#serial-nix-archive) of *path*, - just [directly hash]((@docroot@/store/file-system-object/content-address.md#serial-flat) *path* as is. + just [directly hash](@docroot@/store/file-system-object/content-address.md#serial-flat) *path* as is. This requires *path* to resolve to a regular file rather than directory. The result is identical to that produced by the GNU commands `md5sum` and `sha1sum`. diff --git a/doc/manual/source/command-ref/nix-instantiate.md b/doc/manual/source/command-ref/nix-instantiate.md index 38454515d57..bac9f365933 100644 --- a/doc/manual/source/command-ref/nix-instantiate.md +++ b/doc/manual/source/command-ref/nix-instantiate.md @@ -32,7 +32,7 @@ standard input. - `--add-root` *path* - See the [corresponding option](nix-store.md) in `nix-store`. + See the [corresponding option](./nix-store.md) in `nix-store`. - `--parse` diff --git a/doc/manual/source/command-ref/nix-shell.md b/doc/manual/source/command-ref/nix-shell.md index f2e2e35936e..307f1934a9e 100644 --- a/doc/manual/source/command-ref/nix-shell.md +++ b/doc/manual/source/command-ref/nix-shell.md @@ -19,7 +19,7 @@ This man page describes the command `nix-shell`, which is distinct from `nix shell`. For documentation on the latter, run `nix shell --help` or see `man -nix3-shell`. +nix3-env-shell`. # Description diff --git a/doc/manual/source/command-ref/nix-store/gc.md b/doc/manual/source/command-ref/nix-store/gc.md index f432e00eb96..8ec59d906c2 100644 --- a/doc/manual/source/command-ref/nix-store/gc.md +++ b/doc/manual/source/command-ref/nix-store/gc.md @@ -48,8 +48,7 @@ The behaviour of the collector is also influenced by the configuration file. By default, the collector prints the total number of freed bytes when it -finishes (or when it is interrupted). 
With `--print-dead`, it prints the -number of bytes that would be freed. +finishes (or when it is interrupted). {{#include ./opt-common.md}} diff --git a/doc/manual/source/development/building.md b/doc/manual/source/development/building.md index bc4b2ed02e6..da72204107c 100644 --- a/doc/manual/source/development/building.md +++ b/doc/manual/source/development/building.md @@ -172,7 +172,7 @@ You can use any of the other supported environments in place of `nix-cli-ccacheS ## Editor integration The `clangd` LSP server is installed by default on the `clang`-based `devShell`s. -See [supported compilation environments](#compilation-environments) and instructions how to [set up a shell with flakes](#nix-with-flakes). +See [supported compilation environments](#compilation-environments) and instructions how to [set up a shell](#building-nix). To use the LSP with your editor, you will want a `compile_commands.json` file telling `clangd` how we are compiling the code. Meson's configure always produces this inside the build directory. diff --git a/doc/manual/source/development/debugging.md b/doc/manual/source/development/debugging.md index ccc6614b75a..d2450495e50 100644 --- a/doc/manual/source/development/debugging.md +++ b/doc/manual/source/development/debugging.md @@ -15,7 +15,7 @@ In the development shell, set the `mesonBuildType` environment variable to `debu Then, proceed to build Nix as described in [Building Nix](./building.md). This will build Nix with debug symbols, which are essential for effective debugging. 
-It is also possible to build without debugging for faster build: +It is also possible to build without optimization for faster build: ```console [nix-shell]$ NIX_HARDENING_ENABLE=$(printLines $NIX_HARDENING_ENABLE | grep -v fortify) diff --git a/doc/manual/source/development/documentation.md b/doc/manual/source/development/documentation.md index 30cc8adc44a..dd40ef481a7 100644 --- a/doc/manual/source/development/documentation.md +++ b/doc/manual/source/development/documentation.md @@ -25,20 +25,31 @@ nix build .#nix-manual and open `./result/share/doc/nix/manual/index.html`. -To build the manual incrementally, [enter the development shell](./building.md) and run: +To build the manual incrementally, [enter the development shell](./building.md) and configure with `doc-gen` enabled: + +**If using interactive `nix develop`:** ```console -make manual-html-open -j $NIX_BUILD_CORES +$ nix develop +$ mesonFlags="$mesonFlags -Ddoc-gen=true" mesonConfigurePhase ``` -In order to reflect changes to the [Makefile for the manual], clear all generated files before re-building: +**If using direnv:** + +```console +$ direnv allow +$ bash -c 'source $stdenv/setup && mesonFlags="$mesonFlags -Ddoc-gen=true" mesonConfigurePhase' +``` -[Makefile for the manual]: https://github.com/NixOS/nix/blob/master/doc/manual/local.mk +Then build the manual: ```console -rm $(git ls-files doc/manual/ -o | grep -F '.md') && rmdir doc/manual/source/command-ref/new-cli && make manual-html -j $NIX_BUILD_CORES +$ cd build +$ meson compile manual ``` +The HTML manual will be generated at `build/src/nix-manual/manual/index.html`. 
+ ## Style guide The goal of this style guide is to make it such that @@ -229,3 +240,9 @@ $ configurePhase $ ninja src/external-api-docs/html $ xdg-open src/external-api-docs/html/index.html ``` + +If you use direnv, or otherwise want to run `configurePhase` in a transient shell, use: + +```bash +nix-shell -A devShells.x86_64-linux.native-clangStdenv --command 'appendToVar mesonFlags "-Ddoc-gen=true"; mesonConfigurePhase' +``` diff --git a/doc/manual/source/development/testing.md b/doc/manual/source/development/testing.md index 69e3637ced0..35654d16393 100644 --- a/doc/manual/source/development/testing.md +++ b/doc/manual/source/development/testing.md @@ -119,7 +119,7 @@ This will: 3. Stop the program when the test fails, allowing the user to then issue arbitrary commands to GDB. -### Characterisation testing { #characaterisation-testing-unit } +### Characterisation testing { #characterisation-testing-unit } See [functional characterisation testing](#characterisation-testing-functional) for a broader discussion of characterisation testing. @@ -137,6 +137,12 @@ $ _NIX_TEST_ACCEPT=1 meson test nix-store-tests -v will regenerate the "golden master" expected result for the `libnixstore` characterisation tests. The characterisation tests will mark themselves "skipped" since they regenerated the expected result instead of actually testing anything. +### JSON Schema testing + +In `doc/manual/source/protocols/json/` we have a number of manual pages generated from [JSON Schema](https://json-schema.org/). +That JSON schema is tested against the JSON file test data used in [characterisation tests](#characterisation-testing-unit) for JSON (de)serialization, in `src/json-schema-checks`. +Between the JSON (de)serialization testing, and this testing of the same data against the schema, we make sure that the manual, the implementation, and a machine-readable schema are all in sync.
+ ### Unit test support libraries There are headers and code which are not just used to test the library in question, but also downstream libraries. diff --git a/doc/manual/source/glossary.md b/doc/manual/source/glossary.md index 9e76ad37b96..a54f0cbff75 100644 --- a/doc/manual/source/glossary.md +++ b/doc/manual/source/glossary.md @@ -208,7 +208,7 @@ - [impure derivation]{#gloss-impure-derivation} - [An experimental feature](#@docroot@/development/experimental-features.md#xp-feature-impure-derivations) that allows derivations to be explicitly marked as impure, + [An experimental feature](@docroot@/development/experimental-features.md#xp-feature-impure-derivations) that allows derivations to be explicitly marked as impure, so that they are always rebuilt, and their outputs not reused by subsequent calls to realise them. - [Nix database]{#gloss-nix-database} @@ -279,7 +279,7 @@ See [References](@docroot@/store/store-object.md#references) for details. -- [referrer]{#gloss-reference} +- [referrer]{#gloss-referrer} A reversed edge from one [store object] to another. @@ -359,8 +359,8 @@ Nix represents files as [file system objects][file system object], and how they belong together is encoded as [references][reference] between [store objects][store object] that contain these file system objects. - The [Nix language] allows denoting packages in terms of [attribute sets](@docroot@/language/types.md#attribute-set) containing: - - attributes that refer to the files of a package, typically in the form of [derivation outputs](#output), + The [Nix language] allows denoting packages in terms of [attribute sets](@docroot@/language/types.md#type-attrs) containing: + - attributes that refer to the files of a package, typically in the form of [derivation outputs](#gloss-output), - attributes with metadata, such as information about how the package is supposed to be used. The exact shape of these attribute sets is up to convention. 
@@ -375,7 +375,7 @@ [string]: ./language/types.md#type-string [path]: ./language/types.md#type-path - [attribute name]: ./language/types.md#attribute-set + [attribute name]: ./language/types.md#type-attrs - [base directory]{#gloss-base-directory} diff --git a/doc/manual/source/installation/installing-docker.md b/doc/manual/source/installation/installing-docker.md index 9354c1a7228..ccc75be5a99 100644 --- a/doc/manual/source/installation/installing-docker.md +++ b/doc/manual/source/installation/installing-docker.md @@ -3,19 +3,21 @@ To run the latest stable release of Nix with Docker run the following command: ```console -$ docker run -ti ghcr.io/nixos/nix -Unable to find image 'ghcr.io/nixos/nix:latest' locally -latest: Pulling from ghcr.io/nixos/nix +$ docker run -ti docker.io/nixos/nix +Unable to find image 'docker.io/nixos/nix:latest' locally +latest: Pulling from docker.io/nixos/nix 5843afab3874: Pull complete b52bf13f109c: Pull complete 1e2415612aa3: Pull complete Digest: sha256:27f6e7f60227e959ee7ece361f75d4844a40e1cc6878b6868fe30140420031ff -Status: Downloaded newer image for ghcr.io/nixos/nix:latest +Status: Downloaded newer image for docker.io/nixos/nix:latest 35ca4ada6e96:/# nix --version nix (Nix) 2.3.12 35ca4ada6e96:/# exit ``` +> If you want the latest pre-release you can use ghcr.io/nixos/nix and view them at https://github.com/nixos/nix/pkgs/container/nix + # What is included in Nix's Docker image? 
The official Docker image is created using `pkgs.dockerTools.buildLayeredImage` diff --git a/doc/manual/source/language/advanced-attributes.md b/doc/manual/source/language/advanced-attributes.md index c9d64f060ee..f0b1a4c730e 100644 --- a/doc/manual/source/language/advanced-attributes.md +++ b/doc/manual/source/language/advanced-attributes.md @@ -333,7 +333,7 @@ Here is more information on the `output*` attributes, and what values they may b `outputHashAlgo` can only be `null` when `outputHash` follows the SRI format, because in that case the choice of hash algorithm is determined by `outputHash`. - - [`outputHash`]{#adv-attr-outputHashAlgo}; [`outputHash`]{#adv-attr-outputHashMode} + - [`outputHash`]{#adv-attr-outputHash} This will specify the output hash of the single output of a [fixed-output derivation]. diff --git a/doc/manual/source/language/builtins-prefix.md b/doc/manual/source/language/builtins-prefix.md index fff0f7cb5e4..8dd929be360 100644 --- a/doc/manual/source/language/builtins-prefix.md +++ b/doc/manual/source/language/builtins-prefix.md @@ -23,7 +23,7 @@ Some built-ins are also exposed directly in the global scope: - [`null`](#builtins-null) - [`placeholder`](#builtins-placeholder) - [`removeAttrs`](#builtins-removeAttrs) -- `scopedImport` +- [`scopedImport`](#builtins-scopedImport) - [`throw`](#builtins-throw) - [`toString`](#builtins-toString) - [`true`](#builtins-true) diff --git a/doc/manual/source/language/derivations.md b/doc/manual/source/language/derivations.md index 43eec680bbc..2403183fc2d 100644 --- a/doc/manual/source/language/derivations.md +++ b/doc/manual/source/language/derivations.md @@ -16,7 +16,7 @@ It outputs an attribute set, and produces a [store derivation] as a side effect - [`name`]{#attr-name} ([String](@docroot@/language/types.md#type-string)) A symbolic name for the derivation. - See [derivation outputs](@docroot@/store/derivation/index.md#outputs) for what this is affects. 
+ See [derivation outputs](@docroot@/store/derivation/outputs/index.md#outputs) for what this affects. [store path]: @docroot@/store/store-path.md diff --git a/doc/manual/source/language/evaluation.md b/doc/manual/source/language/evaluation.md index 980942c92b9..dff42977631 100644 --- a/doc/manual/source/language/evaluation.md +++ b/doc/manual/source/language/evaluation.md @@ -74,4 +74,48 @@ in f { x = throw "error"; y = throw "error"; } => "ok" ``` +## Evaluation order + +The order in which expressions are evaluated is generally unspecified, because it does not affect successful evaluation outcomes. +This allows more freedom for the evaluator to evolve and to evaluate efficiently. + +Data dependencies naturally impose some ordering constraints: a value cannot be used before it is computed. +Beyond these constraints, the evaluator is free to choose any order. + +The order in which side effects such as [`builtins.trace`](@docroot@/language/builtins.md#builtins-trace) output occurs is not defined, but may be expected to follow data dependencies. + +In a lazy language, evaluation order is often opposite to expectations from strict languages. +For example, in `let wrap = x: { wrapped = x; }; in wrap (1 + 2)`, the function body produces a result (`{ wrapped = ...; }`) *before* evaluating `x`. + +## Infinite recursion and stack overflow + +During evaluation, two types of errors can occur when expressions reference themselves or call functions too deeply: + +### Infinite recursion + +This error occurs when a value depends on itself through a cycle, making it impossible to compute. + +```nix +let x = x; in x +=> error: infinite recursion encountered +``` + +Infinite recursion happens at the value level when evaluating an expression requires evaluating the same expression again. + +Despite the name, infinite recursion is cheap to compute and does not involve a stack overflow. +The cycle is finite and fairly easy to detect.
+ +### Stack overflow + +This error occurs when the call depth exceeds the maximum allowed limit. + +```nix +let f = x: f (x + 1); +in f 0 +=> error: stack overflow; max-call-depth exceeded +``` + +Stack overflow happens when too many function calls are nested without returning. +The maximum call depth is controlled by the [`max-call-depth` setting](@docroot@/command-ref/conf-file.md#conf-max-call-depth). + [C API]: @docroot@/c-api.md diff --git a/doc/manual/source/language/identifiers.md b/doc/manual/source/language/identifiers.md index 584a2f86191..67bb1eeec67 100644 --- a/doc/manual/source/language/identifiers.md +++ b/doc/manual/source/language/identifiers.md @@ -16,7 +16,7 @@ An *identifier* is an [ASCII](https://en.wikipedia.org/wiki/ASCII) character seq # Names -A *name* can be written as an [identifier](#identifier) or a [string literal](./string-literals.md). +A *name* can be written as an [identifier](#identifiers) or a [string literal](./string-literals.md). > **Syntax** > diff --git a/doc/manual/source/language/index.md b/doc/manual/source/language/index.md index 1eb14e96d36..116f928dc9c 100644 --- a/doc/manual/source/language/index.md +++ b/doc/manual/source/language/index.md @@ -137,7 +137,7 @@ This is an incomplete overview of language features, by example. - [Booleans](@docroot@/language/types.md#type-boolean) + [Booleans](@docroot@/language/types.md#type-bool) @@ -245,7 +245,7 @@ This is an incomplete overview of language features, by example. - An [attribute set](@docroot@/language/types.md#attribute-set) with attributes named `x` and `y` + An [attribute set](@docroot@/language/types.md#type-attrs) with attributes named `x` and `y` @@ -285,7 +285,7 @@ This is an incomplete overview of language features, by example. - [Lists](@docroot@/language/types.md#list) with three elements. + [Lists](@docroot@/language/types.md#type-list) with three elements. @@ -369,7 +369,7 @@ This is an incomplete overview of language features, by example. 
- [Attribute selection](@docroot@/language/types.md#attribute-set) (evaluates to `1`) + [Attribute selection](@docroot@/language/types.md#type-attrs) (evaluates to `1`) @@ -381,7 +381,7 @@ This is an incomplete overview of language features, by example. - [Attribute selection](@docroot@/language/types.md#attribute-set) with default (evaluates to `3`) + [Attribute selection](@docroot@/language/types.md#type-attrs) with default (evaluates to `3`) diff --git a/doc/manual/source/language/operators.md b/doc/manual/source/language/operators.md index ab74e8a9999..dad3e1e8d51 100644 --- a/doc/manual/source/language/operators.md +++ b/doc/manual/source/language/operators.md @@ -23,8 +23,8 @@ | [Greater than or equal to][Comparison] | *expr* `>=` *expr* | none | 10 | | [Equality] | *expr* `==` *expr* | none | 11 | | Inequality | *expr* `!=` *expr* | none | 11 | -| Logical conjunction (`AND`) | *bool* `&&` *bool* | left | 12 | -| Logical disjunction (`OR`) | *bool* \|\| *bool* | left | 13 | +| [Logical conjunction] (`AND`) | *bool* `&&` *bool* | left | [12](#precedence-and-disjunctive-normal-form) | +| [Logical disjunction] (`OR`) | *bool* \|\| *bool* | left | [13](#precedence-and-disjunctive-normal-form) | | [Logical implication] | *bool* `->` *bool* | right | 14 | | [Pipe operator] (experimental) | *expr* `\|>` *func* | left | 15 | | [Pipe operator] (experimental) | *func* `<\|` *expr* | right | 15 | @@ -162,6 +162,9 @@ Update [attribute set] *attrset1* with names and values from *attrset2*. The returned attribute set will have all of the attributes in *attrset1* and *attrset2*. If an attribute name is present in both, the attribute value from the latter is taken. +This operator is [strict](@docroot@/language/evaluation.md#strictness) in both *attrset1* and *attrset2*. +That means that both arguments are evaluated to [weak head normal form](@docroot@/language/evaluation.md#values), so the attribute sets themselves are evaluated, but their attribute values are not evaluated. 
+ [Update]: #update ## Comparison @@ -185,18 +188,95 @@ All comparison operators are implemented in terms of `<`, and the following equi ## Equality -- [Attribute sets][attribute set] and [lists][list] are compared recursively, and therefore are fully evaluated. -- Comparison of [functions][function] always returns `false`. +- [Attribute sets][attribute set] are compared first by attribute names and then by items until a difference is found. +- [Lists][list] are compared first by length and then by items until a difference is found. +- Comparison of distinct [functions][function] returns `false`, but identical functions may be subject to [value identity optimization](#value-identity-optimization). - Numbers are type-compatible, see [arithmetic] operators. - Floating point numbers only differ up to a limited precision. +The `==` operator is [strict](@docroot@/language/evaluation.md#strictness) in both arguments; when comparing composite types ([attribute sets][attribute set] and [lists][list]), it is partially strict in their contained values: they are evaluated until a difference is found. + +### Value identity optimization + +Nix performs equality comparisons of nested values by pointer equality or more abstractly, _identity_. +Nix semantics ideally do not assign a unique identity to values as they are created, but equality is an exception to this rule. +The disputable benefit of this is that it is more efficient, and it allows cyclical structures to be compared, e.g. `let x = { x = x; }; in x == x` evaluates to `true`. +However, as a consequence, it makes a function equal to itself when the comparison is made in a list or attribute set, in contradiction to a simple direct comparison. + [function]: ./syntax.md#functions [Equality]: #equality +## Logical conjunction + +> **Syntax** +> +> *bool1* `&&` *bool2* + +Logical AND. Equivalent to `if` *bool1* `then` *bool2* `else false`. 
+ +This operator is [strict](@docroot@/language/evaluation.md#strictness) in *bool1*, but only evaluates *bool2* if *bool1* is `true`. + +> **Example** +> +> ```nix +> true && false +> => false +> +> false && throw "never evaluated" +> => false +> ``` + +[Logical conjunction]: #logical-conjunction + +## Logical disjunction + +> **Syntax** +> +> *bool1* `||` *bool2* + +Logical OR. Equivalent to `if` *bool1* `then true` `else` *bool2*. + +This operator is [strict](@docroot@/language/evaluation.md#strictness) in *bool1*, but only evaluates *bool2* if *bool1* is `false`. + +> **Example** +> +> ```nix +> true || false +> => true +> +> true || throw "never evaluated" +> => true +> ``` + +[Logical disjunction]: #logical-disjunction + +### Precedence and disjunctive normal form + +The precedence of `&&` and `||` aligns with disjunctive normal form. +Without parentheses, an expression describes multiple "permissible situations" (connected by `||`), where each situation consists of multiple simultaneous conditions (connected by `&&`). + +For example, `A || B && C || D && E` is parsed as `A || (B && C) || (D && E)`, describing three permissible situations: A holds, or both B and C hold, or both D and E hold. + ## Logical implication -Equivalent to `!`*b1* `||` *b2* (or `if` *b1* `then` *b2* `else true`) +> **Syntax** +> +> *bool1* `->` *bool2* + +Logical implication. Equivalent to `!`*bool1* `||` *bool2* (or `if` *bool1* `then` *bool2* `else true`). + +This operator is [strict](@docroot@/language/evaluation.md#strictness) in *bool1*, but only evaluates *bool2* if *bool1* is `true`. 
+ +> **Example** +> +> ```nix +> true -> false +> => false +> +> false -> throw "never evaluated" +> => true +> ``` [Logical implication]: #logical-implication diff --git a/doc/manual/source/language/string-context.md b/doc/manual/source/language/string-context.md index 0d8fcdefa91..65c59d865f0 100644 --- a/doc/manual/source/language/string-context.md +++ b/doc/manual/source/language/string-context.md @@ -111,7 +111,7 @@ It creates an [attribute set] representing the string context, which can be insp [`builtins.hasContext`]: ./builtins.md#builtins-hasContext [`builtins.getContext`]: ./builtins.md#builtins-getContext -[attribute set]: ./types.md#attribute-set +[attribute set]: ./types.md#type-attrs ## Clearing string contexts diff --git a/doc/manual/source/language/string-interpolation.md b/doc/manual/source/language/string-interpolation.md index a503d5f04bd..8e25d2b6311 100644 --- a/doc/manual/source/language/string-interpolation.md +++ b/doc/manual/source/language/string-interpolation.md @@ -6,7 +6,7 @@ Such a construct is called *interpolated string*, and the expression inside is a [string]: ./types.md#type-string [path]: ./types.md#type-path -[attribute set]: ./types.md#attribute-set +[attribute set]: ./types.md#type-attrs > **Syntax** > diff --git a/doc/manual/source/language/syntax.md b/doc/manual/source/language/syntax.md index 85162db747a..b127aca14c1 100644 --- a/doc/manual/source/language/syntax.md +++ b/doc/manual/source/language/syntax.md @@ -51,7 +51,7 @@ See [String literals](string-literals.md). Path literals can also include [string interpolation], besides being [interpolated into other expressions]. - [interpolated into other expressions]: ./string-interpolation.md#interpolated-expressions + [interpolated into other expressions]: ./string-interpolation.md#interpolated-expression At least one slash (`/`) must appear *before* any interpolated expression for the result to be recognized as a path. 
@@ -235,7 +235,7 @@ of object-oriented programming, for example. ## Recursive sets -Recursive sets are like normal [attribute sets](./types.md#attribute-set), but the attributes can refer to each other. +Recursive sets are like normal [attribute sets](./types.md#type-attrs), but the attributes can refer to each other. > *rec-attrset* = `rec {` [ *name* `=` *expr* `;` `]`... `}` @@ -287,7 +287,7 @@ This evaluates to `"foobar"`. ## Inheriting attributes -When defining an [attribute set](./types.md#attribute-set) or in a [let-expression](#let-expressions) it is often convenient to copy variables from the surrounding lexical scope (e.g., when you want to propagate attributes). +When defining an [attribute set](./types.md#type-attrs) or in a [let-expression](#let-expressions) it is often convenient to copy variables from the surrounding lexical scope (e.g., when you want to propagate attributes). This can be shortened using the `inherit` keyword. Example: diff --git a/doc/manual/source/meson.build b/doc/manual/source/meson.build index 949d265269a..294d57ad9f9 100644 --- a/doc/manual/source/meson.build +++ b/doc/manual/source/meson.build @@ -1,3 +1,6 @@ +# Process JSON schema documentation +subdir('protocols') + summary_rl_next = custom_target( command : [ bash, diff --git a/doc/manual/source/protocols/derivation-aterm.md b/doc/manual/source/protocols/derivation-aterm.md index 99e3c2be630..523678e663e 100644 --- a/doc/manual/source/protocols/derivation-aterm.md +++ b/doc/manual/source/protocols/derivation-aterm.md @@ -1,6 +1,8 @@ # Derivation "ATerm" file format -For historical reasons, [store derivations][store derivation] are stored on-disk in [ATerm](https://homepages.cwi.nl/~daybuild/daily-books/technology/aterm-guide/aterm-guide.html) format. 
+For historical reasons, [store derivations][store derivation] are stored on-disk in "Annotated Term" (ATerm) format +([guide](https://homepages.cwi.nl/~daybuild/daily-books/technology/aterm-guide/aterm-guide.html), +[paper](https://doi.org/10.1002/(SICI)1097-024X(200003)30:3%3C259::AID-SPE298%3E3.0.CO;2-Y)). ## The ATerm format used diff --git a/doc/manual/source/protocols/json/build-result.md b/doc/manual/source/protocols/json/build-result.md new file mode 100644 index 00000000000..527e7bcc0eb --- /dev/null +++ b/doc/manual/source/protocols/json/build-result.md @@ -0,0 +1,21 @@ +{{#include build-result-v1-fixed.md}} + +## Examples + +### Successful build + +```json +{{#include schema/build-result-v1/success.json}} +``` + +### Failed build (output rejected) + +```json +{{#include schema/build-result-v1/output-rejected.json}} +``` + +### Failed build (non-deterministic) + +```json +{{#include schema/build-result-v1/not-deterministic.json}} +``` \ No newline at end of file diff --git a/doc/manual/source/protocols/json/build-trace-entry.md b/doc/manual/source/protocols/json/build-trace-entry.md new file mode 100644 index 00000000000..8050a2840bf --- /dev/null +++ b/doc/manual/source/protocols/json/build-trace-entry.md @@ -0,0 +1,27 @@ +{{#include build-trace-entry-v1-fixed.md}} + +## Examples + +### Simple build trace entry + +```json +{{#include schema/build-trace-entry-v1/simple.json}} +``` + +### Build trace entry with dependencies + +```json +{{#include schema/build-trace-entry-v1/with-dependent-realisations.json}} +``` + +### Build trace entry with signature + +```json +{{#include schema/build-trace-entry-v1/with-signature.json}} +``` + + \ No newline at end of file diff --git a/doc/manual/source/protocols/json/content-address.md b/doc/manual/source/protocols/json/content-address.md new file mode 100644 index 00000000000..2284e30aa6d --- /dev/null +++ b/doc/manual/source/protocols/json/content-address.md @@ -0,0 +1,21 @@ +{{#include content-address-v1-fixed.md}} 
+ +## Examples + +### [Text](@docroot@/store/store-object/content-address.html#method-text) method + +```json +{{#include schema/content-address-v1/text.json}} +``` + +### [Nix Archive](@docroot@/store/store-object/content-address.html#method-nix-archive) method + +```json +{{#include schema/content-address-v1/nar.json}} +``` + + diff --git a/doc/manual/source/protocols/json/derivation.md b/doc/manual/source/protocols/json/derivation.md deleted file mode 100644 index 86854cf2655..00000000000 --- a/doc/manual/source/protocols/json/derivation.md +++ /dev/null @@ -1,114 +0,0 @@ -# Derivation JSON Format - -The JSON serialization of a -[derivations](@docroot@/glossary.md#gloss-store-derivation) -is a JSON object with the following fields: - -* `name`: - The name of the derivation. - This is used when calculating the store paths of the derivation's outputs. - -* `version`: - Must be `3`. - This is a guard that allows us to continue evolving this format. - The choice of `3` is fairly arbitrary, but corresponds to this informal version: - - - Version 0: A-Term format - - - Version 1: Original JSON format, with ugly `"r:sha256"` inherited from A-Term format. - - - Version 2: Separate `method` and `hashAlgo` fields in output specs - - - Verison 3: Drop store dir from store paths, just include base name. - - Note that while this format is experimental, the maintenance of versions is best-effort, and not promised to identify every change. - -* `outputs`: - Information about the output paths of the derivation. - This is a JSON object with one member per output, where the key is the output name and the value is a JSON object with these fields: - - * `path`: - The output path, if it is known in advanced. - Otherwise, `null`. - - - * `method`: - For an output which will be [content addressed], a string representing the [method](@docroot@/store/store-object/content-address.md) of content addressing that is chosen. 
- Valid method strings are: - - - [`flat`](@docroot@/store/store-object/content-address.md#method-flat) - - [`nar`](@docroot@/store/store-object/content-address.md#method-nix-archive) - - [`text`](@docroot@/store/store-object/content-address.md#method-text) - - [`git`](@docroot@/store/store-object/content-address.md#method-git) - - Otherwise, `null`. - - * `hashAlgo`: - For an output which will be [content addressed], the name of the hash algorithm used. - Valid algorithm strings are: - - - `blake3` - - `md5` - - `sha1` - - `sha256` - - `sha512` - - * `hash`: - For fixed-output derivations, the expected content hash in base-16. - - > **Example** - > - > ```json - > "outputs": { - > "out": { - > "method": "nar", - > "hashAlgo": "sha256", - > "hash": "6fc80dcc62179dbc12fc0b5881275898f93444833d21b89dfe5f7fbcbb1d0d62" - > } - > } - > ``` - -* `inputSrcs`: - A list of store paths on which this derivation depends. - - > **Example** - > - > ```json - > "inputSrcs": [ - > "47y241wqdhac3jm5l7nv0x4975mb1975-separate-debug-info.sh", - > "56d0w71pjj9bdr363ym3wj1zkwyqq97j-fix-pop-var-context-error.patch" - > ] - > ``` - -* `inputDrvs`: - A JSON object specifying the derivations on which this derivation depends, and what outputs of those derivations. - - > **Example** - > - > ```json - > "inputDrvs": { - > "6lkh5yi7nlb7l6dr8fljlli5zfd9hq58-curl-7.73.0.drv": ["dev"], - > "fn3kgnfzl5dzym26j8g907gq3kbm8bfh-unzip-6.0.drv": ["out"] - > } - > ``` - - specifies that this derivation depends on the `dev` output of `curl`, and the `out` output of `unzip`. - -* `system`: - The system type on which this derivation is to be built - (e.g. `x86_64-linux`). - -* `builder`: - The absolute path of the program to be executed to run the build. - Typically this is the `bash` shell - (e.g. `/nix/store/r3j288vpmczbl500w6zz89gyfa4nr0b1-bash-4.4-p23/bin/bash`). - -* `args`: - The command-line arguments passed to the `builder`. - -* `env`: - The environment passed to the `builder`. 
- -* `structuredAttrs`: - [Strucutured Attributes](@docroot@/store/derivation/index.md#structured-attrs), only defined if the derivation contains them. - Structured attributes are JSON, and thus embedded as-is. diff --git a/doc/manual/source/protocols/json/derivation/index.md b/doc/manual/source/protocols/json/derivation/index.md new file mode 100644 index 00000000000..0b15acb8f7d --- /dev/null +++ b/doc/manual/source/protocols/json/derivation/index.md @@ -0,0 +1,7 @@ +{{#include ../derivation-v4-fixed.md}} + + diff --git a/doc/manual/source/protocols/json/derivation/options.md b/doc/manual/source/protocols/json/derivation/options.md new file mode 100644 index 00000000000..c40ffe0f3f2 --- /dev/null +++ b/doc/manual/source/protocols/json/derivation/options.md @@ -0,0 +1,49 @@ +{{#include ../derivation-options-v1-fixed.md}} + +## Examples + +### Input-addressed derivations + +#### Default options + +```json +{{#include ../schema/derivation-options-v1/ia/derivation-options/defaults.json}} +``` + +#### All options set + +```json +{{#include ../schema/derivation-options-v1/ia/derivation-options/all_set.json}} +``` + +#### Default options (structured attributes) + +```json +{{#include ../schema/derivation-options-v1/ia/derivation-options/structuredAttrs_defaults.json}} +``` + +#### All options set (structured attributes) + +```json +{{#include ../schema/derivation-options-v1/ia/derivation-options/structuredAttrs_all_set.json}} +``` + +### Content-addressed derivations + +#### All options set + +```json +{{#include ../schema/derivation-options-v1/ca/derivation-options/all_set.json}} +``` + +#### All options set (structured attributes) + +```json +{{#include ../schema/derivation-options-v1/ca/derivation-options/structuredAttrs_all_set.json}} +``` + + diff --git a/doc/manual/source/protocols/json/deriving-path.md b/doc/manual/source/protocols/json/deriving-path.md new file mode 100644 index 00000000000..9851b371d37 --- /dev/null +++ 
b/doc/manual/source/protocols/json/deriving-path.md @@ -0,0 +1,21 @@ +{{#include deriving-path-v1-fixed.md}} + +## Examples + +### Constant + +```json +{{#include schema/deriving-path-v1/single_opaque.json}} +``` + +### Output of static derivation + +```json +{{#include schema/deriving-path-v1/single_built.json}} +``` + +### Output of dynamic derivation + +```json +{{#include schema/deriving-path-v1/single_built_built.json}} +``` diff --git a/doc/manual/source/protocols/json/file-system-object.md b/doc/manual/source/protocols/json/file-system-object.md new file mode 100644 index 00000000000..7a93b65ab1f --- /dev/null +++ b/doc/manual/source/protocols/json/file-system-object.md @@ -0,0 +1,21 @@ +{{#include file-system-object-v1-fixed.md}} + +## Examples + +### Simple + +```json +{{#include schema/file-system-object-v1/simple.json}} +``` + +### Complex + +```json +{{#include schema/file-system-object-v1/complex.json}} +``` + + diff --git a/doc/manual/source/protocols/json/fixup-json-schema-generated-doc.sed b/doc/manual/source/protocols/json/fixup-json-schema-generated-doc.sed new file mode 100644 index 00000000000..96b6f1801a5 --- /dev/null +++ b/doc/manual/source/protocols/json/fixup-json-schema-generated-doc.sed @@ -0,0 +1,18 @@ +# For some reason, backticks in the JSON schema are being escaped rather +# than being kept as intentional code spans. This removes all backtick +# escaping, which is an ugly solution, but one that is fine, because we +# are not using backticks for any other purpose. +s/\\`/`/g + +# The way that semi-external references are rendered (i.e. ones to +# sibling schema files, as opposed to separate website ones, is not nice +# for humans. Replace it with a nice relative link within the manual +# instead. +# +# As we have more such relative links, more replacements of this nature +# should appear below. 
+s^#/\$defs/\(regular\|symlink\|directory\)^In this schema^g +s^\(./hash-v1.yaml\)\?#/$defs/algorithm^[JSON format for `Hash`](@docroot@/protocols/json/hash.html#algorithm)^g +s^\(./hash-v1.yaml\)^[JSON format for `Hash`](@docroot@/protocols/json/hash.html)^g +s^\(./content-address-v1.yaml\)\?#/$defs/method^[JSON format for `ContentAddress`](@docroot@/protocols/json/content-address.html#method)^g +s^\(./content-address-v1.yaml\)^[JSON format for `ContentAddress`](@docroot@/protocols/json/content-address.html)^g diff --git a/doc/manual/source/protocols/json/hash.md b/doc/manual/source/protocols/json/hash.md new file mode 100644 index 00000000000..3ecff4da010 --- /dev/null +++ b/doc/manual/source/protocols/json/hash.md @@ -0,0 +1,21 @@ +{{#include hash-v1-fixed.md}} + +## Examples + +### SHA-256 + +```json +{{#include schema/hash-v1/sha256.json}} +``` + +### BLAKE3 + +```json +{{#include schema/hash-v1/blake3.json}} +``` + + diff --git a/doc/manual/source/protocols/json/json-schema-for-humans-config.yaml b/doc/manual/source/protocols/json/json-schema-for-humans-config.yaml new file mode 100644 index 00000000000..cad098053ab --- /dev/null +++ b/doc/manual/source/protocols/json/json-schema-for-humans-config.yaml @@ -0,0 +1,17 @@ +# Configuration file for json-schema-for-humans +# +# https://github.com/coveooss/json-schema-for-humans/blob/main/docs/examples/examples_md_default/Configuration.md + +template_name: md +show_toc: true +# impure timestamp and distracting +with_footer: false +recursive_detection_depth: 3 +show_breadcrumbs: false +description_is_markdown: true +template_md_options: + properties_table_columns: + - Property + - Type + - Pattern + - Title/Description diff --git a/doc/manual/source/protocols/json/meson.build b/doc/manual/source/protocols/json/meson.build new file mode 100644 index 00000000000..e32cf06408b --- /dev/null +++ b/doc/manual/source/protocols/json/meson.build @@ -0,0 +1,83 @@ +# Tests in: ../../../../src/json-schema-checks + +fs = 
import('fs') + +# Find json-schema-for-humans if available +json_schema_for_humans = find_program('generate-schema-doc', required : false) + +# Configuration for json-schema-for-humans +json_schema_config = files('json-schema-for-humans-config.yaml') + +schemas = [ + 'file-system-object-v1', + 'hash-v1', + 'content-address-v1', + 'store-path-v1', + 'store-object-info-v2', + 'derivation-v4', + 'derivation-options-v1', + 'deriving-path-v1', + 'build-trace-entry-v1', + 'build-result-v1', + 'store-v1', +] + +schema_files = files() +foreach schema_name : schemas + schema_files += files('schema' / schema_name + '.yaml') +endforeach + + +schema_outputs = [] +foreach schema_name : schemas + schema_outputs += schema_name + '.md' +endforeach + +json_schema_generated_files = [] + +if json_schema_for_humans.found() + # Generate markdown documentation from JSON schema + # Note: output must be just a filename, not a path + gen_file = custom_target( + schema_name + '-schema-docs.tmp', + command : [ + json_schema_for_humans, + '--config-file', + json_schema_config, + meson.current_source_dir() / 'schema', + meson.current_build_dir(), + ], + input : schema_files + [ + json_schema_config, + ], + output : schema_outputs, + capture : false, + build_by_default : true, + ) + + idx = 0 + foreach schema_name : schemas + #schema_file = 'schema' / schema_name + '.yaml' + + # There is one so-so hack, and one horrible hack being done here. 
+ sedded_file = custom_target( + schema_name + '-schema-docs', + command : [ + 'sed', + '-f', + # Out of line to avoid https://github.com/mesonbuild/meson/issues/1564 + files('fixup-json-schema-generated-doc.sed'), + '@INPUT@', + ], + capture : true, + input : gen_file[idx], + output : schema_name + '-fixed.md', + ) + idx += 1 + json_schema_generated_files += [ sedded_file ] + endforeach +else + warning( + 'json-schema-for-humans not found, skipping JSON schema documentation generation', + ) +endif diff --git a/doc/manual/source/protocols/json/schema/build-result-v1 b/doc/manual/source/protocols/json/schema/build-result-v1 new file mode 120000 index 00000000000..a143d2c5008 --- /dev/null +++ b/doc/manual/source/protocols/json/schema/build-result-v1 @@ -0,0 +1 @@ +../../../../../../src/libstore-tests/data/build-result \ No newline at end of file diff --git a/doc/manual/source/protocols/json/schema/build-result-v1.yaml b/doc/manual/source/protocols/json/schema/build-result-v1.yaml new file mode 100644 index 00000000000..31f59a44dda --- /dev/null +++ b/doc/manual/source/protocols/json/schema/build-result-v1.yaml @@ -0,0 +1,136 @@ +"$schema": "http://json-schema.org/draft-04/schema" +"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/build-result-v1.json" +title: Build Result +description: | + This schema describes the JSON representation of Nix's `BuildResult` type, which represents the result of building a derivation or substituting store paths. + + Build results can represent either successful builds (with built outputs) or various types of failures. + +oneOf: + - "$ref": "#/$defs/success" + - "$ref": "#/$defs/failure" +type: object +required: + - success + - status +properties: + timesBuilt: + type: integer + minimum: 0 + title: Times built + description: | + How many times this build was performed. 
+ + startTime: + type: integer + minimum: 0 + title: Start time + description: | + The start time of the build (or one of the rounds, if it was repeated), as a Unix timestamp. + + stopTime: + type: integer + minimum: 0 + title: Stop time + description: | + The stop time of the build (or one of the rounds, if it was repeated), as a Unix timestamp. + + cpuUser: + type: integer + minimum: 0 + title: User CPU time + description: | + User CPU time the build took, in microseconds. + + cpuSystem: + type: integer + minimum: 0 + title: System CPU time + description: | + System CPU time the build took, in microseconds. + +"$defs": + success: + type: object + title: Successful Build Result + description: | + Represents a successful build with built outputs. + required: + - success + - status + - builtOutputs + properties: + success: + const: true + title: Success indicator + description: | + Always true for successful build results. + + status: + type: string + title: Success status + description: | + Status string for successful builds. + enum: + - "Built" + - "Substituted" + - "AlreadyValid" + - "ResolvesToAlreadyValid" + + builtOutputs: + type: object + title: Built outputs + description: | + A mapping from output names to their build trace entries. + additionalProperties: + "$ref": "build-trace-entry-v1.yaml" + + failure: + type: object + title: Failed Build Result + description: | + Represents a failed build with error information. + required: + - success + - status + - errorMsg + properties: + success: + const: false + title: Success indicator + description: | + Always false for failed build results. + + status: + type: string + title: Failure status + description: | + Status string for failed builds. 
+ enum: + - "PermanentFailure" + - "InputRejected" + - "OutputRejected" + - "TransientFailure" + - "CachedFailure" + - "TimedOut" + - "MiscFailure" + - "DependencyFailed" + - "LogLimitExceeded" + - "NotDeterministic" + - "NoSubstituters" + - "HashMismatch" + + errorMsg: + type: string + title: Error message + description: | + Information about the error if the build failed. + + isNonDeterministic: + type: boolean + title: Non-deterministic flag + description: | + If timesBuilt > 1, whether some builds did not produce the same result. + + Note that 'isNonDeterministic = false' does not mean the build is deterministic, + just that we don't have evidence of non-determinism. diff --git a/doc/manual/source/protocols/json/schema/build-trace-entry-v1 b/doc/manual/source/protocols/json/schema/build-trace-entry-v1 new file mode 120000 index 00000000000..0d02880a5b4 --- /dev/null +++ b/doc/manual/source/protocols/json/schema/build-trace-entry-v1 @@ -0,0 +1 @@ +../../../../../../src/libstore-tests/data/realisation \ No newline at end of file diff --git a/doc/manual/source/protocols/json/schema/build-trace-entry-v1.yaml b/doc/manual/source/protocols/json/schema/build-trace-entry-v1.yaml new file mode 100644 index 00000000000..a85738b50b9 --- /dev/null +++ b/doc/manual/source/protocols/json/schema/build-trace-entry-v1.yaml @@ -0,0 +1,100 @@ +"$schema": "http://json-schema.org/draft-04/schema" +"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/build-trace-entry-v1.json" +title: Build Trace Entry +description: | + A record of a successful build outcome for a specific derivation output. + + This schema describes the JSON representation of a [build trace entry](@docroot@/store/build-trace.md). + + > **Warning** + > + > This JSON format is currently + > [**experimental**](@docroot@/development/experimental-features.md#xp-feature-ca-derivations) + > and subject to change. 
+required: + - id + - outPath + - dependentRealisations + - signatures +allOf: + - "$ref": "#/$defs/key" + - "$ref": "#/$defs/value" +properties: + id: {} + outPath: {} + dependentRealisations: {} + signatures: {} +additionalProperties: false + +"$defs": + key: + title: Build Trace Key + description: | + A [build trace entry](@docroot@/store/build-trace.md) is a key-value pair. + This is the "key" part, refering to a derivation and output. + type: object + required: + - id + properties: + id: + type: string + title: Derivation Output ID + pattern: "^sha256:[0-9a-f]{64}![a-zA-Z_][a-zA-Z0-9_-]*$" + description: | + Unique identifier for the derivation output that was built. + + Format: `{hash-quotient-drv}!{output-name}` + + - **hash-quotient-drv**: SHA-256 [hash of the quotient derivation](@docroot@/store/derivation/outputs/input-address.md#hash-quotient-drv). + Begins with `sha256:`. + + - **output-name**: Name of the specific output (e.g., "out", "dev", "doc") + + Example: `"sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad!foo"` + + value: + title: Build Trace Value + description: | + A [build trace entry](@docroot@/store/build-trace.md) is a key-value pair. + This is the "value" part, describing an output. + type: object + required: + - outPath + - dependentRealisations + - signatures + properties: + outPath: + "$ref": "store-path-v1.yaml" + title: Output Store Path + description: | + The path to the store object that resulted from building this derivation for the given output name. + + dependentRealisations: + type: object + title: Underlying Base Build Trace + description: | + This is for [*derived*](@docroot@/store/build-trace.md#derived) build trace entries to ensure coherence. + + Keys are derivation output IDs (same format as the main `id` field). + Values are the store paths that those dependencies resolved to. 
+          As described in the linked section on derived build trace entries, derived build trace entries must be kept in addition to, and not instead of, the underlying base build trace entries.
+ + > **Note** + > + > For current methods of content addressing, this data type is a bit suspicious, because it is neither simply a content address of a file system object (the `method` is richer), nor simply a content address of a store object (the `hash` doesn't account for the references). + > It should thus only be used in contexts where the references are also known / otherwise made tamper-resistant. + + + +type: object +properties: + method: + "$ref": "#/$defs/method" + hash: + title: Content Address + description: | + This would be the content-address itself. + + For all current methods, this is just a content address of the file system object of the store object, [as described in the store chapter](@docroot@/store/file-system-object/content-address.md), and not of the store object as a whole. + In particular, the references of the store object are *not* taken into account with this hash (and currently-supported methods). + "$ref": "./hash-v1.yaml" +required: +- method +- hash +additionalProperties: false +"$defs": + method: + type: string + enum: [flat, nar, text, git] + title: Content-Addressing Method + description: | + A string representing the [method](@docroot@/store/store-object/content-address.md) of content addressing that is chosen. 
+ + Valid method strings are: + + - [`flat`](@docroot@/store/store-object/content-address.md#method-flat) (provided the contents are a single file) + - [`nar`](@docroot@/store/store-object/content-address.md#method-nix-archive) + - [`text`](@docroot@/store/store-object/content-address.md#method-text) + - [`git`](@docroot@/store/store-object/content-address.md#method-git) diff --git a/doc/manual/source/protocols/json/schema/derivation-options-v1 b/doc/manual/source/protocols/json/schema/derivation-options-v1 new file mode 120000 index 00000000000..9332a539070 --- /dev/null +++ b/doc/manual/source/protocols/json/schema/derivation-options-v1 @@ -0,0 +1 @@ +../../../../../../src/libstore-tests/data/derivation \ No newline at end of file diff --git a/doc/manual/source/protocols/json/schema/derivation-options-v1.yaml b/doc/manual/source/protocols/json/schema/derivation-options-v1.yaml new file mode 100644 index 00000000000..d247802cd6c --- /dev/null +++ b/doc/manual/source/protocols/json/schema/derivation-options-v1.yaml @@ -0,0 +1,242 @@ +"$schema": "http://json-schema.org/draft-04/schema" +"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/derivation-options-v1.json" +title: Derivation Options +description: | + JSON representation of Nix's `DerivationOptions` type. + + This schema describes various build-time options and constraints that can be specified for a derivation. + + > **Warning** + > + > This JSON format is currently + > [**experimental**](@docroot@/development/experimental-features.md) + > and subject to change. + +type: object +required: + - outputChecks + - unsafeDiscardReferences + - passAsFile + - exportReferencesGraph + - additionalSandboxProfile + - noChroot + - impureHostDeps + - impureEnvVars + - allowLocalNetworking + - requiredSystemFeatures + - preferLocalBuild + - allowSubstitutes +properties: + outputChecks: + type: object + title: Output Check + description: | + Constraints on what the derivation's outputs can and cannot reference. 
+ Can either apply to all outputs or be specified per output. + oneOf: + - title: Output Checks For All Outputs + description: | + Output checks that apply to all outputs of the derivation. + required: + - forAllOutputs + properties: + forAllOutputs: + "$ref": "#/$defs/outputCheckSpec" + additionalProperties: false + + - title: Output Checks Per Output + description: | + Output checks specified individually for each output. + required: + - perOutput + properties: + perOutput: + type: object + additionalProperties: + "$ref": "#/$defs/outputCheckSpec" + additionalProperties: false + + unsafeDiscardReferences: + type: object + title: Unsafe Discard References + description: | + A map specifying which references should be unsafely discarded from each output. + This is generally not recommended and requires special permissions. + additionalProperties: + type: array + items: + type: string + + passAsFile: + type: array + title: Pass As File + description: | + List of environment variable names whose values should be passed as files rather than directly. + items: + type: string + + exportReferencesGraph: + type: object + title: Export References Graph + description: | + Specify paths whose references graph should be exported to files. + additionalProperties: + type: array + items: + "$ref": "deriving-path-v1.yaml" + + additionalSandboxProfile: + type: string + title: Additional Sandbox Profile + description: | + Additional sandbox profile directives (macOS specific). + + noChroot: + type: boolean + title: No Chroot + description: | + Whether to disable the build sandbox, if allowed. + + impureHostDeps: + type: array + title: Impure Host Dependencies + description: | + List of host paths that the build can access. + items: + type: string + + impureEnvVars: + type: array + title: Impure Environment Variables + description: | + List of environment variable names that should be passed through to the build from the calling environment. 
+ items: + type: string + + allowLocalNetworking: + type: boolean + title: Allow Local Networking + description: | + Whether the build should have access to local network (macOS specific). + + requiredSystemFeatures: + type: array + title: Required System Features + description: | + List of system features required to build this derivation (e.g., "kvm", "nixos-test"). + items: + type: string + + preferLocalBuild: + type: boolean + title: Prefer Local Build + description: | + Whether this derivation should preferably be built locally rather than its outputs substituted. + + allowSubstitutes: + type: boolean + title: Allow Substitutes + description: | + Whether substituting from other stores should be allowed for this derivation's outputs. + +additionalProperties: false + +$defs: + + outputCheckSpec: + type: object + title: Output Check Specification + description: | + Constraints on what a specific output can reference. + required: + - ignoreSelfRefs + - maxSize + - maxClosureSize + - allowedReferences + - allowedRequisites + - disallowedReferences + - disallowedRequisites + properties: + ignoreSelfRefs: + type: boolean + title: Ignore Self References + description: | + Whether references from this output to itself should be ignored when checking references. + + maxSize: + type: ["integer", "null"] + title: Maximum Size + description: | + Maximum allowed size of this output in bytes, or null for no limit. + minimum: 0 + + maxClosureSize: + type: ["integer", "null"] + title: Maximum Closure Size + description: | + Maximum allowed size of this output's closure in bytes, or null for no limit. + minimum: 0 + + allowedReferences: + oneOf: + - type: array + items: + "$ref": "#/$defs/drvRef" + - type: "null" + title: Allowed References + description: | + If set, the output can only reference paths in this list. + If null, no restrictions apply. 
+ + allowedRequisites: + oneOf: + - type: array + items: + "$ref": "#/$defs/drvRef" + - type: "null" + title: Allowed Requisites + description: | + If set, the output's closure can only contain paths in this list. + If null, no restrictions apply. + + disallowedReferences: + type: array + title: Disallowed References + description: | + The output must not reference any paths in this list. + items: + "$ref": "#/$defs/drvRef" + + disallowedRequisites: + type: array + title: Disallowed Requisites + description: | + The output's closure must not contain any paths in this list. + items: + "$ref": "#/$defs/drvRef" + additionalProperties: false + + drvRef: + # TODO fix bug in checker, should be `oneOf` + anyOf: + - type: object + title: Current derivation Output Reference + description: | + A reference to a specific output of the current derivation. + required: + - drvPath + - output + properties: + drvPath: + type: string + const: "self" + title: This derivation + description: | + Won't be confused for a deriving path + output: + type: string + title: Output Name + description: | + The name of the output being referenced. + additionalProperties: false + - "$ref": "deriving-path-v1.yaml" diff --git a/doc/manual/source/protocols/json/schema/derivation-v4.yaml b/doc/manual/source/protocols/json/schema/derivation-v4.yaml new file mode 100644 index 00000000000..2b86f06b401 --- /dev/null +++ b/doc/manual/source/protocols/json/schema/derivation-v4.yaml @@ -0,0 +1,299 @@ +"$schema": "http://json-schema.org/draft-04/schema" +"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/derivation-v4.json" +title: Derivation +description: | + Experimental JSON representation of a Nix derivation (version 4). + + This schema describes the JSON representation of Nix's `Derivation` type. + + > **Warning** + > + > This JSON format is currently + > [**experimental**](@docroot@/development/experimental-features.md) + > and subject to change. 
+ +type: object +required: + - name + - version + - outputs + - inputs + - system + - builder + - args + - env +properties: + name: + type: string + title: Derivation name + description: | + The name of the derivation. + Used when calculating store paths for the derivation’s outputs. + + version: + const: 4 + title: Format version (must be 4) + description: | + Must be `4`. + This is a guard that allows us to continue evolving this format. + The choice of `3` is fairly arbitrary, but corresponds to this informal version: + + - Version 0: ATerm format + + - Version 1: Original JSON format, with ugly `"r:sha256"` inherited from ATerm format. + + - Version 2: Separate `method` and `hashAlgo` fields in output specs + + - Version 3: Drop store dir from store paths, just include base name. + + - Version 4: Two cleanups, batched together to lesson churn: + + - Reorganize inputs into nested structure (`inputs.srcs` and `inputs.drvs`) + + - Use canonical content address JSON format for floating content addressed derivation outputs. + + Note that while this format is experimental, the maintenance of versions is best-effort, and not promised to identify every change. + + outputs: + type: object + title: Output specifications + description: | + Information about the output paths of the derivation. + This is a JSON object with one member per output, where the key is the output name and the value is a JSON object as described. + + > **Example** + > + > ```json + > "outputs": { + > "out": { + > "method": "nar", + > "hashAlgo": "sha256", + > "hash": "6fc80dcc62179dbc12fc0b5881275898f93444833d21b89dfe5f7fbcbb1d0d62" + > } + > } + > ``` + additionalProperties: + "$ref": "#/$defs/output/overall" + + inputs: + type: object + title: Derivation inputs + description: | + Input dependencies for the derivation, organized into source paths and derivation dependencies. 
+ required: + - srcs + - drvs + properties: + srcs: + type: array + title: Input source paths + description: | + List of store paths on which this derivation depends. + + > **Example** + > + > ```json + > "srcs": [ + > "47y241wqdhac3jm5l7nv0x4975mb1975-separate-debug-info.sh", + > "56d0w71pjj9bdr363ym3wj1zkwyqq97j-fix-pop-var-context-error.patch" + > ] + > ``` + items: + $ref: "store-path-v1.yaml" + drvs: + type: object + title: Input derivations + description: | + Mapping of derivation paths to lists of output names they provide. + + > **Example** + > + > ```json + > "drvs": { + > "6lkh5yi7nlb7l6dr8fljlli5zfd9hq58-curl-7.73.0.drv": ["dev"], + > "fn3kgnfzl5dzym26j8g907gq3kbm8bfh-unzip-6.0.drv": ["out"] + > } + > ``` + > + > specifies that this derivation depends on the `dev` output of `curl`, and the `out` output of `unzip`. + patternProperties: + "^[0123456789abcdfghijklmnpqrsvwxyz]{32}-.+\\.drv$": + title: Store Path + description: | + A store path to a derivation, mapped to the outputs of that derivation. + oneOf: + - "$ref": "#/$defs/outputNames" + - "$ref": "#/$defs/dynamicOutputs" + additionalProperties: false + additionalProperties: false + + system: + type: string + title: Build system type + description: | + The system type on which this derivation is to be built + (e.g. `x86_64-linux`). + + builder: + type: string + title: Build program path + description: | + Absolute path of the program used to perform the build. + Typically this is the `bash` shell + (e.g. `/nix/store/r3j288vpmczbl500w6zz89gyfa4nr0b1-bash-4.4-p23/bin/bash`). + + args: + type: array + title: Builder arguments + description: | + Command-line arguments passed to the `builder`. + items: + type: string + + env: + type: object + title: Environment variables + description: | + Environment variables passed to the `builder`. 
+ additionalProperties: + type: string + + structuredAttrs: + title: Structured attributes + description: | + [Structured Attributes](@docroot@/store/derivation/index.md#structured-attrs), only defined if the derivation contains them. + Structured attributes are JSON, and thus embedded as-is. + type: object + additionalProperties: true + +"$defs": + output: + overall: + title: Derivation Output + description: | + A single output of a derivation, with different variants for different output types. + oneOf: + - "$ref": "#/$defs/output/inputAddressed" + - "$ref": "#/$defs/output/caFixed" + - "$ref": "#/$defs/output/caFloating" + - "$ref": "#/$defs/output/deferred" + - "$ref": "#/$defs/output/impure" + + inputAddressed: + title: Input-Addressed Output + description: | + The traditional non-fixed-output derivation type. + The output path is determined from the derivation itself. + + See [Input-addressing derivation outputs](@docroot@/store/derivation/outputs/input-address.md) for more details. + type: object + required: + - path + properties: + path: + $ref: "store-path-v1.yaml" + title: Output path + description: | + The output path determined from the derivation itself. + additionalProperties: false + + caFixed: + title: Fixed Content-Addressed Output + description: | + The output is content-addressed, and the content-address is fixed in advance. + + See [Fixed-output content-addressing](@docroot@/store/derivation/outputs/content-address.md#fixed) for more details. + "$ref": "./content-address-v1.yaml" + required: + - method + - hash + properties: + method: + description: | + Method of content addressing used for this output. + hash: + title: Expected hash value + description: | + The expected content hash. 
+ additionalProperties: false + + caFloating: + title: Floating Content-Addressed Output + description: | + Floating-output derivations, whose outputs are content + addressed, but not fixed, and so the output paths are dynamically calculated from + whatever the output ends up being. + + See [Floating Content-Addressing](@docroot@/store/derivation/outputs/content-address.md#floating) for more details. + type: object + required: + - method + - hashAlgo + properties: + method: + "$ref": "./content-address-v1.yaml#/$defs/method" + description: | + Method of content addressing used for this output. + hashAlgo: + title: Hash algorithm + "$ref": "./hash-v1.yaml#/$defs/algorithm" + description: | + What hash algorithm to use for the given method of content-addressing. + additionalProperties: false + + deferred: + title: Deferred Output + description: | + Input-addressed output which depends on a (CA) derivation whose outputs (and thus their content-address + are not yet known. + type: object + properties: {} + additionalProperties: false + + impure: + title: Impure Output + description: | + Impure output which is just like a floating content-addressed output, but this derivation runs without sandboxing. + As such, we don't record it in the build trace, under the assumption that if we need it again, we should rebuild it, as it might produce something different. + required: + - impure + - method + - hashAlgo + properties: + impure: + const: true + method: + "$ref": "./content-address-v1.yaml#/$defs/method" + description: | + How the file system objects will be serialized for hashing. + hashAlgo: + title: Hash algorithm + "$ref": "./hash-v1.yaml#/$defs/algorithm" + description: | + How the serialization will be hashed. 
+ additionalProperties: false + + outputName: + type: string + title: Output name + description: Name of the derivation output to depend on + + outputNames: + type: array + title: Output Names + description: Set of names of derivation outputs to depend on + items: + "$ref": "#/$defs/outputName" + + dynamicOutputs: + type: object + title: Dynamic Outputs + description: | + **Experimental feature**: [`dynamic-derivations`](@docroot@/development/experimental-features.md#xp-feature-dynamic-derivations) + + This recursive data type allows for depending on outputs of outputs. + properties: + outputs: + "$ref": "#/$defs/outputNames" + dynamicOutputs: + "$ref": "#/$defs/dynamicOutputs" diff --git a/doc/manual/source/protocols/json/schema/deriving-path-v1 b/doc/manual/source/protocols/json/schema/deriving-path-v1 new file mode 120000 index 00000000000..92ec6d01a03 --- /dev/null +++ b/doc/manual/source/protocols/json/schema/deriving-path-v1 @@ -0,0 +1 @@ +../../../../../../src/libstore-tests/data/derived-path \ No newline at end of file diff --git a/doc/manual/source/protocols/json/schema/deriving-path-v1.yaml b/doc/manual/source/protocols/json/schema/deriving-path-v1.yaml new file mode 100644 index 00000000000..11a784d064c --- /dev/null +++ b/doc/manual/source/protocols/json/schema/deriving-path-v1.yaml @@ -0,0 +1,27 @@ +"$schema": "http://json-schema.org/draft-04/schema" +"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/deriving-path-v1.json" +title: Deriving Path +description: | + This schema describes the JSON representation of Nix's [Deriving Path](@docroot@/store/derivation/index.md#deriving-path). +oneOf: + - title: Constant + description: | + See [Constant](@docroot@/store/derivation/index.md#deriving-path-constant) deriving path. + $ref: "store-path-v1.yaml" + - title: Output + description: | + See [Output](@docroot@/store/derivation/index.md#deriving-path-output) deriving path. 
+ type: object + properties: + drvPath: + "$ref": "#" + description: | + A deriving path to a [Derivation](@docroot@/store/derivation/index.md#store-derivation), whose output is being referred to. + output: + type: string + description: | + The name of an output produced by that derivation (e.g. "out", "doc", etc.). + required: + - drvPath + - output + additionalProperties: false diff --git a/doc/manual/source/protocols/json/schema/file-system-object-v1 b/doc/manual/source/protocols/json/schema/file-system-object-v1 new file mode 120000 index 00000000000..cbb21a10d04 --- /dev/null +++ b/doc/manual/source/protocols/json/schema/file-system-object-v1 @@ -0,0 +1 @@ +../../../../../../src/libutil-tests/data/memory-source-accessor \ No newline at end of file diff --git a/doc/manual/source/protocols/json/schema/file-system-object-v1.yaml b/doc/manual/source/protocols/json/schema/file-system-object-v1.yaml new file mode 100644 index 00000000000..116c7ce0da0 --- /dev/null +++ b/doc/manual/source/protocols/json/schema/file-system-object-v1.yaml @@ -0,0 +1,71 @@ +"$schema": http://json-schema.org/draft-04/schema# +"$id": https://nix.dev/manual/nix/latest/protocols/json/schema/file-system-object-v1.json +title: File System Object +description: | + This schema describes the JSON representation of Nix's [File System Object](@docroot@/store/file-system-object.md). + + The schema is recursive because file system objects contain other file system objects. +type: object +required: ["type"] +properties: + type: + type: string + enum: ["regular", "symlink", "directory"] + +# Enforce conditional structure based on `type` +anyOf: + - $ref: "#/$defs/regular" + required: ["type", "contents"] + + - $ref: "#/$defs/directory" + required: ["type", "entries"] + + - $ref: "#/$defs/symlink" + required: ["type", "target"] + +"$defs": + regular: + title: Regular File + description: | + See [Regular File](@docroot@/store/file-system-object.md#regular) in the manual for details. 
+ required: ["contents"] + properties: + type: + const: "regular" + contents: + type: string + description: File contents + executable: + type: boolean + description: Whether the file is executable. + default: false + additionalProperties: false + + directory: + title: Directory + description: | + See [Directory](@docroot@/store/file-system-object.md#directory) in the manual for details. + required: ["entries"] + properties: + type: + const: "directory" + entries: + type: object + description: | + Map of names to nested file system objects (for type=directory) + additionalProperties: + $ref: "#" + additionalProperties: false + + symlink: + title: Symbolic Link + description: | + See [Symbolic Link](@docroot@/store/file-system-object.md#symlink) in the manual for details. + required: ["target"] + properties: + type: + const: "symlink" + target: + type: string + description: Target path of the symlink. + additionalProperties: false diff --git a/doc/manual/source/protocols/json/schema/hash-v1 b/doc/manual/source/protocols/json/schema/hash-v1 new file mode 120000 index 00000000000..06937e28679 --- /dev/null +++ b/doc/manual/source/protocols/json/schema/hash-v1 @@ -0,0 +1 @@ +../../../../../../src/libutil-tests/data/hash/ \ No newline at end of file diff --git a/doc/manual/source/protocols/json/schema/hash-v1.yaml b/doc/manual/source/protocols/json/schema/hash-v1.yaml new file mode 100644 index 00000000000..c7e46191d92 --- /dev/null +++ b/doc/manual/source/protocols/json/schema/hash-v1.yaml @@ -0,0 +1,27 @@ +"$schema": "http://json-schema.org/draft-04/schema" +"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/hash-v1.json" +title: Hash +description: | + A cryptographic hash value used throughout Nix for content addressing and integrity verification. + + This schema describes the JSON representation of Nix's `Hash` type as an [SRI](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity) string. 
+type: string +pattern: "^(blake3|md5|sha1|sha256|sha512)-[A-Za-z0-9+/]+=*$" +examples: +- "sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=" +- "sha512-IEqPxt2oLwoM7XvrjgikFlfBbvRosiioJ5vjMacDwzWW/RXBOxsH+aodO+pXeJygMa2Fx6cd1wNU7GMSOMo0RQ==" + +"$defs": + algorithm: + type: string + enum: + - blake3 + - md5 + - sha1 + - sha256 + - sha512 + title: Hash algorithm + description: | + The hash algorithm used to compute the hash value. + + `blake3` is currently experimental and requires the [`blake-hashing`](@docroot@/development/experimental-features.md#xp-feature-blake3-hashes) experimental feature. diff --git a/doc/manual/source/protocols/json/schema/nar-info-v2 b/doc/manual/source/protocols/json/schema/nar-info-v2 new file mode 120000 index 00000000000..cb08fe781a0 --- /dev/null +++ b/doc/manual/source/protocols/json/schema/nar-info-v2 @@ -0,0 +1 @@ +../../../../../../src/libstore-tests/data/nar-info/json-2 \ No newline at end of file diff --git a/doc/manual/source/protocols/json/schema/store-object-info-v2 b/doc/manual/source/protocols/json/schema/store-object-info-v2 new file mode 120000 index 00000000000..36ca7f13db9 --- /dev/null +++ b/doc/manual/source/protocols/json/schema/store-object-info-v2 @@ -0,0 +1 @@ +../../../../../../src/libstore-tests/data/path-info/json-2 \ No newline at end of file diff --git a/doc/manual/source/protocols/json/schema/store-object-info-v2.yaml b/doc/manual/source/protocols/json/schema/store-object-info-v2.yaml new file mode 100644 index 00000000000..6ebaa3b2422 --- /dev/null +++ b/doc/manual/source/protocols/json/schema/store-object-info-v2.yaml @@ -0,0 +1,265 @@ +"$schema": "http://json-schema.org/draft-04/schema" +"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/store-object-info-v2.json" +title: Store Object Info v2 +description: | + Information about a [store object](@docroot@/store/store-object.md). 
+ + This schema describes the JSON representation of store object metadata as returned by commands like [`nix path-info --json`](@docroot@/command-ref/new-cli/nix3-path-info.md). + + ### Field Categories + + Store object information can come in a few different variations. + + Firstly, "impure" fields, which contain non-intrinsic information about the store object, may or may not be included. + + Second, binary cache stores have extra non-intrinsic information about the store objects they contain. + + Thirdly, [`nix path-info --json --closure-size`](@docroot@/command-ref/new-cli/nix3-path-info.html#opt-closure-size) can compute some extra information about not just the single store object in question, but the store object and its [closure](@docroot@/glossary.md#gloss-closure). + + The impure and NAR fields are grouped into separate variants below. + See their descriptions for additional information. + The closure fields however are just included as optional fields, to avoid a combinatorial explosion of variants. + +oneOf: + - $ref: "#/$defs/base" + + - $ref: "#/$defs/impure" + + - $ref: "#/$defs/narInfo" + +$defs: + base: + title: Store Object Info + description: | + Basic store object metadata containing only intrinsic properties. + This is the minimal set of fields that describe what a store object contains. + type: object + required: + - version + - narHash + - narSize + - references + - ca + - storeDir + properties: + version: + type: integer + const: 2 + title: Format version (must be 2) + description: | + Must be `2`. + This is a guard that allows us to continue evolving this format. + Here is the rough version history: + + - Version 0: `.narinfo` line-oriented format + + - Version 1: Original JSON format, with ugly `"r:sha256"` inherited from `.narinfo` format. + + - Version 2: Use structured JSON type for `ca` + + path: + "$ref": "./store-path-v1.yaml" + title: Store Path + description: | + [Store path](@docroot@/store/store-path.md) to the given store object.
+ + Note: This field may not be present in all contexts, such as when the path is used as the key and the store object info is the value in a map. + + narHash: + "$ref": "./hash-v1.yaml" + title: NAR Hash + description: | + Hash of the [file system object](@docroot@/store/file-system-object.md) part of the store object when serialized as a [Nix Archive](@docroot@/store/file-system-object/content-address.md#serial-nix-archive). + + narSize: + type: integer + minimum: 0 + title: NAR Size + description: | + Size of the [file system object](@docroot@/store/file-system-object.md) part of the store object when serialized as a [Nix Archive](@docroot@/store/file-system-object/content-address.md#serial-nix-archive). + + references: + type: array + title: References + description: | + An array of [store paths](@docroot@/store/store-path.md), possibly including this one. + items: + "$ref": "./store-path-v1.yaml" + + ca: + oneOf: + - type: "null" + const: null + - "$ref": "./content-address-v1.yaml" + title: Content Address + description: | + If the store object is [content-addressed](@docroot@/store/store-object/content-address.md), + this is the content address of this store object's file system object, used to compute its store path. + Otherwise (i.e. if it is [input-addressed](@docroot@/glossary.md#gloss-input-addressed-store-object)), this is `null`. + + storeDir: + type: string + title: Store Directory + description: | + The [store directory](@docroot@/store/store-path.md#store-directory) this store object belongs to (e.g. `/nix/store`). + additionalProperties: false + + impure: + title: Store Object Info with Impure Fields + description: | + Store object metadata including impure fields that are not *intrinsic* properties. + In other words, the same store object in different stores could have different values for these impure fields.
+ type: object + required: + - version + - narHash + - narSize + - references + - ca + - storeDir + # impure + - deriver + - registrationTime + - ultimate + - signatures + properties: + version: { $ref: "#/$defs/base/properties/version" } + path: { $ref: "#/$defs/base/properties/path" } + narHash: { $ref: "#/$defs/base/properties/narHash" } + narSize: { $ref: "#/$defs/base/properties/narSize" } + references: { $ref: "#/$defs/base/properties/references" } + ca: { $ref: "#/$defs/base/properties/ca" } + storeDir: { $ref: "#/$defs/base/properties/storeDir" } + deriver: + oneOf: + - "$ref": "./store-path-v1.yaml" + - type: "null" + title: Deriver + description: | + If known, the path to the [store derivation](@docroot@/glossary.md#gloss-store-derivation) from which this store object was produced. + Otherwise `null`. + + > This is an "impure" field that may not be included in certain contexts. + + registrationTime: + type: ["integer", "null"] + title: Registration Time + description: | + If known, when this derivation was added to the store (Unix timestamp). + Otherwise `null`. + + > This is an "impure" field that may not be included in certain contexts. + + ultimate: + type: boolean + title: Ultimate + description: | + Whether this store object is trusted because we built it ourselves, rather than substituted a build product from elsewhere. + + > This is an "impure" field that may not be included in certain contexts. + + signatures: + type: array + title: Signatures + description: | + Signatures claiming that this store object is what it claims to be. + Not relevant for [content-addressed](@docroot@/store/store-object/content-address.md) store objects, + but useful for [input-addressed](@docroot@/glossary.md#gloss-input-addressed-store-object) store objects. + + > This is an "impure" field that may not be included in certain contexts. 
+ items: + type: string + + # Computed closure fields + closureSize: + type: integer + minimum: 0 + title: Closure Size + description: | + The total size of this store object and every other object in its [closure](@docroot@/glossary.md#gloss-closure). + + > This field is not stored at all, but computed by traversing the other fields across all the store objects in a closure. + additionalProperties: false + + narInfo: + title: Store Object Info with Impure Fields and NAR Info + description: | + The store object info in the "binary cache" family of Nix store types contains extra information pertaining to *downloads* of the store object in question. + (This store info is called "NAR info", since the downloads take the form of [Nix Archives](@docroot@/store/file-system-object/content-address.md#serial-nix-archive), and the metadata is served in a file with a `.narinfo` extension.) + + This download information, being specific to how the store object happens to be stored and transferred, is also considered to be non-intrinsic / impure.
+ type: object + required: + - version + - narHash + - narSize + - references + - ca + - storeDir + # impure + - deriver + - registrationTime + - ultimate + - signatures + # nar + - url + - compression + - downloadHash + - downloadSize + properties: + version: { $ref: "#/$defs/base/properties/version" } + path: { $ref: "#/$defs/base/properties/path" } + narHash: { $ref: "#/$defs/base/properties/narHash" } + narSize: { $ref: "#/$defs/base/properties/narSize" } + references: { $ref: "#/$defs/base/properties/references" } + ca: { $ref: "#/$defs/base/properties/ca" } + storeDir: { $ref: "#/$defs/base/properties/storeDir" } + deriver: { $ref: "#/$defs/impure/properties/deriver" } + registrationTime: { $ref: "#/$defs/impure/properties/registrationTime" } + ultimate: { $ref: "#/$defs/impure/properties/ultimate" } + signatures: { $ref: "#/$defs/impure/properties/signatures" } + closureSize: { $ref: "#/$defs/impure/properties/closureSize" } + url: + type: string + title: URL + description: | + Where to download a compressed archive of the file system objects of this store object. + + > This is an impure "`.narinfo`" field that may not be included in certain contexts. + + compression: + type: string + title: Compression + description: | + The compression format that the archive is in. + + > This is an impure "`.narinfo`" field that may not be included in certain contexts. + + downloadHash: + "$ref": "./hash-v1.yaml" + title: Download Hash + description: | + A digest for the compressed archive itself, as opposed to the data contained within. + + > This is an impure "`.narinfo`" field that may not be included in certain contexts. + + downloadSize: + type: integer + minimum: 0 + title: Download Size + description: | + The size of the compressed archive itself. + + > This is an impure "`.narinfo`" field that may not be included in certain contexts. 
+ + closureDownloadSize: + type: integer + minimum: 0 + title: Closure Download Size + description: | + The total size of the compressed archive itself for this object, and the compressed archive of every object in this object's [closure](@docroot@/glossary.md#gloss-closure). + + > This is an impure "`.narinfo`" field that may not be included in certain contexts. + + > This field is not stored at all, but computed by traversing the other fields across all the store objects in a closure. + additionalProperties: false diff --git a/doc/manual/source/protocols/json/schema/store-path-v1 b/doc/manual/source/protocols/json/schema/store-path-v1 new file mode 120000 index 00000000000..31e7a6b2a5e --- /dev/null +++ b/doc/manual/source/protocols/json/schema/store-path-v1 @@ -0,0 +1 @@ +../../../../../../src/libstore-tests/data/store-path \ No newline at end of file diff --git a/doc/manual/source/protocols/json/schema/store-path-v1.yaml b/doc/manual/source/protocols/json/schema/store-path-v1.yaml new file mode 100644 index 00000000000..61653d60e21 --- /dev/null +++ b/doc/manual/source/protocols/json/schema/store-path-v1.yaml @@ -0,0 +1,26 @@ +"$schema": "http://json-schema.org/draft-07/schema" +"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/store-path-v1.json" +title: Store Path +description: | + A [store path](@docroot@/store/store-path.md) identifying a store object. + + This schema describes the JSON representation of store paths as used in various Nix JSON APIs. + + ## Format + + Store paths in JSON are represented as strings containing just the hash and name portion, without the store directory prefix. + + For example: `"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"` + + (If the store dir is `/nix/store`, then this corresponds to the path `/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv`.) 
+ + ## Structure + + The format follows this pattern: `${digest}-${name}` + + - **digest**: Digest rendered in a custom variant of [Base32](https://en.wikipedia.org/wiki/Base32) (20 arbitrary bytes become 32 ASCII characters) + - **name**: The package name and optional version/suffix information + +type: string +pattern: "^[0123456789abcdfghijklmnpqrsvwxyz]{32}-.+$" +minLength: 34 diff --git a/doc/manual/source/protocols/json/schema/store-v1 b/doc/manual/source/protocols/json/schema/store-v1 new file mode 120000 index 00000000000..0cb61f962e2 --- /dev/null +++ b/doc/manual/source/protocols/json/schema/store-v1 @@ -0,0 +1 @@ +../../../../../../src/libstore-tests/data/dummy-store \ No newline at end of file diff --git a/doc/manual/source/protocols/json/schema/store-v1.yaml b/doc/manual/source/protocols/json/schema/store-v1.yaml new file mode 100644 index 00000000000..31aa10c4147 --- /dev/null +++ b/doc/manual/source/protocols/json/schema/store-v1.yaml @@ -0,0 +1,90 @@ +"$schema": "http://json-schema.org/draft-04/schema" +"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/store-v1.json" +title: Store +description: | + Experimental JSON representation of a Nix [Store](@docroot@/store/index.md). + + This schema describes the JSON serialization of a Nix store. + We use it for (de)serializing in-memory "dummy stores" used for testing, but in principle the data represented in this schema could live in any type of store. + + > **Warning** + > + > This JSON format is currently + > [**experimental**](@docroot@/development/experimental-features.md) + > and subject to change. + +type: object +required: + - config + - contents + - derivations + - buildTrace +properties: + config: + "$ref": "#/$defs/storeConfig" + + contents: + type: object + title: Store Objects + description: | + Map of [store path](@docroot@/store/store-path.md) base names to [store objects](@docroot@/store/store-object.md).
+ patternProperties: + "^[0123456789abcdfghijklmnpqrsvwxyz]{32}-.+$": + type: object + title: Store Object + required: + - info + - contents + properties: + info: + "$ref": "./store-object-info-v2.yaml#/$defs/impure" + title: Store Object Info + description: | + Metadata about the [store object](@docroot@/store/store-object.md) including hash, size, references, etc. + contents: + "$ref": "./file-system-object-v1.yaml" + title: File System Object Contents + description: | + The actual [file system object](@docroot@/store/file-system-object.md) contents of this store path. + additionalProperties: false + additionalProperties: false + + derivations: + type: object + title: Derivations + description: | + Map of [store path](@docroot@/store/store-path.md) base names (always ending in `.drv`) to [derivations](@docroot@/store/derivation/index.md). + patternProperties: + "^[0123456789abcdfghijklmnpqrsvwxyz]{32}-.+\\.drv$": + "$ref": "./derivation-v4.yaml" + additionalProperties: false + + buildTrace: + type: object + title: Build Trace + description: | + Map of output hashes (base64 SHA256) to maps of output names to realisations. + Records which outputs have been built and their realisations. + See [Build Trace](@docroot@/store/build-trace.md) for more details. + patternProperties: + "^[A-Za-z0-9+/]{43}=$": + type: object + additionalProperties: + "$ref": "./build-trace-entry-v1.yaml#/$defs/value" + additionalProperties: false + +"$defs": + storeConfig: + title: Store Configuration + description: | + Configuration for the store, including the store directory path. + type: object + required: + - store + properties: + store: + type: string + title: Store Directory + description: | + The store directory path (e.g., `/nix/store`). 
+ additionalProperties: false diff --git a/doc/manual/source/protocols/json/store-object-info.md b/doc/manual/source/protocols/json/store-object-info.md index 4b029c40b5d..4ad83de00b3 100644 --- a/doc/manual/source/protocols/json/store-object-info.md +++ b/doc/manual/source/protocols/json/store-object-info.md @@ -1,96 +1,45 @@ -# Store object info JSON format +{{#include store-object-info-v2-fixed.md}} -Info about a [store object]. +## Examples -* `path`: +### Minimal store object (content-addressed) - [Store path][store path] to the given store object. +```json +{{#include schema/store-object-info-v2/pure.json}} +``` -* `narHash`: +### Store object with impure fields - Hash of the [file system object] part of the store object when serialized as a [Nix Archive]. +```json +{{#include schema/store-object-info-v2/impure.json}} +``` -* `narSize`: +### Minimal store object (empty) - Size of the [file system object] part of the store object when serialized as a [Nix Archive]. +```json +{{#include schema/store-object-info-v2/empty_pure.json}} +``` -* `references`: +### Store object with all impure fields - An array of [store paths][store path], possibly including this one. +```json +{{#include schema/store-object-info-v2/empty_impure.json}} +``` -* `ca`: +### NAR info (minimal) - If the store object is [content-addressed], - this is the content address of this store object's file system object, used to compute its store path. - Otherwise (i.e. if it is [input-addressed]), this is `null`. +```json +{{#include schema/nar-info-v2/pure.json}} +``` -[store path]: @docroot@/store/store-path.md -[file system object]: @docroot@/store/file-system-object.md -[Nix Archive]: @docroot@/store/file-system-object/content-address.md#serial-nix-archive +### NAR info (with binary cache fields) -## Impure fields +```json +{{#include schema/nar-info-v2/impure.json}} +``` -These are not intrinsic properties of the store object. 
-In other words, the same store object residing in different store could have different values for these properties. + diff --git a/doc/manual/source/protocols/json/store-path.md b/doc/manual/source/protocols/json/store-path.md new file mode 100644 index 00000000000..cd18f659531 --- /dev/null +++ b/doc/manual/source/protocols/json/store-path.md @@ -0,0 +1,15 @@ +{{#include store-path-v1-fixed.md}} + +## Examples + +### Simple store path + +```json +{{#include schema/store-path-v1/simple.json}} +``` + + diff --git a/doc/manual/source/protocols/json/store.md b/doc/manual/source/protocols/json/store.md new file mode 100644 index 00000000000..951c1759e67 --- /dev/null +++ b/doc/manual/source/protocols/json/store.md @@ -0,0 +1,21 @@ +{{#include store-v1-fixed.md}} + +## Examples + +### Empty store + +```json +{{#include schema/store-v1/empty.json}} +``` + +### Store with one file + +```json +{{#include schema/store-v1/one-flat-file.json}} +``` + +### Store with one derivation + +```json +{{#include schema/store-v1/one-derivation.json}} +``` diff --git a/doc/manual/source/protocols/meson.build b/doc/manual/source/protocols/meson.build new file mode 100644 index 00000000000..5b5eb900dcc --- /dev/null +++ b/doc/manual/source/protocols/meson.build @@ -0,0 +1,2 @@ +# Process JSON schema documentation +subdir('json') diff --git a/doc/manual/source/protocols/nix-archive.md b/doc/manual/source/protocols/nix-archive/index.md similarity index 68% rename from doc/manual/source/protocols/nix-archive.md rename to doc/manual/source/protocols/nix-archive/index.md index 02a8dd46470..98769d59e43 100644 --- a/doc/manual/source/protocols/nix-archive.md +++ b/doc/manual/source/protocols/nix-archive/index.md @@ -4,7 +4,7 @@ This is the complete specification of the [Nix Archive] format. The Nix Archive format closely follows the abstract specification of a [file system object] tree, because it is designed to serialize exactly that data structure. 
-[Nix Archive]: @docroot@/store/file-system-object/content-address.md#nix-archive +[Nix Archive]: @docroot@/store/file-system-object/content-address.md#serial-nix-archive [file system object]: @docroot@/store/file-system-object.md The format of this specification is close to [Extended Backus–Naur form](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form), with the exception of the `str(..)` function / parameterized rule, which length-prefixes and pads strings. @@ -24,7 +24,7 @@ nar-obj-inner | str("type"), str("directory") directory ; -regular = [ str("executable") ], str("contents"), str(contents); +regular = [ str("executable"), str("") ], str("contents"), str(contents); symlink = str("target"), str(target); @@ -41,3 +41,15 @@ The `str` function / parameterized rule is defined as follows: - `int(n)` = the 64-bit little endian representation of the number `n` - `pad(s)` = the byte sequence `s`, padded with 0s to a multiple of 8 byte + +## Kaitai Struct Specification + +The Nix Archive (NAR) format is also formally described using [Kaitai Struct](https://kaitai.io/), an Interface Description Language (IDL) for defining binary data structures. + +> Kaitai Struct provides a language-agnostic, machine-readable specification that can be compiled into parsers for various programming languages (e.g., C++, Python, Java, Rust). + +```yaml +{{#include nar.ksy}} +``` + +The source of the spec can be found [here](https://github.com/nixos/nix/blob/master/src/nix-manual/source/protocols/nix-archive/nar.ksy). Contributions and improvements to the spec are welcomed. diff --git a/doc/manual/source/protocols/nix-archive/nar.ksy b/doc/manual/source/protocols/nix-archive/nar.ksy new file mode 100644 index 00000000000..6a172b2760b --- /dev/null +++ b/doc/manual/source/protocols/nix-archive/nar.ksy @@ -0,0 +1,169 @@ +meta: + id: nix_nar + title: Nix Archive (NAR) + file-extension: nar + endian: le +doc: | + Nix Archive (NAR) format. 
A simple, reproducible binary archive + format used by the Nix package manager to serialize file system objects. +doc-ref: 'https://nixos.org/manual/nix/stable/command-ref/nix-store.html#nar-format' + +seq: + - id: magic + type: padded_str + doc: "Magic string, must be 'nix-archive-1'." + valid: + expr: _.body == 'nix-archive-1' + - id: root_node + type: node + doc: "The root of the archive, which is always a single node." + +types: + padded_str: + doc: | + A string, prefixed with its length (u8le) and + padded with null bytes to the next 8-byte boundary. + seq: + - id: len_str + type: u8 + - id: body + type: str + size: len_str + encoding: 'ASCII' + - id: padding + size: (8 - (len_str % 8)) % 8 + + node: + doc: "A single filesystem node (file, directory, or symlink)." + seq: + - id: open_paren + type: padded_str + doc: "Must be '(', a token starting the node definition." + valid: + expr: _.body == '(' + - id: type_key + type: padded_str + doc: "Must be 'type'." + valid: + expr: _.body == 'type' + - id: type_val + type: padded_str + doc: "The type of the node: 'regular', 'directory', or 'symlink'." + - id: body + type: + switch-on: type_val.body + cases: + "'directory'": type_directory + "'regular'": type_regular + "'symlink'": type_symlink + - id: close_paren + type: padded_str + valid: + expr: _.body == ')' + if: "type_val.body != 'directory'" + doc: "Must be ')', a token ending the node definition." + + type_directory: + doc: "A directory node, containing a list of entries. Entries must be ordered by their names." + seq: + - id: entries + type: dir_entry + repeat: until + repeat-until: _.kind.body == ')' + types: + dir_entry: + doc: "A single entry within a directory, or a terminator." + seq: + - id: kind + type: padded_str + valid: + expr: _.body == 'entry' or _.body == ')' + doc: "Must be 'entry' (for a child node) or ')' (for terminator)."
+ - id: open_paren + type: padded_str + valid: + expr: _.body == '(' + if: 'kind.body == "entry"' + - id: name_key + type: padded_str + valid: + expr: _.body == 'name' + if: 'kind.body == "entry"' + - id: name + type: padded_str + if: 'kind.body == "entry"' + - id: node_key + type: padded_str + valid: + expr: _.body == 'node' + if: 'kind.body == "entry"' + - id: node + type: node + if: 'kind.body == "entry"' + doc: "The child node, present only if kind is 'entry'." + - id: close_paren + type: padded_str + valid: + expr: _.body == ')' + if: 'kind.body == "entry"' + instances: + is_terminator: + value: kind.body == ')' + + type_regular: + doc: "A regular file node." + seq: + # Read attributes (like 'executable') until we hit 'contents' + - id: attributes + type: reg_attribute + repeat: until + repeat-until: _.key.body == "contents" + # After the 'contents' token, read the file data + - id: file_data + type: file_content + instances: + is_executable: + value: 'attributes[0].key.body == "executable"' + doc: "True if the file has the 'executable' attribute." + types: + reg_attribute: + doc: "An attribute of the file, e.g., 'executable' or 'contents'." + seq: + - id: key + type: padded_str + doc: "Attribute key, e.g., 'executable' or 'contents'." + valid: + expr: _.body == 'executable' or _.body == 'contents' + - id: value + type: padded_str + if: 'key.body == "executable"' + valid: + expr: _.body == '' + doc: "Must be '' if key is 'executable'." + file_content: + doc: "The raw data of the file, prefixed by length." + seq: + - id: len_contents + type: u8 + # # This relies on the property of instances that they are lazily evaluated and cached. + - size: 0 + if: nar_offset < 0 + - id: contents + size: len_contents + - id: padding + size: (8 - (len_contents % 8)) % 8 + instances: + nar_offset: + value: _io.pos + + type_symlink: + doc: "A symbolic link node." + seq: + - id: target_key + type: padded_str + doc: "Must be 'target'." 
+ valid: + expr: _.body == 'target' + - id: target_val + type: padded_str + doc: "The destination path of the symlink." diff --git a/doc/manual/source/release-notes/rl-2.0.md b/doc/manual/source/release-notes/rl-2.0.md index aad0de21189..25cc5e0a5f3 100644 --- a/doc/manual/source/release-notes/rl-2.0.md +++ b/doc/manual/source/release-notes/rl-2.0.md @@ -358,7 +358,7 @@ This release has the following new features: they are needed for evaluation. - You can now use `channel:` as a short-hand for - . For example, + [now ]. For example, `nix-build channel:nixos-15.09 -A hello` will build the GNU Hello package from the `nixos-15.09` channel. In the future, this may use Git to fetch updates more efficiently. diff --git a/doc/manual/source/release-notes/rl-2.18.md b/doc/manual/source/release-notes/rl-2.18.md index eb26fc9e721..71b25f40819 100644 --- a/doc/manual/source/release-notes/rl-2.18.md +++ b/doc/manual/source/release-notes/rl-2.18.md @@ -13,7 +13,7 @@ - The `discard-references` feature has been stabilized. This means that the - [unsafeDiscardReferences](@docroot@/development/experimental-features.md#xp-feature-discard-references) + [unsafeDiscardReferences](@docroot@/language/advanced-attributes.md#adv-attr-unsafeDiscardReferences) attribute is no longer guarded by an experimental flag and can be used freely. diff --git a/doc/manual/source/release-notes/rl-2.19.md b/doc/manual/source/release-notes/rl-2.19.md index 47a0dd3db99..171dffb121d 100644 --- a/doc/manual/source/release-notes/rl-2.19.md +++ b/doc/manual/source/release-notes/rl-2.19.md @@ -17,8 +17,8 @@ - `nix-shell` shebang lines now support single-quoted arguments. -- `builtins.fetchTree` is now its own experimental feature, [`fetch-tree`](@docroot@/development/experimental-features.md#xp-fetch-tree). - This allows stabilising it independently of the rest of what is encompassed by [`flakes`](@docroot@/development/experimental-features.md#xp-fetch-tree). 
+- `builtins.fetchTree` is now its own experimental feature, [`fetch-tree`](@docroot@/development/experimental-features.md#xp-feature-fetch-tree). + This allows stabilising it independently of the rest of what is encompassed by [`flakes`](@docroot@/development/experimental-features.md#xp-feature-flakes). - The interface for creating and updating lock files has been overhauled: diff --git a/doc/manual/source/release-notes/rl-2.23.md b/doc/manual/source/release-notes/rl-2.23.md index e6b0e9ffcec..b358a0fdc3c 100644 --- a/doc/manual/source/release-notes/rl-2.23.md +++ b/doc/manual/source/release-notes/rl-2.23.md @@ -14,7 +14,7 @@ - Modify `nix derivation {add,show}` JSON format [#9866](https://github.com/NixOS/nix/issues/9866) [#10722](https://github.com/NixOS/nix/pull/10722) - The JSON format for derivations has been slightly revised to better conform to our [JSON guidelines](@docroot@/development/cli-guideline.md#returning-future-proof-json). + The JSON format for derivations has been slightly revised to better conform to our [JSON guidelines](@docroot@/development/json-guideline.md). In particular, the hash algorithm and content addressing method of content-addressed derivation outputs are now separated into two fields `hashAlgo` and `method`, rather than one field with an arcane `:`-separated format. diff --git a/doc/manual/source/release-notes/rl-2.24.md b/doc/manual/source/release-notes/rl-2.24.md index 33fc0db03f9..f608fb54f7d 100644 --- a/doc/manual/source/release-notes/rl-2.24.md +++ b/doc/manual/source/release-notes/rl-2.24.md @@ -93,7 +93,7 @@ - Support unit prefixes in configuration settings [#10668](https://github.com/NixOS/nix/pull/10668) - Configuration settings in Nix now support unit prefixes, allowing for more intuitive and readable configurations. For example, you can now specify [`--min-free 1G`](@docroot@/command-ref/opt-common.md#opt-min-free) to set the minimum free space to 1 gigabyte. 
+ Configuration settings in Nix now support unit prefixes, allowing for more intuitive and readable configurations. For example, you can now specify [`--min-free 1G`](@docroot@/command-ref/conf-file.md#conf-min-free) to set the minimum free space to 1 gigabyte. This enhancement was extracted from [#7851](https://github.com/NixOS/nix/pull/7851) and is also useful for PR [#10661](https://github.com/NixOS/nix/pull/10661). diff --git a/doc/manual/source/release-notes/rl-2.26.md b/doc/manual/source/release-notes/rl-2.26.md index 0c3df828f79..a665cc9fa8e 100644 --- a/doc/manual/source/release-notes/rl-2.26.md +++ b/doc/manual/source/release-notes/rl-2.26.md @@ -112,7 +112,7 @@ This release was made possible by the following 45 contributors: - Connor Baker [**(@ConnorBaker)**](https://github.com/ConnorBaker) - Cole Helbling [**(@cole-h)**](https://github.com/cole-h) - Jack Wilsdon [**(@jackwilsdon)**](https://github.com/jackwilsdon) -- ‮rekcäH nitraM‮ [**(@dwt)**](https://github.com/dwt) +- Martin Häcker [**(@dwt)**](https://github.com/dwt) - Martin Fischer [**(@not-my-profile)**](https://github.com/not-my-profile) - John Ericson [**(@Ericson2314)**](https://github.com/Ericson2314) - Graham Christensen [**(@grahamc)**](https://github.com/grahamc) diff --git a/doc/manual/source/release-notes/rl-2.32.md b/doc/manual/source/release-notes/rl-2.32.md index 3a925198dd4..5d90da0c9eb 100644 --- a/doc/manual/source/release-notes/rl-2.32.md +++ b/doc/manual/source/release-notes/rl-2.32.md @@ -12,7 +12,7 @@ We ultimately want to rectify this issue with all JSON formats to the extent allowed by our stability promises. To start with, we are changing the JSON format for derivations because the `nix derivation` commands are — in addition to being formally unstable — less widely used than other unstable commands. - See the documentation on the [JSON format for derivations](@docroot@/protocols/json/derivation.md) for further details. 
+ See the documentation on the [JSON format for derivations](@docroot@/protocols/json/derivation/index.md) for further details. - C API: `nix_get_attr_name_byidx`, `nix_get_attr_byidx` take a `nix_value *` instead of `const nix_value *` [#13987](https://github.com/NixOS/nix/pull/13987) diff --git a/doc/manual/source/release-notes/rl-2.33.md b/doc/manual/source/release-notes/rl-2.33.md new file mode 100644 index 00000000000..bed69702938 --- /dev/null +++ b/doc/manual/source/release-notes/rl-2.33.md @@ -0,0 +1,281 @@ +# Release 2.33.0 (2025-12-09) + +## New features + +- New command `nix registry resolve` [#14595](https://github.com/NixOS/nix/pull/14595) + + This command looks up a flake registry input name and returns the flakeref it resolves to. + + For example, looking up Nixpkgs: + + ``` + $ nix registry resolve nixpkgs + github:NixOS/nixpkgs/nixpkgs-unstable + ``` + + Upstreamed from [Determinate Nix 3.14.0](https://github.com/DeterminateSystems/nix-src/pull/273). + +- `nix flake clone` supports all input types [#14581](https://github.com/NixOS/nix/pull/14581) + + `nix flake clone` now supports arbitrary input types. In particular, this allows you to clone tarball flakes, such as flakes on FlakeHub. + + Upstreamed from [Determinate Nix 3.12.0](https://github.com/DeterminateSystems/nix-src/pull/229). + +## Performance improvements + +- Git fetcher computes `revCount`s using multiple threads [#14462](https://github.com/NixOS/nix/pull/14462) + + When using Git repositories with a long history, calculating the `revCount` attribute can take a long time. Nix now computes `revCount` using multiple threads, making it much faster (e.g. 9.1s to 3.7s for Nixpkgs). + + Note that if you don't need `revCount`, you can disable it altogether by setting the flake input attribute `shallow = true`. + + Upstreamed from [Determinate Nix 3.12.2](https://github.com/DeterminateSystems/nix-src/pull/245). 
+ +- `builtins.stringLength` now runs in constant time [#14442](https://github.com/NixOS/nix/pull/14442) + + The internal representation of strings has been replaced with a size-prefixed Pascal style string. Previously Nix stored strings as a NUL-terminated array of bytes, necessitating a linear scan to calculate the length. + +- Uploads to `http://` and `https://` binary cache stores now run in constant memory [#14390](https://github.com/NixOS/nix/pull/14390) + + Nix used to buffer the whole compressed NAR contents in memory. It now reads it in a streaming fashion. + +- Channel URLs migrated to channels.nixos.org subdomain [#14517](https://github.com/NixOS/nix/issues/14517) [#14518](https://github.com/NixOS/nix/pull/14518) + + Channel URLs have been updated from `https://nixos.org/channels/` to `https://channels.nixos.org/` throughout Nix. This subdomain provides better reliability with IPv6 support and improved CDN distribution. The old domain apex (`nixos.org/channels/`) currently redirects to the new location but may be deprecated in the future. + +- Fix `download buffer is full; consider increasing the 'download-buffer-size' setting` warning [#11728](https://github.com/NixOS/nix/issues/11728) [#14614](https://github.com/NixOS/nix/pull/14614) + + The underlying issue that led to [#11728](https://github.com/NixOS/nix/issues/11728) has been resolved by utilizing + [libcurl write pausing functionality](https://curl.se/libcurl/c/curl_easy_pause.html) to control backpressure when unpacking to slow destinations like the git-backed tarball cache. The default value of `download-buffer-size` is now 1 MiB and it's no longer recommended to increase it, since the root cause has been fixed. + + This is expected to improve download performance on fast connections, since previously a single slow download consumer would stall the thread and prevent any other transfers from progressing. 
+ + Many thanks go out to the [Lix project](https://lix.systems/) for the [implementation](https://git.lix.systems/lix-project/lix/commit/4ae6fb5a8f0d456b8d2ba2aaca3712b4e49057fc) that served as inspiration for this change and for triaging libcurl [issues with pausing](https://github.com/curl/curl/issues/19334). + +- Significantly improve tarball unpacking performance [#14689](https://github.com/NixOS/nix/pull/14689) [#14696](https://github.com/NixOS/nix/pull/14696) [#10683](https://github.com/NixOS/nix/issues/10683) [#11098](https://github.com/NixOS/nix/issues/11098) + + Nix uses a content-addressed cache backed by libgit2 for deduplicating files fetched via `fetchTarball` and `github`, `tarball` flake inputs. Its usage has been significantly optimised to reduce the amount of I/O operations that are performed. For a typical nixpkgs source tarball this results in 200 times fewer system calls on Linux. In combination with libcurl pausing this alleviates performance regressions stemming from the tarball cache. + +- Already valid derivations are no longer copied to the store [#14219](https://github.com/NixOS/nix/pull/14219) + + This results in a modest speedup when using the Nix daemon. 
+ +- `nix nar ls` and `nix nar cat` are significantly faster and no longer buffer the whole NAR in memory [#14273](https://github.com/NixOS/nix/pull/14273) [#14732](https://github.com/NixOS/nix/pull/14732) + +## S3 improvements + +- Improved S3 binary cache support via HTTP [#11748](https://github.com/NixOS/nix/issues/11748) [#12403](https://github.com/NixOS/nix/issues/12403) [#12671](https://github.com/NixOS/nix/issues/12671) [#13084](https://github.com/NixOS/nix/issues/13084) [#13752](https://github.com/NixOS/nix/pull/13752) [#13823](https://github.com/NixOS/nix/pull/13823) [#14026](https://github.com/NixOS/nix/pull/14026) [#14120](https://github.com/NixOS/nix/pull/14120) [#14131](https://github.com/NixOS/nix/pull/14131) [#14135](https://github.com/NixOS/nix/pull/14135) [#14144](https://github.com/NixOS/nix/pull/14144) [#14170](https://github.com/NixOS/nix/pull/14170) [#14190](https://github.com/NixOS/nix/pull/14190) [#14198](https://github.com/NixOS/nix/pull/14198) [#14206](https://github.com/NixOS/nix/pull/14206) [#14209](https://github.com/NixOS/nix/pull/14209) [#14222](https://github.com/NixOS/nix/pull/14222) [#14223](https://github.com/NixOS/nix/pull/14223) [#14330](https://github.com/NixOS/nix/pull/14330) [#14333](https://github.com/NixOS/nix/pull/14333) [#14335](https://github.com/NixOS/nix/pull/14335) [#14336](https://github.com/NixOS/nix/pull/14336) [#14337](https://github.com/NixOS/nix/pull/14337) [#14350](https://github.com/NixOS/nix/pull/14350) [#14356](https://github.com/NixOS/nix/pull/14356) [#14357](https://github.com/NixOS/nix/pull/14357) [#14374](https://github.com/NixOS/nix/pull/14374) [#14375](https://github.com/NixOS/nix/pull/14375) [#14376](https://github.com/NixOS/nix/pull/14376) [#14377](https://github.com/NixOS/nix/pull/14377) [#14391](https://github.com/NixOS/nix/pull/14391) [#14393](https://github.com/NixOS/nix/pull/14393) [#14420](https://github.com/NixOS/nix/pull/14420) [#14421](https://github.com/NixOS/nix/pull/14421) + + S3 binary 
cache operations now happen via HTTP, leveraging `libcurl`'s native AWS SigV4 authentication instead of the AWS C++ SDK, providing significant improvements: + + - **Reduced memory usage**: Eliminates memory buffering issues that caused segfaults with large files + - **Fixed upload reliability**: Resolves AWS SDK chunking errors (`InvalidChunkSizeError`) + - **Lighter dependencies**: Uses lightweight `aws-crt-cpp` instead of full `aws-cpp-sdk`, reducing build complexity + + The new implementation requires curl >= 7.75.0 and `aws-crt-cpp` for credential management. + + All existing S3 URL formats and parameters remain supported, however the store settings for configuring multipart uploads have changed: + + - **`multipart-upload`** (default: `false`): Enable multipart uploads for large files. When enabled, files exceeding the multipart threshold will be uploaded in multiple parts. + + - **`multipart-threshold`** (default: `100 MiB`): Minimum file size for using multipart uploads. Files smaller than this will use regular PUT requests. Only takes effect when `multipart-upload` is enabled. + + - **`multipart-chunk-size`** (default: `5 MiB`): Size of each part in multipart uploads. Must be at least 5 MiB (AWS S3 requirement). Larger chunk sizes reduce the number of requests but use more memory. + + - **`buffer-size`**: Has been replaced by `multipart-chunk-size` and is now an alias to it. + + Note that this change also means Nix now supports S3 binary cache stores even if built without `aws-crt-cpp`, but only for public buckets which do not require authentication. + +- S3 URLs now support object versioning via `versionId` parameter [#13955](https://github.com/NixOS/nix/issues/13955) [#14274](https://github.com/NixOS/nix/pull/14274) + + S3 URLs now support a `versionId` query parameter to fetch specific versions + of objects from S3 buckets with versioning enabled. 
This allows pinning to + exact object versions for reproducibility and protection against unexpected + changes: + + ``` + s3://bucket/key?region=us-east-1&versionId=abc123def456 + ``` + +- S3 binary cache stores now support storage class configuration [#7015](https://github.com/NixOS/nix/issues/7015) [#14464](https://github.com/NixOS/nix/pull/14464) + + S3 binary cache stores now support configuring the storage class for uploaded objects via the `storage-class` parameter. This allows users to optimize costs by selecting appropriate storage tiers based on access patterns. + + Example usage: + + ```bash + # Use Glacier storage for long-term archival + nix copy --to 's3://my-bucket?storage-class=GLACIER' /nix/store/... + + # Use Intelligent Tiering for automatic cost optimization + nix copy --to 's3://my-bucket?storage-class=INTELLIGENT_TIERING' /nix/store/... + ``` + + The storage class applies to both regular uploads and multipart uploads. When not specified, objects use the bucket's default storage class. + + See the [S3 storage classes documentation](https://docs.aws.amazon.com/AmazonS3/latest/userguide/storage-class-intro.html) for available storage classes and their characteristics. + + +## Store path info JSON format changes + +The JSON format emitted by `nix path-info --json` has been updated to a new version with improved structure. + +To maintain compatibility, `nix path-info --json` now requires a `--json-format` flag to specify the output format version. +Using `--json` without `--json-format` is deprecated and will become an error in a future release. +For now, it defaults to version 1 with a warning, for a smoother migration. 
+ +### Version 1 (`--json-format 1`) + +This is the legacy format, preserved for backwards compatibility: + +- String-based hash values (e.g., `"narHash": "sha256:FePFYIlM..."`) +- String-based content addresses (e.g., `"ca": "fixed:r:sha256:1abc..."`) +- Full store paths for map keys and references (e.g., `"/nix/store/abc...-foo"`) +- Now includes `"storeDir"` field at the top level + +### Version 2 (`--json-format 2`) + +The new structured format follows the [JSON guidelines](@docroot@/development/json-guideline.md) with the following changes: + +- **Nested structure with top-level metadata**: + + The output is now wrapped in an object with `version`, `storeDir`, and `info` fields: + + ```json + { + "version": 2, + "storeDir": "/nix/store", + "info": { ... } + } + ``` + + The map from store path base names to store object info is nested under the `info` field. + +- **Store path base names instead of full paths**: + + Map keys and references use store path base names (e.g., `"abc...-foo"`) instead of full absolute store paths. + Combined with `storeDir`, the full path can be reconstructed. + +- **Structured `ca` field**: + + Content address is now a structured JSON object instead of a string: + + - Old: `"ca": "fixed:r:sha256:1abc..."` + - New: `"ca": {"method": "nar", "hash": "sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0="}` + - Still `null` values for input-addressed store objects + + The `hash` field uses the [SRI](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity) format like other hashes. + +Additionally the following fields are added to both formats: + + - **`version` field**: + + All store path info JSON now includes `"version": <1|2>`. The `version` tracks breaking changes, and adding fields to outputted JSON is not a breaking change. + + - **`storeDir` field**: + + Top-level `"storeDir"` field contains the store directory path (e.g., `"/nix/store"`). 
+ +## Derivation JSON format changes + +The derivation JSON format has been updated from version 3 to version 4: + +- **Nested structure with top-level metadata**: + + The output of `nix derivation show` is now wrapped in an object with `version` and `derivations` fields: + + ```json + { + "version": 4, + "derivations": { ... } + } + ``` + + The map from derivation paths to derivation info is nested under the `derivations` field. + + This matches the structure used for `nix path-info --json --json-format 2`, and likewise brings this command into compliance with the JSON guidelines. + +- **Restructured inputs**: + + Inputs are now nested under an `inputs` object: + + - Old: `"inputSrcs": [...], "inputDrvs": {...}` + - New: `"inputs": {"srcs": [...], "drvs": {...}}` + +- **Consistent content addresses**: + + Fixed content-addressed outputs now use structured JSON format. + This is the same format as `ca` in store path info (after the new version). + +Version 3 and earlier formats are *not* accepted when reading. + +**Affected command**: `nix derivation`, namely its `show` and `add` sub-commands. + +## Miscellaneous changes + +- Git fetcher: Restore progress indication [#14487](https://github.com/NixOS/nix/pull/14487) + + Nix used to feel "stuck" while it was cloning large repositories. Nix now shows Git's native progress indicator while fetching. + + Upstreamed from [Determinate Nix 3.13.0](https://github.com/DeterminateSystems/nix-src/pull/250). + +- Interrupting REPL commands works more than once [#13481](https://github.com/NixOS/nix/issues/13481) + + Previously, this only worked once per REPL session; further attempts would be ignored. + This issue is now fixed, so REPL commands such as `:b` or `:p` can be canceled consistently. + This is a cherry-pick of the change from the [Lix project](https://gerrit.lix.systems/c/lix/+/1097). 
+ +- NAR unpacking code has been rewritten to make use of dirfd-based `openat` and `openat2` system calls when available [#14597](https://github.com/NixOS/nix/pull/14597) + +- Dynamic size unit rendering [#14423](https://github.com/NixOS/nix/pull/14423) [#14364](https://github.com/NixOS/nix/pull/14364) + + Various commands and the progress bar now use dynamically determined size units instead + of always using `MiB`. For example, the progress bar now reports download status like: + + ``` + [1/196/197 copied (773.7 MiB/2.1 GiB), 172.4/421.5 MiB DL] + ``` + + Instead of: + + ``` + [1/196/197 copied (773.7/2147.3 MiB), 172.4/421.5 MiB DL] + ``` + +## Contributors + +This release was made possible by the following 33 contributors: + +- Adam Dinwoodie [**(@me-and)**](https://github.com/me-and) +- jonhermansen [**(@jonhermansen)**](https://github.com/jonhermansen) +- Arnout Engelen [**(@raboof)**](https://github.com/raboof) +- Jean-François Roche [**(@jfroche)**](https://github.com/jfroche) +- tomberek [**(@tomberek)**](https://github.com/tomberek) +- Eelco Dolstra [**(@edolstra)**](https://github.com/edolstra) +- Marcel [**(@MarcelCoding)**](https://github.com/MarcelCoding) +- David McFarland [**(@corngood)**](https://github.com/corngood) +- Soumyadip Sarkar [**(@neuralsorcerer)**](https://github.com/neuralsorcerer) +- Cole Helbling [**(@cole-h)**](https://github.com/cole-h) +- John Ericson [**(@Ericson2314)**](https://github.com/Ericson2314) +- Tristan Ross [**(@RossComputerGuy)**](https://github.com/RossComputerGuy) +- Alex Auvolat [**(@Alexis211)**](https://github.com/Alexis211) +- edef [**(@edef1c)**](https://github.com/edef1c) +- Sergei Zimmerman [**(@xokdvium)**](https://github.com/xokdvium) +- Vinayak Goyal [**(@vinayakankugoyal)**](https://github.com/vinayakankugoyal) +- Graham Dennis [**(@GrahamDennis)**](https://github.com/GrahamDennis) +- Aspen Smith [**(@glittershark)**](https://github.com/glittershark) +- Jens Petersen [**(@juhp)**](https://github.com/juhp) 
+- Bernardo Meurer [**(@lovesegfault)**](https://github.com/lovesegfault) +- Peter Bynum [**(@pkpbynum)**](https://github.com/pkpbynum) +- Jörg Thalheim [**(@Mic92)**](https://github.com/Mic92) +- Alex Decious [**(@adeci)**](https://github.com/adeci) +- Matthieu Coudron [**(@teto)**](https://github.com/teto) +- Domen Kožar [**(@domenkozar)**](https://github.com/domenkozar) +- Taeer Bar-Yam [**(@Radvendii)**](https://github.com/Radvendii) +- Seth Flynn [**(@getchoo)**](https://github.com/getchoo) +- Robert Hensing [**(@roberth)**](https://github.com/roberth) +- Vladimir Panteleev [**(@CyberShadow)**](https://github.com/CyberShadow) +- bryango [**(@bryango)**](https://github.com/bryango) +- Henry [**(@cootshk)**](https://github.com/cootshk) +- Martin Joerg [**(@mjoerg)**](https://github.com/mjoerg) +- Farid Zakaria [**(@fzakaria)**](https://github.com/fzakaria) diff --git a/doc/manual/source/store/build-trace.md b/doc/manual/source/store/build-trace.md new file mode 100644 index 00000000000..a879d37d208 --- /dev/null +++ b/doc/manual/source/store/build-trace.md @@ -0,0 +1,53 @@ +# Build Trace + +> **Warning** +> +> This entire concept is currently +> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-ca-derivations) +> and subject to change. + +The *build trace* is a [memoization table](https://en.wikipedia.org/wiki/Memoization) for builds. +It maps the inputs of builds to the outputs of builds. +Concretely, that means it maps [derivations][derivation] to maps of [output] names to [store objects][store object]. + +In general the derivations used as a key should be [*resolved*](./resolution.md). +A build trace with all-resolved-derivation keys is also called a *base build trace* for extra clarity. +If all the resolved inputs of a derivation are content-addressed, that means the inputs will be fully determined, leaving no ambiguity for what build was performed. +(Input-addressed inputs however are still ambiguous. 
They too should be locked down, but this is left as future work.) + +Accordingly, to look up an unresolved derivation, one must first resolve it to get a resolved derivation. +Resolving itself involves looking up entries in the build trace, so this is a mutually recursive process that will end up inspecting possibly many entries. + +Except for the issue with input-addressed paths called out above, base build traces are trivially *coherent* -- incoherence is not possible. +That means that the claims that each key-value base build trace entry makes are independent, and no mapping invalidates another mapping. + +Whether the mappings are *true*, i.e. the faithful recording of actual builds performed, is another matter. +Coherence is about the multiple claims of the build trace being mutually consistent, not about whether the claims are individually true or false. + +In general, there is no way to audit a build trace entry except by performing the build again from scratch. +And even in that case, a different result doesn't mean the original entry was a "lie", because the derivation being built may be non-deterministic. +As such, the decision of whether to trust a counterparty's build trace is a fundamentally subjective policy choice. +Build trace entries are typically *signed* in order to enable arbitrary public-key-based trust policies. + +## Derived build traces {#derived} + +Implementations that wish to memoize the above may also keep additional *derived* build trace entries that do map unresolved derivations. +But if they do so, they *must* also keep the underlying base entries with resolved derivation keys around. +Firstly, this ensures that the derived entries are merely cache, which could be recomputed from scratch. +Secondly, this ensures the coherence of the derived build trace. + +Unlike with base build traces, incoherence with derived build traces is possible.
+The key ingredient is that derivation resolution is only deterministic with respect to a fixed base build trace. +Without fixing the base build trace, it inherits the subjectivity of base build traces themselves. + +Concretely, suppose there are three derivations \\(a\\), \\(b\\), and \\(c\\). +Let \\(a\\) be a resolved derivation, but let \\(b\\) and \\(c\\) be unresolved and both take as an input an output of \\(a\\). +Now suppose that derived entries are made for \\(b\\) and \\(c\\) based on two different entries of \\(a\\). +(This could happen if \\(a\\) is non-deterministic, \\(a\\) and \\(b\\) are built in one store, \\(a\\) and \\(c\\) are built in another store, and then a third store substitutes from both of the first two stores.) + +If trusting the derived build trace entries for \\(b\\) and \\(c\\) requires that each's underlying entry for \\(a\\) be also trusted, the two different mappings for \\(a\\) will be caught. +However, if \\(b\\) and \\(c\\)'s entries can be combined in isolation, there will be nothing to catch the contradiction in their hidden assumptions about \\(a\\)'s output. + +[derivation]: ./derivation/index.md +[output]: ./derivation/outputs/index.md +[store object]: @docroot@/store/store-object.md diff --git a/doc/manual/source/store/building.md b/doc/manual/source/store/building.md index dbfe6b5ca10..32e80012934 100644 --- a/doc/manual/source/store/building.md +++ b/doc/manual/source/store/building.md @@ -8,14 +8,15 @@ - Once this is done, the derivation is *normalized*, replacing each input deriving path with its store path, which we now know from realising the input. -## Builder Execution +## Builder Execution {#builder-execution} The [`builder`](./derivation/index.md#builder) is executed as follows: -- A temporary directory is created under the directory specified by - `TMPDIR` (default `/tmp`) where the build will take place. The +- A temporary directory is created where the build will take place. 
The current directory is changed to this directory. + See the per-store [`build-dir`](@docroot@/store/types/local-store.md#store-local-store-build-dir) setting for more information. + - The environment is cleared and set to the derivation attributes, as specified above. diff --git a/doc/manual/source/store/derivation/index.md b/doc/manual/source/store/derivation/index.md index 0e12b4d5e99..3a14d06bcaf 100644 --- a/doc/manual/source/store/derivation/index.md +++ b/doc/manual/source/store/derivation/index.md @@ -102,11 +102,11 @@ But rather than somehow scanning all the other fields for inputs, Nix requires t ### System {#system} -The system type on which the [`builder`](#attr-builder) executable is meant to be run. +The system type on which the [`builder`](#builder) executable is meant to be run. A necessary condition for Nix to schedule a given derivation on some [Nix instance] is for the "system" of that derivation to match that instance's [`system` configuration option] or [`extra-platforms` configuration option]. -By putting the `system` in each derivation, Nix allows *heterogenous* build plans, where not all steps can be run on the same machine or same sort of machine. +By putting the `system` in each derivation, Nix allows *heterogeneous* build plans, where not all steps can be run on the same machine or same sort of machine. Nix can schedule builds such that it automatically builds on other platforms by [forwarding build requests](@docroot@/advanced-topics/distributed-builds.md) to other Nix instances. 
[`system` configuration option]: @docroot@/command-ref/conf-file.md#conf-system @@ -192,7 +192,7 @@ There are two formats, documented separately: - The legacy ["ATerm" format](@docroot@/protocols/derivation-aterm.md) -- The experimental, currently under development and changing [JSON format](@docroot@/protocols/json/derivation.md) +- The experimental, currently under development and changing [JSON format](@docroot@/protocols/json/derivation/index.md) Every derivation has a canonical choice of encoding used to serialize it to a store object. This ensures that there is a canonical [store path] used to refer to the derivation, as described in [Referencing derivations](#derivation-path). @@ -245,7 +245,7 @@ If those other derivations *also* abide by this common case (and likewise for tr > note the ".drv" > ``` -## Extending the model to be higher-order +## Extending the model to be higher-order {#dynamic} **Experimental feature**: [`dynamic-derivations`](@docroot@/development/experimental-features.md#xp-feature-dynamic-derivations) diff --git a/doc/manual/source/store/derivation/outputs/content-address.md b/doc/manual/source/store/derivation/outputs/content-address.md index 4d51303480d..aa65fbe4932 100644 --- a/doc/manual/source/store/derivation/outputs/content-address.md +++ b/doc/manual/source/store/derivation/outputs/content-address.md @@ -167,10 +167,10 @@ It is only in the potential for that check to fail that they are different. > > In a future world where floating content-addressing is also stable, we in principle no longer need separate [fixed](#fixed) content-addressing. > Instead, we could always use floating content-addressing, and separately assert the precise value content address of a given store object to be used as an input (of another derivation). -> A stand-alone assertion object of this sort is not yet implemented, but its possible creation is tracked in [Issue #11955](https://github.com/NixOS/nix/issues/11955). 
+> A stand-alone assertion object of this sort is not yet implemented, but its possible creation is tracked in [issue #11955](https://github.com/NixOS/nix/issues/11955). > > In the current version of Nix, fixed outputs which fail their hash check are still registered as valid store objects, just not registered as outputs of the derivation which produced them. -> This is an optimization that means if the wrong output hash is specified in a derivation, and then the derivation is recreated with the right output hash, derivation does not need to be rebuilt --- avoiding downloading potentially large amounts of data twice. +> This is an optimization that means if the wrong output hash is specified in a derivation, and then the derivation is recreated with the right output hash, derivation does not need to be rebuilt — avoiding downloading potentially large amounts of data twice. > This optimisation prefigures the design above: > If the output hash assertion was removed outside the derivation itself, Nix could additionally not only register that outputted store object like today, but could also make note that derivation did in fact successfully download some data. For example, for the "fetch URL" example above, making such a note is tantamount to recording what data is available at the time of download at the given URL. 
diff --git a/doc/manual/source/store/derivation/outputs/index.md b/doc/manual/source/store/derivation/outputs/index.md index 0683f5703bf..ca2ce6665b0 100644 --- a/doc/manual/source/store/derivation/outputs/index.md +++ b/doc/manual/source/store/derivation/outputs/index.md @@ -43,7 +43,7 @@ In particular, the specification decides: - if the content is content-addressed, how is it content addressed -- if the content is content-addressed, [what is its content address](./content-address.md#fixed-content-addressing) (and thus what is its [store path]) +- if the content is content-addressed, [what is its content address](./content-address.md#fixed) (and thus what is its [store path]) ## Types of derivations diff --git a/doc/manual/source/store/derivation/outputs/input-address.md b/doc/manual/source/store/derivation/outputs/input-address.md index e2e15a801b6..3fd20f17d72 100644 --- a/doc/manual/source/store/derivation/outputs/input-address.md +++ b/doc/manual/source/store/derivation/outputs/input-address.md @@ -6,26 +6,221 @@ That is to say, an input-addressed output's store path is a function not of the output itself, but of the derivation that produced it. Even if two store paths have the same contents, if they are produced in different ways, and one is input-addressed, then they will have different store paths, and thus guaranteed to not be the same store object. - +type FirstOrderDerivingPath = ConstantPath | FirstOrderOutputPath; +type Inputs = Set; +``` + +For the algorithm below, we adopt a derivation where the two types of (first order) derived paths are partitioned into two sets, as follows: +```typescript +type Derivation = { + // inputs: Set; // replaced + inputSrcs: Set; // new instead + inputDrvOutputs: Set; // new instead + // ...other fields... 
+}; +``` + +In the [currently-experimental][xp-feature-dynamic-derivations] higher-order case where outputs of outputs are allowed as [deriving paths][deriving-path] and thus derivation inputs, derivations using that generalization are not valid arguments to this function. +Those derivations must be (partially) [resolved](@docroot@/store/resolution.md) enough first, to the point where no such higher-order inputs remain. +Then, and only then, can input addresses be assigned. + +``` +function hashQuotientDerivation(drv) -> Hash: + assert(drv.outputs are input-addressed) + drv′ ← drv with { + inputDrvOutputs = ⋃( + assert(drvPath is store path) + case hashOutputsOrQuotientDerivation(readDrv(drvPath)) of + drvHash : Hash → + (drvHash.toBase16(), output) + outputHashes : Map[String, Hash] → + (outputHashes[output].toBase16(), "out") + | (drvPath, output) ∈ drv.inputDrvOutputs + ) + } + return hashSHA256(printDrv(drv′)) + +function hashOutputsOrQuotientDerivation(drv) -> Map[String, Hash] | Hash: + if drv.outputs are content-addressed: + return { + outputName ↦ hashSHA256( + "fixed:out:" + ca.printMethodAlgo() + + ":" + ca.hash.toBase16() + + ":" + ca.makeFixedOutputPath(drv.name, outputName)) + | (outputName ↦ output) ∈ drv.outputs + , ca = output.contentAddress // or get from build trace if floating + } + else: // drv.outputs are input-addressed + return hashQuotientDerivation(drv) +``` + +### `hashQuotientDerivation` + +We replace each element in the derivation's `inputDrvOutputs` using data from a call to `hashOutputsOrQuotientDerivation` on the `drvPath` of that element. +When `hashOutputsOrQuotientDerivation` returns a single drv hash (because the input derivation in question is input-addressing), we simply swap out the `drvPath` for that hash, and keep the same output name. +When `hashOutputsOrQuotientDerivation` returns a map of content addresses per-output, we look up the output in question, and pair it with the output name `out`. 
+ +The resulting pseudo-derivation (with hashes instead of store paths in `inputDrvs`) is then printed (in the ["ATerm" format](@docroot@/protocols/derivation-aterm.md)) and hashed, and this becomes the hash of the "quotient derivation". + +When calculating output hashes, `hashQuotientDerivation` is called on an almost-complete input-addressing derivation, which is just missing its input-addressed output paths. +The derivation hash is then used to calculate output paths for each output. + +Those output paths can then be substituted into the almost-complete input-addressed derivation to complete it. + +> **Note** +> +> There may be an unintentional deviation from specification currently implemented in the `(outputHashes[output].toBase16(), "out")` case. +> This is not fatal because the deviation would only apply for content-addressing derivations with more than one output, and that only occurs in the floating case, which is [experimental][xp-feature-ca-derivations]. +> Once this bug is fixed, this note will be removed. + +### `hashOutputsOrQuotientDerivation` + +How does `hashOutputsOrQuotientDerivation` in turn work? +It consists of two main cases, based on whether the outputs of the derivation are to be input-addressed or content-addressed. + +#### Input-addressed outputs case + +In the input-addressed case, it just calls `hashQuotientDerivation`, and returns that derivation hash. +This makes `hashQuotientDerivation` and `hashOutputsOrQuotientDerivation` mutually-recursive. + +> **Note** +> +> In this case, `hashQuotientDerivation` is being called on a *complete* input-addressing derivation that already has its output paths calculated. +> The `inputDrvs` substitution takes place anyway. + +#### Content-addressed outputs case + +If the outputs are [content-addressed](./content-address.md), then it computes a hash for each output derived from the content-address of that output.
+ +> **Note** +> +> In the [fixed](./content-address.md#fixed) content-addressing case, the outputs' content addresses are statically specified in advance, so this always just works. +> (The fixed case is what the pseudo-code shows.) +> +> In the [floating](./content-address.md#floating) case, the content addresses are not specified in advance. +> This is what the "or get from [build trace](@docroot@/store/build-trace.md) if floating" comment refers to. +> In this case, the algorithm is *stuck* until the input in question is built, and we know what the actual contents of the output in question is. +> +> That is OK however, because there is no problem with delaying the assigning of input addresses (which, remember, is what `hashQuotientDerivation` is ultimately for) until all inputs are known. + +### Performance + +The recursion in the algorithm is potentially inefficient: +it could call itself once for each path by which a subderivation can be reached, i.e., `O(V^k)` times for a derivation graph with `V` derivations and with out-degree of at most `k`. +In the actual implementation, [memoisation](https://en.wikipedia.org/wiki/Memoization) is used to reduce this cost to be proportional to the total number of `inputDrvOutputs` encountered. + +### Semantic properties + +*See [this chapter's appendix](@docroot@/store/math-notation.md) on grammar and metavariable conventions.* + +In essence, `hashQuotientDerivation` partitions input-addressing derivations into equivalence classes: every derivation in that equivalence class is mapped to the same derivation hash. +We can characterize this equivalence relation directly, by working bottom up. + +We start by defining an equivalence relation on first-order output deriving paths that refer content-addressed derivation outputs. 
Two such paths are equivalent if they refer to the same store object: + +\\[ +\\begin{prooftree} +\\AxiomC{$d\_1$ is content-addressing} +\\AxiomC{$d\_2$ is content-addressing} +\\AxiomC{$ + {}^\*(\text{path}(d\_1), o\_1) + \= + {}^\*(\text{path}(d\_2), o\_2) +$} +\\TrinaryInfC{$(\text{path}(d\_1), o\_1) \\,\\sim_{\\mathrm{CA}}\\, (d\_2, o\_2)$} +\\end{prooftree} +\\] + +where \\({}^*(s, o)\\) denotes the store object that the output deriving path refers to. + +We will also need the following construction to lift any equivalence relation on \\(X\\) to an equivalence relation on (finite) sets of \\(X\\) (in short, \\(\\mathcal{P}(X)\\)): + +\\[ +\\begin{prooftree} +\\AxiomC{$\\forall a \\in A. \\exists b \\in B. a \\,\\sim\_X\\, b$} +\\AxiomC{$\\forall b \\in B. \\exists a \\in A. b \\,\\sim\_X\\, a$} +\\BinaryInfC{$A \\,\\sim_{\\mathcal{P}(X)}\\, B$} +\\end{prooftree} +\\] + +Now we can define the equivalence relation \\(\\sim_\\mathrm{IA}\\) on input-addressed derivation outputs. Two input-addressed outputs are equivalent if their derivations are equivalent (via the yet-to-be-defined \\(\\sim_{\\mathrm{IADrv}}\\) relation) and their output names are the same: + +\\[ +\\begin{prooftree} +\\AxiomC{$d\_1$ is input-addressing} +\\AxiomC{$d\_2$ is input-addressing} +\\AxiomC{$d\_1 \\,\\sim_{\\mathrm{IADrv}}\\, d\_2$} +\\AxiomC{$o\_1 = o\_2$} +\\QuaternaryInfC{$(\text{path}(d\_1), o\_1) \\,\\sim_{\\mathrm{IA}}\\, (\text{path}(d\_2), o\_2)$} +\\end{prooftree} +\\] + +And now we can define \\(\\sim_{\\mathrm{IADrv}}\\). 
+Two input-addressed derivations are equivalent if their content-addressed inputs are equivalent, their input-addressed inputs are also equivalent, and they are otherwise equal:
+
+
+
+\\[
+\\begin{prooftree}
+\\alwaysNoLine
+\\AxiomC{$
+  \\mathrm{caInputs}(d\_1)
+  \\,\\sim_{\\mathcal{P}(\\mathrm{CA})}\\,
+  \\mathrm{caInputs}(d\_2)
+$}
+\\AxiomC{$
+  \\mathrm{iaInputs}(d\_1)
+  \\,\\sim_{\\mathcal{P}(\\mathrm{IA})}\\,
+  \\mathrm{iaInputs}(d\_2)
+$}
+\\BinaryInfC{$
+  d\_1\left[\\mathrm{inputDrvOutputs} := \\{\\}\right]
+  \=
+  d\_2\left[\\mathrm{inputDrvOutputs} := \\{\\}\right]
+$}
+\\alwaysSingleLine
+\\UnaryInfC{$d\_1 \\,\\sim_{\\mathrm{IADrv}}\\, d\_2$}
+\\end{prooftree}
+\\]
+
+where \\(\\mathrm{caInputs}(d)\\) returns the content-addressed inputs of \\(d\\) and \\(\\mathrm{iaInputs}(d)\\) returns the input-addressed inputs.
+
+> **Note**
+>
+> An astute reader might notice that nowhere does `inputSrcs` enter into these definitions.
+> That means that replacing an input derivation with its outputs directly added to `inputSrcs` always results in a derivation in a different equivalence class, despite the resulting input closure (as would be mounted in the store at build time) being the same.
+> [Issue #9259](https://github.com/NixOS/nix/issues/9259) is about creating a coarser equivalence relation to address this.
+>
+> \\(\\sim_\mathrm{Drv}\\) from [derivation resolution](@docroot@/store/resolution.md) is such an equivalence relation.
+> It is coarser than this one: any two derivations which are "'hash quotient derivation'-equivalent" (\\(\\sim_\mathrm{IADrv}\\)) are also "resolution-equivalent" (\\(\\sim_\mathrm{Drv}\\)).
+> It also relates derivations whose `inputDrvOutputs` have been rewritten into `inputSrcs`.
+ +[deriving-path]: @docroot@/store/derivation/index.md#deriving-path +[xp-feature-dynamic-derivations]: @docroot@/development/experimental-features.md#xp-feature-dynamic-derivations [xp-feature-ca-derivations]: @docroot@/development/experimental-features.md#xp-feature-ca-derivations -[xp-feature-git-hashing]: @docroot@/development/experimental-features.md#xp-feature-git-hashing -[xp-feature-impure-derivations]: @docroot@/development/experimental-features.md#xp-feature-impure-derivations diff --git a/doc/manual/source/store/file-system-object.md b/doc/manual/source/store/file-system-object.md index 42f04726089..60cb3e57206 100644 --- a/doc/manual/source/store/file-system-object.md +++ b/doc/manual/source/store/file-system-object.md @@ -3,19 +3,23 @@ Nix uses a simplified model of the file system, which consists of file system objects. Every file system object is one of the following: - - File + - [**Regular File**]{#regular} - A possibly empty sequence of bytes for contents - A single boolean representing the [executable](https://en.m.wikipedia.org/wiki/File-system_permissions#Permissions) permission - - Directory + - [**Directory**]{#directory} Mapping of names to child file system objects - - [Symbolic link](https://en.m.wikipedia.org/wiki/Symbolic_link) + - [**Symbolic link**]{#symlink} - An arbitrary string. - Nix does not assign any semantics to symbolic links. + An arbitrary string, known as the *target* of the symlink. + + In general, Nix does not assign any semantics to symbolic links. + Certain operations however, may make additional assumptions and attempt to use the target to find another file system object. + + > See [the Wikpedia article on symbolic links](https://en.m.wikipedia.org/wiki/Symbolic_link) for background information if you are unfamiliar with this Unix concept. File system objects and their children form a tree. A bare file or symlink can be a root file system object. 
diff --git a/doc/manual/source/store/file-system-object/content-address.md b/doc/manual/source/store/file-system-object/content-address.md index 04a1021f144..5685de03e2b 100644 --- a/doc/manual/source/store/file-system-object/content-address.md +++ b/doc/manual/source/store/file-system-object/content-address.md @@ -46,7 +46,7 @@ be many different serialisations. For these reasons, Nix has its very own archive format—the Nix Archive (NAR) format, which is carefully designed to avoid the problems described above. -The exact specification of the Nix Archive format is in [specified here](../../protocols/nix-archive.md). +The exact specification of the Nix Archive format is in [specified here](../../protocols/nix-archive/index.md). ## Content addressing File System Objects beyond a single serialisation pass diff --git a/doc/manual/source/store/math-notation.md b/doc/manual/source/store/math-notation.md new file mode 100644 index 00000000000..723982e73fa --- /dev/null +++ b/doc/manual/source/store/math-notation.md @@ -0,0 +1,16 @@ +# Appendix: Math notation + +A few times in this manual, formal "proof trees" are used for [natural deduction](https://en.wikipedia.org/wiki/Natural_deduction)-style definition of various [relations](https://en.wikipedia.org/wiki/Relation_(mathematics)). + +The following grammar and assignment of metavariables to syntactic categories is used in these sections. 
+ +\\begin{align} +s, t &\in \text{store-path} \\\\ +o &\in \text{output-name} \\\\ +i, p &\in \text{deriving-path} \\\\ +d &\in \text{derivation} +\\end{align} + +\\begin{align} +\text{deriving-path} \quad p &::= s \mid (p, o) +\\end{align} diff --git a/doc/manual/source/store/resolution.md b/doc/manual/source/store/resolution.md new file mode 100644 index 00000000000..9a87fea99cc --- /dev/null +++ b/doc/manual/source/store/resolution.md @@ -0,0 +1,219 @@ +# Derivation Resolution + +*See [this chapter's appendix](@docroot@/store/math-notation.md) on grammar and metavariable conventions.* + +To *resolve* a derivation is to replace its [inputs] with the simplest inputs — plain store paths — that denote the same store objects. + +Derivations that only have store paths as inputs are likewise called *resolved derivations*. +(They are called that whether they are in fact the output of derivation resolution, or just made that way without non-store-path inputs to begin with.) + +## Input Content Equivalence of Derivations + +[Deriving paths][deriving-path] intentionally make it possible to refer to the same [store object] in multiple ways. +This is a consequence of content-addressing, since different derivations can produce the same outputs, and the same data can also be manually added to the store. +This is also a consequence even of input-addressing, as an output can be referred to by derivation and output name, or directly by its [computed](./derivation/outputs/input-address.md) store path. +Since dereferencing deriving paths is thus not injective, it induces an equivalence relation on deriving paths. + +Let's call this equivalence relation \\(\\sim\\), where \\(p_1 \\sim p_2\\) means that deriving paths \\(p_1\\) and \\(p_2\\) refer to the same store object. 
+
+**Content Equivalence**: Two deriving paths are equivalent if they refer to the same store object:
+
+\\[
+\\begin{prooftree}
+\\AxiomC{${}^*p_1 = {}^*p_2$}
+\\UnaryInfC{$p_1 \\,\\sim_\\mathrm{DP}\\, p_2$}
+\\end{prooftree}
+\\]
+
+where \\({}^\*p\\) denotes the store object that deriving path \\(p\\) refers to.
+
+This also induces an equivalence relation on sets of deriving paths:
+
+\\[
+\\begin{prooftree}
+\\AxiomC{$\\{ {}^*p | p \\in P_1 \\} = \\{ {}^*p | p \\in P_2 \\}$}
+\\UnaryInfC{$P_1 \\,\\sim_{\\mathcal{P}(\\mathrm{DP})}\\, P_2$}
+\\end{prooftree}
+\\]
+
+**Input Content Equivalence**: This, in turn, induces an equivalence relation on derivations: two derivations are equivalent if their inputs are equivalent, and they are otherwise equal:
+
+\\[
+\\begin{prooftree}
+\\AxiomC{$\\mathrm{inputs}(d_1) \\,\\sim_{\\mathcal{P}(\\mathrm{DP})}\\, \\mathrm{inputs}(d_2)$}
+\\AxiomC{$
+  d\_1\left[\\mathrm{inputs} := \\{\\}\right]
+  \=
+  d\_2\left[\\mathrm{inputs} := \\{\\}\right]
+$}
+\\BinaryInfC{$d_1 \\,\\sim_\\mathrm{Drv}\\, d_2$}
+\\end{prooftree}
+\\]
+
+Derivation resolution always maps derivations to input-content-equivalent derivations.
+
+## Resolution relation
+
+Dereferencing a deriving path — \\({}^\*p\\) above — was just introduced as a black box.
+But actually it is a multi-step process of looking up build results in the [build trace] that itself depends on resolving the lookup keys.
+Resolution is thus a recursive multi-step process that is worth diagramming formally.
+
+We can do this with a small-step binary transition relation; let's call it \\(\rightsquigarrow\\).
+We can then conclude dereferenced equality like this:
+
+\\[
+\\begin{prooftree}
+\\AxiomC{$p\_1 \\rightsquigarrow^* p$}
+\\AxiomC{$p\_2 \\rightsquigarrow^* p$}
+\\BinaryInfC{${}^*p\_1 = {}^*p\_2$}
+\\end{prooftree}
+\\]
+
+I.e. by showing that both original items resolve (over 0 or more small steps, hence the \\({}^*\\)) to the same exact item.
+ +With this motivation, let's now formalize a [small-step](https://en.wikipedia.org/wiki/Operational_semantics#Small-step_semantics) system of reduction rules for resolution. + +### Formal rules + +### \\(\text{resolved}\\) unary relation + +\\[ +\\begin{prooftree} +\\AxiomC{$s \in \text{store-path}$} +\\UnaryInfC{$s$ resolved} +\\end{prooftree} +\\] + +\\[ +\\begin{prooftree} +\\AxiomC{$\forall i \in \mathrm{inputs}(d). i \text{ resolved}$} +\\UnaryInfC{$d$ resolved} +\\end{prooftree} +\\] + +### \\(\rightsquigarrow\\) binary relation + +> **Remark** +> +> Actually, to be completely formal we would need to keep track of the build trace we are choosing to resolve against. +> +> We could do that by making \\(\rightsquigarrow\\) a ternary relation, which would pass the build trace to itself until it finally uses it in that one rule. +> This would add clutter more than insight, so we didn't bother to write it. +> +> There are other options too, like saying the whole reduction rule system is parameterized on the build trace, essentially [currying](https://en.wikipedia.org/wiki/Currying) the ternary \\(\rightsquigarrow\\) into a function from build traces to the binary relation written above. 
+
+#### Core build trace lookup rule
+
+\\[
+\\begin{prooftree}
+\\AxiomC{$s \in \text{store-path}$}
+\\AxiomC{${}^*s \in \text{derivation}$}
+\\AxiomC{${}^*s$ resolved}
+\\AxiomC{$\mathrm{build\text{-}trace}[s][o] = t$}
+\\QuaternaryInfC{$(s, o) \rightsquigarrow t$}
+\\RightLabel{\\scriptsize output path resolution}
+\\end{prooftree}
+\\]
+
+#### Inductive rules
+
+\\[
+\\begin{prooftree}
+\\AxiomC{$i \\rightsquigarrow i'$}
+\\AxiomC{$i \\in \\mathrm{inputs}(d)$}
+\\BinaryInfC{$d \\rightsquigarrow d[i \\mapsto i']$}
+\\end{prooftree}
+\\]
+
+\\[
+\\begin{prooftree}
+\\AxiomC{$d \\rightsquigarrow d'$}
+\\UnaryInfC{$(\\mathrm{path}(d), o) \\rightsquigarrow (\\mathrm{path}(d'), o)$}
+\\end{prooftree}
+\\]
+
+\\[
+\\begin{prooftree}
+\\AxiomC{$p \\rightsquigarrow p'$}
+\\UnaryInfC{$(p, o) \\rightsquigarrow (p', o)$}
+\\end{prooftree}
+\\]
+
+### Properties
+
+Like all well-behaved evaluation relations, partial resolution is [*confluent*](https://en.wikipedia.org/wiki/Confluence_(abstract_rewriting)).
+Also, if we take the symmetric closure of \\(\\rightsquigarrow^\*\\), we end up with the equivalence relations of the previous section.
+Resolution respects content equivalence for deriving paths, and input content equivalence for derivations.
+
+> **Remark**
+>
+> We chose to define from scratch a "resolved" unary relation explicitly above.
+> But it can also be defined as the normal forms of the \\(\\rightsquigarrow^\*\\) relation:
+>
+> \\[ a \text{ resolved} \Leftrightarrow \forall b. b \rightsquigarrow^* a \Rightarrow b = a\\]
+>
+> In prose, resolved terms are terms which \\(\\rightsquigarrow^\*\\) only relates on the left side to the same term on the right side; they are the terms which can be resolved no further.
+
+## Partial versus Complete Resolution
+
+Similar to evaluation, we can also speak of *partial* versus *complete* derivation resolution.
+Partial derivation resolution is what we've actually formalized above with \\(\\rightsquigarrow^\*\\).
+Complete resolution is resolution ending in a resolved term (deriving path or derivation).
+(Which is a normal form of the relation, per the remark above.)
+
+With partial resolution, a derivation is related to equivalent derivations with the same or simpler inputs, but not all those inputs will be plain store paths.
+This is useful when the input refers to a floating content addressed output we have not yet built — we don't know what (content-address) store path will be used for that derivation, so we are "stuck" trying to resolve the deriving path in question.
+(In the above formalization, this happens when the build trace is missing the keys we wish to look up in it.)
+
+Complete resolution is a *functional* relation, i.e. values on the left are uniquely related with values on the right.
+It is not, however, a *total* relation (in general, assuming arbitrary build traces).
+This is discussed in the next section.
+
+## Termination
+
+For static derivation graphs, complete resolution is indeed total, because it always terminates for all inputs.
+(A relation that is both total and functional is a function.)
+
+For [dynamic][xp-feature-dynamic-derivations] derivation graphs, however, this is not the case — resolution is not guaranteed to terminate.
+The issue isn't rewriting deriving paths themselves:
+a single rewrite to normalize an output deriving path to a constant one always exists, and always proceeds in one step.
+The issue is that dynamic derivations (i.e. those that are filled into the graph by a previous resolution) may have more transitive dependencies than the original derivation.
+
+> **Example**
+>
+> Suppose we have this deriving path
+> ```json
+> {
+>   "drvPath": {
+>     "drvPath": "...-foo.drv",
+>     "output": "bar.drv"
+>   },
+>   "output": "baz"
+> }
+> ```
+> and derivation `foo` is already resolved.
+> When we resolve this deriving path we'll end up with something like:
+> ```json
+> {
+>   "drvPath": "...-foo-bar.drv",
+>   "output": "baz"
+> }
+> ```
+> So far this is just an atomic single rewrite, with no termination issues.
+> But the derivation `foo-bar` may have its *own* dynamic derivation inputs.
+> Resolution must resolve that derivation first before the above deriving path can finally be normalized to a plain `...-foo-bar-baz` store path.
+
+The important thing to notice is that while "build trace" *keys* must be resolved,
+the *values* those keys are mapped to have no such constraints.
+An arbitrary store object has no notion of being resolved or not.
+But an arbitrary store object can be read back as a derivation (as will in fact be done in the case of dynamic derivations / nested output deriving paths).
+And those derivations need *not* be resolved.
+
+It is those dynamic non-resolved derivations which are the source of non-termination.
+By the same token, they are also the reason why dynamic derivations offer greater expressive power.
+
+[store object]: @docroot@/store/store-object.md
+[inputs]: @docroot@/store/derivation/index.md#inputs
+[build trace]: @docroot@/store/build-trace.md
+[deriving-path]: @docroot@/store/derivation/index.md#deriving-path
+[xp-feature-dynamic-derivations]: @docroot@/development/experimental-features.md#xp-feature-dynamic-derivations
diff --git a/doc/manual/source/store/secrets.md b/doc/manual/source/store/secrets.md
new file mode 100644
index 00000000000..dc15989a7fa
--- /dev/null
+++ b/doc/manual/source/store/secrets.md
@@ -0,0 +1,20 @@
+# Secrets
+
+The store is readable to all users on the system. For this reason, it
+is generally discouraged to allow secrets to make it into the store.
+
+Even on a single-user system, separate system users isolate services
+from each other and having secrets that all local users can read
+weakens that isolation. When using external store caches the secrets
+may end up there, and on multi-user systems the secrets will be
+available to all those users.
+
+Organize your derivations so that secrets are read from the filesystem
+(with appropriate access controls) at run time. Place the secrets on
+the filesystem manually or use a scheme that includes the secret in
+the store in encrypted form, and decrypts it, adding the relevant
+access control on system activation.
+Several such schemes for NixOS can be found in the
+[comparison of secret managing schemes] on the wiki.
+
+[comparison of secret managing schemes]: https://wiki.nixos.org/wiki/Comparison_of_secret_managing_schemes
diff --git a/doc/manual/source/store/store-object/content-address.md b/doc/manual/source/store/store-object/content-address.md
index 36e841fa356..7834ac51027 100644
--- a/doc/manual/source/store/store-object/content-address.md
+++ b/doc/manual/source/store/store-object/content-address.md
@@ -1,7 +1,7 @@
 # Content-Addressing Store Objects
 
 Just [like][fso-ca] [File System Objects][File System Object],
-[Store Objects][Store Object] can also be [content-addressed](@docroot@/glossary.md#gloss-content-addressed),
+[Store Objects][Store Object] can also be [content-addressed](@docroot@/glossary.md#gloss-content-address),
 unless they are [input-addressed](@docroot@/glossary.md#gloss-input-addressed-store-object).
 
 For store objects, the content address we produce will take the form of a [Store Path] rather than regular hash.
@@ -107,7 +107,7 @@ References (to other store objects and self-references alike) are supported so l
 >
 > This method is part of the [`git-hashing`][xp-feature-git-hashing] experimental feature.
 
-This uses the corresponding [Git](../file-system-object/content-address.md#serial-git) method of file system object content addressing.
+This uses the corresponding [Git](../file-system-object/content-address.md#git) method of file system object content addressing.
 
 References are not supported.
 
diff --git a/doc/manual/source/store/store-path.md b/doc/manual/source/store/store-path.md index beec2389b1c..4061f3653f6 100644 --- a/doc/manual/source/store/store-path.md +++ b/doc/manual/source/store/store-path.md @@ -6,7 +6,7 @@ > > A rendered store path -Nix implements references to [store objects](./index.md#store-object) as *store paths*. +Nix implements references to [store objects](./store-object.md) as *store paths*. Think of a store path as an [opaque], [unique identifier]: The only way to obtain store path is by adding or building store objects. diff --git a/doc/manual/substitute.py b/doc/manual/substitute.py index 6e27c338818..106dcf17df0 100644 --- a/doc/manual/substitute.py +++ b/doc/manual/substitute.py @@ -41,6 +41,10 @@ def recursive_replace(data: dict[str, t.Any], book_root: Path, search_path: Path return data | dict( sections = [recursive_replace(section, book_root, search_path) for section in sections], ) + case {'items': items}: + return data | dict( + items = [recursive_replace(item, book_root, search_path) for item in items], + ) case {'Chapter': chapter}: path_to_chapter = Path(chapter['path']) chapter_content = chapter['content'] diff --git a/doc/manual/theme/head.hbs b/doc/manual/theme/head.hbs new file mode 100644 index 00000000000..e514a99777f --- /dev/null +++ b/doc/manual/theme/head.hbs @@ -0,0 +1,15 @@ + + + diff --git a/docker.nix b/docker.nix index 754befee8f6..72c13663488 100644 --- a/docker.nix +++ b/docker.nix @@ -11,7 +11,7 @@ fromImage ? null, bundleNixpkgs ? true, channelName ? "nixpkgs", - channelURL ? "https://nixos.org/channels/nixpkgs-unstable", + channelURL ? "https://channels.nixos.org/nixpkgs-unstable", extraPkgs ? [ ], maxLayers ? 70, nixConf ? 
{ }, diff --git a/flake.nix b/flake.nix index 7e55c1b8c47..0d04d3b1825 100644 --- a/flake.nix +++ b/flake.nix @@ -361,6 +361,7 @@ # TODO probably should be `nix-cli` nix = self.packages.${system}.nix-everything; nix-manual = nixpkgsFor.${system}.native.nixComponents2.nix-manual; + nix-manual-manpages-only = nixpkgsFor.${system}.native.nixComponents2.nix-manual-manpages-only; nix-internal-api-docs = nixpkgsFor.${system}.native.nixComponents2.nix-internal-api-docs; nix-external-api-docs = nixpkgsFor.${system}.native.nixComponents2.nix-external-api-docs; @@ -439,6 +440,14 @@ supportsCross = false; }; + "nix-json-schema-checks" = { + supportsCross = false; + }; + + "nix-kaitai-struct-checks" = { + supportsCross = false; + }; + "nix-perl-bindings" = { supportsCross = false; }; @@ -491,6 +500,27 @@ } ); + apps = forAllSystems ( + system: + let + pkgs = nixpkgsFor.${system}.native; + opener = if pkgs.stdenv.isDarwin then "open" else "xdg-open"; + in + { + open-manual = { + type = "app"; + program = "${pkgs.writeShellScript "open-nix-manual" '' + path="${self.packages.${system}.nix-manual.site}/index.html" + if ! ${opener} "$path"; then + echo "Failed to open manual with ${opener}. 
Manual is located at:" + echo "$path" + fi + ''}"; + meta.description = "Open the Nix manual in your browser"; + }; + } + ); + devShells = let makeShell = import ./packaging/dev-shell.nix { inherit lib devFlake; }; diff --git a/maintainers/data/release-credits-email-to-handle.json b/maintainers/data/release-credits-email-to-handle.json index 0dbbf8fa60e..133087fe274 100644 --- a/maintainers/data/release-credits-email-to-handle.json +++ b/maintainers/data/release-credits-email-to-handle.json @@ -224,5 +224,25 @@ "42688647+netadr@users.noreply.github.com": "netadr", "matej.urbas@gmail.com": "urbas", "ethanalexevans@gmail.com": "ethanavatar", - "greg.marti@gmail.com": "gmarti" + "greg.marti@gmail.com": "gmarti", + "arnout@bzzt.net": "raboof", + "vinayakankugoyal@gmail.com": "vinayakankugoyal", + "Radvendii@users.noreply.github.com": "Radvendii", + "jon@jh86.org": "jonhermansen", + "edef@edef.eu": "edef1c", + "pkpbynum@gmail.com": "pkpbynum", + "886074+teto@users.noreply.github.com": "teto", + "alex@adnab.me": "Alexis211", + "root@gws.fyi": "glittershark", + "me@m4rc3l.de": "MarcelCoding", + "taeer.bar-yam@bevuta.com": "Radvendii", + "martin.joerg@gmail.com": "mjoerg", + "git@cy.md": "CyberShadow", + "cootshk@duck.com": "cootshk", + "adam@dinwoodie.org": "me-and", + "domen@cachix.org": "domenkozar", + "alex.decious@gmail.com": "adeci", + "soumya.papanvk18@gmail.com": "neuralsorcerer", + "gdennis@anduril.com": null, + "graham.dennis@gmail.com": "GrahamDennis" } \ No newline at end of file diff --git a/maintainers/data/release-credits-handle-to-name.json b/maintainers/data/release-credits-handle-to-name.json index 8abffc65caa..0c9997b85f5 100644 --- a/maintainers/data/release-credits-handle-to-name.json +++ b/maintainers/data/release-credits-handle-to-name.json @@ -118,7 +118,7 @@ "wh0": null, "mupdt": "Matej Urbas", "momeemt": "Mutsuha Asada", - "dwt": "\u202erekc\u00e4H nitraM\u202e", + "dwt": "Martin H\u00e4cker", "aidenfoxivey": "Aiden Fox Ivey", "ilya-bobyr": "Illia 
Bobyr", "B4dM4n": "Fabian M\u00f6ller", @@ -196,5 +196,21 @@ "gmarti": "Gr\u00e9gory Marti", "lovesegfault": "Bernardo Meurer", "EphraimSiegfried": "Ephraim Siegfried", - "hgl": "Glen Huang" + "hgl": "Glen Huang", + "mjoerg": "Martin Joerg", + "Alexis211": "Alex Auvolat", + "domenkozar": "Domen Ko\u017ear", + "edef1c": "edef", + "cootshk": "Henry", + "raboof": "Arnout Engelen", + "pkpbynum": "Peter Bynum", + "glittershark": "Aspen Smith", + "MarcelCoding": "Marcel", + "teto": "Matthieu Coudron", + "jonhermansen": null, + "neuralsorcerer": "Soumyadip Sarkar", + "adeci": "Alex Decious", + "vinayakankugoyal": "Vinayak Goyal", + "me-and": "Adam Dinwoodie", + "GrahamDennis": "Graham Dennis" } \ No newline at end of file diff --git a/maintainers/flake-module.nix b/maintainers/flake-module.nix index 4c37da80d95..7f7447b19e4 100644 --- a/maintainers/flake-module.nix +++ b/maintainers/flake-module.nix @@ -79,6 +79,8 @@ # Not supported by nixfmt ''^tests/functional/lang/eval-okay-deprecate-cursed-or\.nix$'' ''^tests/functional/lang/eval-okay-attrs5\.nix$'' + ''^tests/functional/lang/eval-fail-dynamic-attrs-inherit\.nix$'' + ''^tests/functional/lang/eval-fail-dynamic-attrs-inherit-2\.nix$'' # More syntax tests # These tests, or parts of them, should have been parse-* test cases. diff --git a/maintainers/release-process.md b/maintainers/release-process.md index 790618b7f4c..68de3b67778 100644 --- a/maintainers/release-process.md +++ b/maintainers/release-process.md @@ -142,7 +142,6 @@ release: $ git pull $ NEW_VERSION=2.13.0 $ echo $NEW_VERSION > .version - $ ... edit .mergify.yml to add the previous version ... 
$ git checkout -b bump-$NEW_VERSION $ git commit -a -m 'Bump version' $ git push --set-upstream origin bump-$NEW_VERSION diff --git a/meson.build b/meson.build index 73675615721..c072a482163 100644 --- a/meson.build +++ b/meson.build @@ -60,3 +60,9 @@ if get_option('unit-tests') subproject('libflake-tests') endif subproject('nix-functional-tests') +if get_option('json-schema-checks') + subproject('json-schema-checks') +endif +if get_option('kaitai-struct-checks') + subproject('kaitai-struct-checks') +endif diff --git a/meson.options b/meson.options index d2c9fa40c23..2739b0c7163 100644 --- a/meson.options +++ b/meson.options @@ -27,3 +27,17 @@ option( value : false, description : 'Build benchmarks (requires gbenchmark)', ) + +option( + 'kaitai-struct-checks', + type : 'boolean', + value : true, + description : 'Check the Kaitai Struct specifications (requires Kaitai Struct)', +) + +option( + 'json-schema-checks', + type : 'boolean', + value : true, + description : 'Check JSON schema validity of schemas and examples (requires jv)', +) diff --git a/nix-meson-build-support/common/asan-options/asan-options.cc b/nix-meson-build-support/common/asan-options/asan-options.cc new file mode 100644 index 00000000000..c9782fea03b --- /dev/null +++ b/nix-meson-build-support/common/asan-options/asan-options.cc @@ -0,0 +1,6 @@ +extern "C" [[gnu::retain, gnu::weak]] const char * __asan_default_options() +{ + // We leak a bunch of memory knowingly on purpose. It's not worthwhile to + // diagnose that memory being leaked for now. 
+ return "abort_on_error=1:print_summary=1:detect_leaks=0:detect_odr_violation=0"; +} diff --git a/nix-meson-build-support/asan-options/meson.build b/nix-meson-build-support/common/asan-options/meson.build similarity index 74% rename from nix-meson-build-support/asan-options/meson.build rename to nix-meson-build-support/common/asan-options/meson.build index 17880b0ed25..80527b5a988 100644 --- a/nix-meson-build-support/asan-options/meson.build +++ b/nix-meson-build-support/common/asan-options/meson.build @@ -1,7 +1,3 @@ -asan_test_options_env = { - 'ASAN_OPTIONS' : 'abort_on_error=1:print_summary=1:detect_leaks=0', -} - # Clang gets grumpy about missing libasan symbols if -shared-libasan is not # passed when building shared libs, at least on Linux if cxx.get_id() == 'clang' and ('address' in get_option('b_sanitize') or 'undefined' in get_option( @@ -10,3 +6,6 @@ if cxx.get_id() == 'clang' and ('address' in get_option('b_sanitize') or 'undefi add_project_link_arguments('-shared-libasan', language : 'cpp') endif +if 'address' in get_option('b_sanitize') + deps_other += declare_dependency(sources : 'asan-options.cc') +endif diff --git a/nix-meson-build-support/common/meson.build b/nix-meson-build-support/common/meson.build index 23013d6ee2b..5fcf557e70b 100644 --- a/nix-meson-build-support/common/meson.build +++ b/nix-meson-build-support/common/meson.build @@ -66,3 +66,4 @@ endif nix_soversion = meson.project_version().split('+')[0].split('pre')[0] subdir('assert-fail') +subdir('asan-options') diff --git a/packaging/components.nix b/packaging/components.nix index 49249797cca..6402e8b7b2f 100644 --- a/packaging/components.nix +++ b/packaging/components.nix @@ -195,6 +195,25 @@ let mesonFlags = [ (lib.mesonBool "b_asneeded" false) ] ++ prevAttrs.mesonFlags or [ ]; }; + enableSanitizersLayer = + finalAttrs: prevAttrs: + let + sanitizers = lib.optional scope.withASan "address" ++ lib.optional scope.withUBSan "undefined"; + in + { + mesonFlags = + (prevAttrs.mesonFlags or [ 
]) + ++ lib.optionals (lib.length sanitizers > 0) ( + [ + (lib.mesonOption "b_sanitize" (lib.concatStringsSep "," sanitizers)) + ] + ++ (lib.optionals stdenv.cc.isClang [ + # https://www.github.com/mesonbuild/meson/issues/764 + (lib.mesonBool "b_lundef" false) + ]) + ); + }; + nixDefaultsLayer = finalAttrs: prevAttrs: { strictDeps = prevAttrs.strictDeps or true; enableParallelBuilding = true; @@ -237,6 +256,16 @@ in inherit filesetToSource; + /** + Whether meson components are built with [AddressSanitizer](https://clang.llvm.org/docs/AddressSanitizer.html). + */ + withASan = false; + + /** + Whether meson components are built with [UndefinedBehaviorSanitizer](https://clang.llvm.org/docs/UndefinedBehaviorSanitizer.html). + */ + withUBSan = false; + /** A user-provided extension function to apply to each component derivation. */ @@ -323,6 +352,7 @@ in setVersionLayer mesonLayer fixupStaticLayer + enableSanitizersLayer scope.mesonComponentOverrides ]; mkMesonExecutable = mkPackageBuilder [ @@ -333,6 +363,7 @@ in mesonLayer mesonBuildLayer fixupStaticLayer + enableSanitizersLayer scope.mesonComponentOverrides ]; mkMesonLibrary = mkPackageBuilder [ @@ -344,6 +375,7 @@ in mesonBuildLayer mesonLibraryLayer fixupStaticLayer + enableSanitizersLayer scope.mesonComponentOverrides ]; @@ -388,6 +420,15 @@ in The manual as would be published on https://nix.dev/reference/nix-manual */ nix-manual = callPackage ../doc/manual/package.nix { version = fineVersion; }; + + /** + Manpages only (no HTML manual, no mdbook dependency) + */ + nix-manual-manpages-only = callPackage ../doc/manual/package.nix { + version = fineVersion; + buildHtmlManual = false; + }; + /** Doxygen pages for C++ code */ @@ -397,6 +438,16 @@ in */ nix-external-api-docs = callPackage ../src/external-api-docs/package.nix { version = fineVersion; }; + /** + JSON schema validation checks + */ + nix-json-schema-checks = callPackage ../src/json-schema-checks/package.nix { }; + + /** + Kaitai struct schema validation 
checks + */ + nix-kaitai-struct-checks = callPackage ../src/kaitai-struct-checks/package.nix { }; + nix-perl-bindings = callPackage ../src/perl/package.nix { }; /** @@ -449,7 +500,7 @@ in Example: ``` - overrideScope (finalScope: prevScope: { aws-sdk-cpp = null; }) + overrideScope (finalScope: prevScope: { aws-crt-cpp = null; }) ``` */ overrideScope = f: (scope.overrideScope f).nix-everything; diff --git a/packaging/dependencies.nix b/packaging/dependencies.nix index b4260a960cb..49c448bca63 100644 --- a/packaging/dependencies.nix +++ b/packaging/dependencies.nix @@ -16,21 +16,6 @@ in scope: { inherit stdenv; - aws-sdk-cpp = - (pkgs.aws-sdk-cpp.override { - apis = [ - "identity-management" - "s3" - "transfer" - ]; - customMemoryManagement = false; - }).overrideAttrs - { - # only a stripped down version is built, which takes a lot less resources - # to build, so we don't need a "big-parallel" machine. - requiredSystemFeatures = [ ]; - }; - boehmgc = (pkgs.boehmgc.override { enableLargeConfig = true; @@ -90,38 +75,4 @@ scope: { buildPhase = lib.replaceStrings [ "--without-python" ] [ "" ] old.buildPhase; installPhase = lib.replaceStrings [ "--without-python" ] [ "" ] old.installPhase; }); - - libgit2 = - if lib.versionAtLeast pkgs.libgit2.version "1.9.0" then - pkgs.libgit2 - else - pkgs.libgit2.overrideAttrs (attrs: { - # libgit2: Nixpkgs 24.11 has < 1.9.0, which needs our patches - nativeBuildInputs = - attrs.nativeBuildInputs or [ ] - # gitMinimal does not build on Windows. See packbuilder patch. 
- ++ lib.optionals (!stdenv.hostPlatform.isWindows) [ - # Needed for `git apply`; see `prePatch` - pkgs.buildPackages.gitMinimal - ]; - # Only `git apply` can handle git binary patches - prePatch = - attrs.prePatch or "" - + lib.optionalString (!stdenv.hostPlatform.isWindows) '' - patch() { - git apply - } - ''; - patches = - attrs.patches or [ ] - ++ [ - ./patches/libgit2-mempack-thin-packfile.patch - ] - # gitMinimal does not build on Windows, but fortunately this patch only - # impacts interruptibility - ++ lib.optionals (!stdenv.hostPlatform.isWindows) [ - # binary patch; see `prePatch` - ./patches/libgit2-packbuilder-callback-interruptible.patch - ]; - }); } diff --git a/packaging/dev-shell.nix b/packaging/dev-shell.nix index c4fc70511c1..6520845f53c 100644 --- a/packaging/dev-shell.nix +++ b/packaging/dev-shell.nix @@ -3,10 +3,118 @@ devFlake, }: +let + # Some helper functions + + /** + Compute a filtered closure of build inputs. + + Specifically, `buildInputsClosure cond startSet` computes the closure formed + by recursive application of `p: filter cond p.buildInputs ++ filter cond p.propagatedBuildInputs` + to `startSet`. + + Example: + ```nix + builtInputsClosure isInternal [ pkg1 pkg2 ] + => [ pkg1 pkg3 pkg2 pkg10 ] + ``` + + Note: order tbd + + Note: `startSet` is *NOT* filtered. + */ + buildInputsClosureCond = + cond: startSet: + let + closure = builtins.genericClosure { + startSet = map (d: { + key = d.drvPath; + value = d; + }) startSet; + operator = + d: + let + r = + map + (d': { + key = d'.drvPath; + value = d'; + }) + ( + lib.filter cond d.value.buildInputs or [ ] ++ lib.filter cond d.value.propagatedBuildInputs or [ ] + ); + in + r; + }; + in + map (item: item.value) closure; + + /** + `[ pkg1 pkg2 ]` -> `{ "...-pkg2.drv" = null; "...-pkg1.drv" = null }` + + Note: fairly arbitrary order (hash based). Use for efficient set membership test only. 
+ */ + byDrvPath = + l: + lib.listToAttrs ( + map (c: { + name = + # Just a lookup key + builtins.unsafeDiscardStringContext c.drvPath; + value = null; + }) l + ); + + /** + Stable dedup. + + Unlike `listToAttrs` -> `attrValues`, this preserves the input ordering, + which is more predictable ("deterministic") than e.g. sorting store paths, + whose hashes affect the ordering on every change. + */ + # TODO: add to Nixpkgs lib, refer from uniqueStrings + dedupByString = + key: l: + let + r = + lib.foldl' + ( + a@{ list, set }: + elem: + let + k = builtins.unsafeDiscardStringContext (key elem); + in + if set ? ${k} then + a + else + let + # Note: O(n²) copying. Use linkedLists to concat them in one go at the end. + # https://github.com/NixOS/nixpkgs/pull/452088 + newList = [ elem ] ++ list; + newSet = set // { + ${k} = null; + }; + in + builtins.seq newList builtins.seq newSet { + list = newList; + set = newSet; + } + ) + { + list = [ ]; + set = { }; + } + l; + in + r.list; + +in + { pkgs }: +# TODO: don't use nix-util for this? 
pkgs.nixComponents2.nix-util.overrideAttrs ( - attrs: + finalAttrs: prevAttrs: let stdenv = pkgs.nixDependencies2.stdenv; @@ -21,13 +129,93 @@ pkgs.nixComponents2.nix-util.overrideAttrs ( "-D${prefix}:${rest}"; havePerl = stdenv.buildPlatform == stdenv.hostPlatform && stdenv.hostPlatform.isUnix; ignoreCrossFile = flags: builtins.filter (flag: !(lib.strings.hasInfix "cross-file" flag)) flags; + + availableComponents = lib.filterAttrs ( + k: v: lib.meta.availableOn pkgs.hostPlatform v + ) allComponents; + + activeComponents = buildInputsClosureCond isInternal ( + lib.attrValues (finalAttrs.passthru.config.getComponents availableComponents) + ); + + allComponents = lib.filterAttrs (k: v: lib.isDerivation v) pkgs.nixComponents2; + internalDrvs = byDrvPath ( + # Drop the attr names (not present in buildInputs anyway) + lib.attrValues availableComponents + ++ lib.concatMap (c: lib.attrValues c.tests or { }) (lib.attrValues availableComponents) + ); + + isInternal = + dep: internalDrvs ? ${builtins.unsafeDiscardStringContext dep.drvPath or "_non-existent_"}; + in { - pname = "shell-for-" + attrs.pname; + pname = "shell-for-nix"; + + passthru = { + inherit activeComponents; + + # We use this attribute to store non-derivation values like functions and + # perhaps other things that are primarily for overriding and not the shell. 
+ config = { + # Default getComponents + getComponents = + c: + builtins.removeAttrs c ( + lib.optionals (!havePerl) [ "nix-perl-bindings" ] + ++ lib.optionals (!buildCanExecuteHost) [ "nix-manual" ] + ); + }; + + /** + Produce a devShell for a given set of nix components + + Example: + + ```nix + shell.withActiveComponents (c: { + inherit (c) nix-util; + }) + ``` + */ + withActiveComponents = + f2: + finalAttrs.finalPackage.overrideAttrs ( + finalAttrs: prevAttrs: { + passthru = prevAttrs.passthru // { + config = prevAttrs.passthru.config // { + getComponents = f2; + }; + }; + } + ); + + small = + (finalAttrs.finalPackage.withActiveComponents ( + c: + lib.intersectAttrs (lib.genAttrs [ + "nix-cli" + "nix-util-tests" + "nix-store-tests" + "nix-expr-tests" + "nix-fetchers-tests" + "nix-flake-tests" + "nix-functional-tests" + "nix-perl-bindings" + ] (_: null)) c + )).overrideAttrs + (o: { + mesonFlags = o.mesonFlags ++ [ + # TODO: infer from activeComponents or vice versa + "-Dkaitai-struct-checks=false" + "-Djson-schema-checks=false" + ]; + }); + }; # Remove the version suffix to avoid unnecessary attempts to substitute in nix develop version = lib.fileContents ../.version-determinate; - name = attrs.pname; + name = finalAttrs.pname; installFlags = "sysconfdir=$(out)/etc"; shellHook = '' @@ -70,6 +258,9 @@ pkgs.nixComponents2.nix-util.overrideAttrs ( # We use this shell with the local checkout, not unpackPhase. src = null; + # Workaround https://sourceware.org/pipermail/gdb-patches/2025-October/221398.html + # Remove when gdb fix is rolled out everywhere. 
+ separateDebugInfo = false; env = { # For `make format`, to work without installing pre-commit @@ -82,6 +273,8 @@ pkgs.nixComponents2.nix-util.overrideAttrs ( CXX_LD = "mold"; }; + dontUseCmakeConfigure = true; + mesonFlags = map (transformFlag "libutil") (ignoreCrossFile pkgs.nixComponents2.nix-util.mesonFlags) ++ map (transformFlag "libstore") (ignoreCrossFile pkgs.nixComponents2.nix-store.mesonFlags) @@ -93,51 +286,52 @@ pkgs.nixComponents2.nix-util.overrideAttrs ( ++ map (transformFlag "libcmd") (ignoreCrossFile pkgs.nixComponents2.nix-cmd.mesonFlags); nativeBuildInputs = - attrs.nativeBuildInputs or [ ] - ++ pkgs.nixComponents2.nix-util.nativeBuildInputs - ++ pkgs.nixComponents2.nix-store.nativeBuildInputs - ++ pkgs.nixComponents2.nix-fetchers.nativeBuildInputs - ++ pkgs.nixComponents2.nix-expr.nativeBuildInputs - ++ lib.optionals havePerl pkgs.nixComponents2.nix-perl-bindings.nativeBuildInputs - ++ lib.optionals buildCanExecuteHost pkgs.nixComponents2.nix-manual.externalNativeBuildInputs - ++ pkgs.nixComponents2.nix-internal-api-docs.nativeBuildInputs - ++ pkgs.nixComponents2.nix-external-api-docs.nativeBuildInputs - ++ pkgs.nixComponents2.nix-functional-tests.externalNativeBuildInputs - ++ lib.optional ( - !buildCanExecuteHost - # Hack around https://github.com/nixos/nixpkgs/commit/bf7ad8cfbfa102a90463433e2c5027573b462479 - && !(stdenv.hostPlatform.isWindows && stdenv.buildPlatform.isDarwin) - && stdenv.hostPlatform.emulatorAvailable pkgs.buildPackages - && lib.meta.availableOn stdenv.buildPlatform (stdenv.hostPlatform.emulator pkgs.buildPackages) - ) pkgs.buildPackages.mesonEmulatorHook - ++ [ - pkgs.buildPackages.cmake - pkgs.buildPackages.gnused - pkgs.buildPackages.shellcheck - pkgs.buildPackages.changelog-d - modular.pre-commit.settings.package - (pkgs.writeScriptBin "pre-commit-hooks-install" modular.pre-commit.settings.installationScript) - pkgs.buildPackages.nixfmt-rfc-style - pkgs.buildPackages.shellcheck - pkgs.buildPackages.gdb - ] - ++ 
lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) ( - lib.hiPrio pkgs.buildPackages.clang-tools + let + inputs = + dedupByString (v: "${v}") ( + lib.filter (x: !isInternal x) (lib.lists.concatMap (c: c.nativeBuildInputs) activeComponents) + ) + ++ lib.optional ( + !buildCanExecuteHost + # Hack around https://github.com/nixos/nixpkgs/commit/bf7ad8cfbfa102a90463433e2c5027573b462479 + && !(stdenv.hostPlatform.isWindows && stdenv.buildPlatform.isDarwin) + && stdenv.hostPlatform.emulatorAvailable pkgs.buildPackages + && lib.meta.availableOn stdenv.buildPlatform (stdenv.hostPlatform.emulator pkgs.buildPackages) + ) pkgs.buildPackages.mesonEmulatorHook + ++ [ + pkgs.buildPackages.gnused + modular.pre-commit.settings.package + (pkgs.writeScriptBin "pre-commit-hooks-install" modular.pre-commit.settings.installationScript) + pkgs.buildPackages.nixfmt-rfc-style + pkgs.buildPackages.shellcheck + pkgs.buildPackages.include-what-you-use + pkgs.buildPackages.gdb + ] + ++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) ( + lib.hiPrio pkgs.buildPackages.clang-tools + ) + ++ lib.optional stdenv.hostPlatform.isLinux pkgs.buildPackages.mold-wrapped; + in + # FIXME: separateDebugInfo = false doesn't actually prevent -Wa,--compress-debug-sections + # from making its way into NIX_CFLAGS_COMPILE. 
+ lib.filter (p: !lib.hasInfix "separate-debug-info" p) inputs; + + propagatedNativeBuildInputs = dedupByString (v: "${v}") ( + lib.filter (x: !isInternal x) ( + lib.lists.concatMap (c: c.propagatedNativeBuildInputs) activeComponents ) - ++ lib.optional stdenv.hostPlatform.isLinux pkgs.buildPackages.mold-wrapped; + ); buildInputs = [ pkgs.gbenchmark ] - ++ attrs.buildInputs or [ ] - ++ pkgs.nixComponents2.nix-util.buildInputs - ++ pkgs.nixComponents2.nix-store.buildInputs - ++ pkgs.nixComponents2.nix-store-tests.externalBuildInputs - ++ pkgs.nixComponents2.nix-fetchers.buildInputs - ++ pkgs.nixComponents2.nix-expr.buildInputs - ++ pkgs.nixComponents2.nix-expr.externalPropagatedBuildInputs - ++ pkgs.nixComponents2.nix-cmd.buildInputs - ++ lib.optionals havePerl pkgs.nixComponents2.nix-perl-bindings.externalBuildInputs + ++ dedupByString (v: "${v}") ( + lib.filter (x: !isInternal x) (lib.lists.concatMap (c: c.buildInputs) activeComponents) + ) ++ lib.optional havePerl pkgs.perl; + + propagatedBuildInputs = dedupByString (v: "${v}") ( + lib.filter (x: !isInternal x) (lib.lists.concatMap (c: c.propagatedBuildInputs) activeComponents) + ); } ) diff --git a/packaging/hydra.nix b/packaging/hydra.nix index d563bff0bb3..9839dd62163 100644 --- a/packaging/hydra.nix +++ b/packaging/hydra.nix @@ -62,18 +62,21 @@ let "nix-cmd" "nix-cli" "nix-functional-tests" + "nix-json-schema-checks" ] ++ lib.optionals enableBindings [ "nix-perl-bindings" ] ++ lib.optionals enableDocs [ "nix-manual" + "nix-manual-manpages-only" "nix-internal-api-docs" "nix-external-api-docs" + "nix-kaitai-struct-checks" ] ); in -{ +rec { /** An internal check to make sure our package listing is complete. 
*/ diff --git a/packaging/patches/libgit2-mempack-thin-packfile.patch b/packaging/patches/libgit2-mempack-thin-packfile.patch deleted file mode 100644 index fb74b168313..00000000000 --- a/packaging/patches/libgit2-mempack-thin-packfile.patch +++ /dev/null @@ -1,282 +0,0 @@ -commit 9bacade4a3ef4b6b26e2c02f549eef0e9eb9eaa2 -Author: Robert Hensing -Date: Sun Aug 18 20:20:36 2024 +0200 - - Add unoptimized git_mempack_write_thin_pack - -diff --git a/include/git2/sys/mempack.h b/include/git2/sys/mempack.h -index 17da590a3..3688bdd50 100644 ---- a/include/git2/sys/mempack.h -+++ b/include/git2/sys/mempack.h -@@ -44,6 +44,29 @@ GIT_BEGIN_DECL - */ - GIT_EXTERN(int) git_mempack_new(git_odb_backend **out); - -+/** -+ * Write a thin packfile with the objects in the memory store. -+ * -+ * A thin packfile is a packfile that does not contain its transitive closure of -+ * references. This is useful for efficiently distributing additions to a -+ * repository over the network, but also finds use in the efficient bulk -+ * addition of objects to a repository, locally. -+ * -+ * This operation performs the (shallow) insert operations into the -+ * `git_packbuilder`, but does not write the packfile to disk; -+ * see `git_packbuilder_write_buf`. -+ * -+ * It also does not reset the memory store; see `git_mempack_reset`. -+ * -+ * @note This function may or may not write trees and blobs that are not -+ * referenced by commits. Currently everything is written, but this -+ * behavior may change in the future as the packer is optimized. -+ * -+ * @param backend The mempack backend -+ * @param pb The packbuilder to use to write the packfile -+ */ -+GIT_EXTERN(int) git_mempack_write_thin_pack(git_odb_backend *backend, git_packbuilder *pb); -+ - /** - * Dump all the queued in-memory writes to a packfile. 
- * -diff --git a/src/libgit2/odb_mempack.c b/src/libgit2/odb_mempack.c -index 6f27f45f8..0b61e2b66 100644 ---- a/src/libgit2/odb_mempack.c -+++ b/src/libgit2/odb_mempack.c -@@ -132,6 +132,35 @@ cleanup: - return err; - } - -+int git_mempack_write_thin_pack(git_odb_backend *backend, git_packbuilder *pb) -+{ -+ struct memory_packer_db *db = (struct memory_packer_db *)backend; -+ const git_oid *oid; -+ size_t iter = 0; -+ int err = -1; -+ -+ /* TODO: Implement the recency heuristics. -+ For this it probably makes sense to only write what's referenced -+ through commits, an option I've carved out for you in the docs. -+ wrt heuristics: ask your favorite LLM to translate https://git-scm.com/docs/pack-heuristics/en -+ to actual normal reference documentation. */ -+ while (true) { -+ err = git_oidmap_iterate(NULL, db->objects, &iter, &oid); -+ if (err == GIT_ITEROVER) { -+ err = 0; -+ break; -+ } -+ if (err != 0) -+ return err; -+ -+ err = git_packbuilder_insert(pb, oid, NULL); -+ if (err != 0) -+ return err; -+ } -+ -+ return 0; -+} -+ - int git_mempack_dump( - git_buf *pack, - git_repository *repo, -diff --git a/tests/libgit2/mempack/thinpack.c b/tests/libgit2/mempack/thinpack.c -new file mode 100644 -index 000000000..604a4dda2 ---- /dev/null -+++ b/tests/libgit2/mempack/thinpack.c -@@ -0,0 +1,196 @@ -+#include "clar_libgit2.h" -+#include "git2/indexer.h" -+#include "git2/odb_backend.h" -+#include "git2/tree.h" -+#include "git2/types.h" -+#include "git2/sys/mempack.h" -+#include "git2/sys/odb_backend.h" -+#include "util.h" -+ -+static git_repository *_repo; -+static git_odb_backend * _mempack_backend; -+ -+void test_mempack_thinpack__initialize(void) -+{ -+ git_odb *odb; -+ -+ _repo = cl_git_sandbox_init_new("mempack_thinpack_repo"); -+ -+ cl_git_pass(git_mempack_new(&_mempack_backend)); -+ cl_git_pass(git_repository_odb(&odb, _repo)); -+ cl_git_pass(git_odb_add_backend(odb, _mempack_backend, 999)); -+ git_odb_free(odb); -+} -+ -+void _mempack_thinpack__cleanup(void) 
-+{ -+ cl_git_sandbox_cleanup(); -+} -+ -+/* -+ Generating a packfile for an unchanged repo works and produces an empty packfile. -+ Even if we allow this scenario to be detected, it shouldn't misbehave if the -+ application is unaware of it. -+*/ -+void test_mempack_thinpack__empty(void) -+{ -+ git_packbuilder *pb; -+ int version; -+ int n; -+ git_buf buf = GIT_BUF_INIT; -+ -+ git_packbuilder_new(&pb, _repo); -+ -+ cl_git_pass(git_mempack_write_thin_pack(_mempack_backend, pb)); -+ cl_git_pass(git_packbuilder_write_buf(&buf, pb)); -+ cl_assert_in_range(12, buf.size, 1024 /* empty packfile is >0 bytes, but certainly not that big */); -+ cl_assert(buf.ptr[0] == 'P'); -+ cl_assert(buf.ptr[1] == 'A'); -+ cl_assert(buf.ptr[2] == 'C'); -+ cl_assert(buf.ptr[3] == 'K'); -+ version = (buf.ptr[4] << 24) | (buf.ptr[5] << 16) | (buf.ptr[6] << 8) | buf.ptr[7]; -+ /* Subject to change. https://git-scm.com/docs/pack-format: Git currently accepts version number 2 or 3 but generates version 2 only.*/ -+ cl_assert_equal_i(2, version); -+ n = (buf.ptr[8] << 24) | (buf.ptr[9] << 16) | (buf.ptr[10] << 8) | buf.ptr[11]; -+ cl_assert_equal_i(0, n); -+ git_buf_dispose(&buf); -+ -+ git_packbuilder_free(pb); -+} -+ -+#define LIT_LEN(x) x, sizeof(x) - 1 -+ -+/* -+ Check that git_mempack_write_thin_pack produces a thin packfile. -+*/ -+void test_mempack_thinpack__thin(void) -+{ -+ /* Outline: -+ - Create tree 1 -+ - Flush to packfile A -+ - Create tree 2 -+ - Flush to packfile B -+ -+ Tree 2 has a new blob and a reference to a blob from tree 1. 
-+ -+ Expectation: -+ - Packfile B is thin and does not contain the objects from packfile A -+ */ -+ -+ -+ git_oid oid_blob_1; -+ git_oid oid_blob_2; -+ git_oid oid_blob_3; -+ git_oid oid_tree_1; -+ git_oid oid_tree_2; -+ git_treebuilder *tb; -+ -+ git_packbuilder *pb; -+ git_buf buf = GIT_BUF_INIT; -+ git_indexer *indexer; -+ git_indexer_progress stats; -+ char pack_dir_path[1024]; -+ -+ char sbuf[1024]; -+ const char * repo_path; -+ const char * pack_name_1; -+ const char * pack_name_2; -+ git_str pack_path_1 = GIT_STR_INIT; -+ git_str pack_path_2 = GIT_STR_INIT; -+ git_odb_backend * pack_odb_backend_1; -+ git_odb_backend * pack_odb_backend_2; -+ -+ -+ cl_assert_in_range(0, snprintf(pack_dir_path, sizeof(pack_dir_path), "%s/objects/pack", git_repository_path(_repo)), sizeof(pack_dir_path)); -+ -+ /* Create tree 1 */ -+ -+ cl_git_pass(git_blob_create_from_buffer(&oid_blob_1, _repo, LIT_LEN("thinpack blob 1"))); -+ cl_git_pass(git_blob_create_from_buffer(&oid_blob_2, _repo, LIT_LEN("thinpack blob 2"))); -+ -+ -+ cl_git_pass(git_treebuilder_new(&tb, _repo, NULL)); -+ cl_git_pass(git_treebuilder_insert(NULL, tb, "blob1", &oid_blob_1, GIT_FILEMODE_BLOB)); -+ cl_git_pass(git_treebuilder_insert(NULL, tb, "blob2", &oid_blob_2, GIT_FILEMODE_BLOB)); -+ cl_git_pass(git_treebuilder_write(&oid_tree_1, tb)); -+ -+ /* Flush */ -+ -+ cl_git_pass(git_packbuilder_new(&pb, _repo)); -+ cl_git_pass(git_mempack_write_thin_pack(_mempack_backend, pb)); -+ cl_git_pass(git_packbuilder_write_buf(&buf, pb)); -+ cl_git_pass(git_indexer_new(&indexer, pack_dir_path, 0, NULL, NULL)); -+ cl_git_pass(git_indexer_append(indexer, buf.ptr, buf.size, &stats)); -+ cl_git_pass(git_indexer_commit(indexer, &stats)); -+ pack_name_1 = strdup(git_indexer_name(indexer)); -+ cl_assert(pack_name_1); -+ git_buf_dispose(&buf); -+ git_mempack_reset(_mempack_backend); -+ git_indexer_free(indexer); -+ git_packbuilder_free(pb); -+ -+ /* Create tree 2 */ -+ -+ cl_git_pass(git_treebuilder_clear(tb)); -+ /* blob 1 
won't be used, but we add it anyway to test that just "declaring" an object doesn't -+ necessarily cause its inclusion in the next thin packfile. It must only be included if new. */ -+ cl_git_pass(git_blob_create_from_buffer(&oid_blob_1, _repo, LIT_LEN("thinpack blob 1"))); -+ cl_git_pass(git_blob_create_from_buffer(&oid_blob_3, _repo, LIT_LEN("thinpack blob 3"))); -+ cl_git_pass(git_treebuilder_insert(NULL, tb, "blob1", &oid_blob_1, GIT_FILEMODE_BLOB)); -+ cl_git_pass(git_treebuilder_insert(NULL, tb, "blob3", &oid_blob_3, GIT_FILEMODE_BLOB)); -+ cl_git_pass(git_treebuilder_write(&oid_tree_2, tb)); -+ -+ /* Flush */ -+ -+ cl_git_pass(git_packbuilder_new(&pb, _repo)); -+ cl_git_pass(git_mempack_write_thin_pack(_mempack_backend, pb)); -+ cl_git_pass(git_packbuilder_write_buf(&buf, pb)); -+ cl_git_pass(git_indexer_new(&indexer, pack_dir_path, 0, NULL, NULL)); -+ cl_git_pass(git_indexer_append(indexer, buf.ptr, buf.size, &stats)); -+ cl_git_pass(git_indexer_commit(indexer, &stats)); -+ pack_name_2 = strdup(git_indexer_name(indexer)); -+ cl_assert(pack_name_2); -+ git_buf_dispose(&buf); -+ git_mempack_reset(_mempack_backend); -+ git_indexer_free(indexer); -+ git_packbuilder_free(pb); -+ git_treebuilder_free(tb); -+ -+ /* Assertions */ -+ -+ assert(pack_name_1); -+ assert(pack_name_2); -+ -+ repo_path = git_repository_path(_repo); -+ -+ snprintf(sbuf, sizeof(sbuf), "objects/pack/pack-%s.pack", pack_name_1); -+ git_str_joinpath(&pack_path_1, repo_path, sbuf); -+ snprintf(sbuf, sizeof(sbuf), "objects/pack/pack-%s.pack", pack_name_2); -+ git_str_joinpath(&pack_path_2, repo_path, sbuf); -+ -+ /* If they're the same, something definitely went wrong. 
*/ -+ cl_assert(strcmp(pack_name_1, pack_name_2) != 0); -+ -+ cl_git_pass(git_odb_backend_one_pack(&pack_odb_backend_1, pack_path_1.ptr)); -+ cl_assert(pack_odb_backend_1->exists(pack_odb_backend_1, &oid_blob_1)); -+ cl_assert(pack_odb_backend_1->exists(pack_odb_backend_1, &oid_blob_2)); -+ cl_assert(!pack_odb_backend_1->exists(pack_odb_backend_1, &oid_blob_3)); -+ cl_assert(pack_odb_backend_1->exists(pack_odb_backend_1, &oid_tree_1)); -+ cl_assert(!pack_odb_backend_1->exists(pack_odb_backend_1, &oid_tree_2)); -+ -+ cl_git_pass(git_odb_backend_one_pack(&pack_odb_backend_2, pack_path_2.ptr)); -+ /* blob 1 is already in the packfile 1, so packfile 2 must not include it, in order to be _thin_. */ -+ cl_assert(!pack_odb_backend_2->exists(pack_odb_backend_2, &oid_blob_1)); -+ cl_assert(!pack_odb_backend_2->exists(pack_odb_backend_2, &oid_blob_2)); -+ cl_assert(pack_odb_backend_2->exists(pack_odb_backend_2, &oid_blob_3)); -+ cl_assert(!pack_odb_backend_2->exists(pack_odb_backend_2, &oid_tree_1)); -+ cl_assert(pack_odb_backend_2->exists(pack_odb_backend_2, &oid_tree_2)); -+ -+ pack_odb_backend_1->free(pack_odb_backend_1); -+ pack_odb_backend_2->free(pack_odb_backend_2); -+ free((void *)pack_name_1); -+ free((void *)pack_name_2); -+ git_str_dispose(&pack_path_1); -+ git_str_dispose(&pack_path_2); -+ -+} diff --git a/packaging/patches/libgit2-packbuilder-callback-interruptible.patch b/packaging/patches/libgit2-packbuilder-callback-interruptible.patch deleted file mode 100644 index c67822ff755..00000000000 --- a/packaging/patches/libgit2-packbuilder-callback-interruptible.patch +++ /dev/null @@ -1,930 +0,0 @@ -commit e9823c5da4fa977c46bcb97167fbdd0d70adb5ff -Author: Robert Hensing -Date: Mon Aug 26 20:07:04 2024 +0200 - - Make packbuilder interruptible using progress callback - - Forward errors from packbuilder->progress_cb - - This allows the callback to terminate long-running operations when - the application is interrupted. 
- -diff --git a/include/git2/pack.h b/include/git2/pack.h -index 0f6bd2ab9..bee72a6c0 100644 ---- a/include/git2/pack.h -+++ b/include/git2/pack.h -@@ -247,6 +247,9 @@ typedef int GIT_CALLBACK(git_packbuilder_progress)( - * @param progress_cb Function to call with progress information during - * pack building. Be aware that this is called inline with pack building - * operations, so performance may be affected. -+ * When progress_cb returns an error, the pack building process will be -+ * aborted and the error will be returned from the invoked function. -+ * `pb` must then be freed. - * @param progress_cb_payload Payload for progress callback. - * @return 0 or an error code - */ -diff --git a/src/libgit2/pack-objects.c b/src/libgit2/pack-objects.c -index b2d80cba9..7c331c2d5 100644 ---- a/src/libgit2/pack-objects.c -+++ b/src/libgit2/pack-objects.c -@@ -932,6 +932,9 @@ static int report_delta_progress( - { - int ret; - -+ if (pb->failure) -+ return pb->failure; -+ - if (pb->progress_cb) { - uint64_t current_time = git_time_monotonic(); - uint64_t elapsed = current_time - pb->last_progress_report_time; -@@ -943,8 +946,10 @@ static int report_delta_progress( - GIT_PACKBUILDER_DELTAFICATION, - count, pb->nr_objects, pb->progress_cb_payload); - -- if (ret) -+ if (ret) { -+ pb->failure = ret; - return git_error_set_after_callback(ret); -+ } - } - } - -@@ -976,7 +981,10 @@ static int find_deltas(git_packbuilder *pb, git_pobject **list, - } - - pb->nr_deltified += 1; -- report_delta_progress(pb, pb->nr_deltified, false); -+ if ((error = report_delta_progress(pb, pb->nr_deltified, false)) < 0) { -+ GIT_ASSERT(git_packbuilder__progress_unlock(pb) == 0); -+ goto on_error; -+ } - - po = *list++; - (*list_size)--; -@@ -1124,6 +1132,10 @@ struct thread_params { - size_t depth; - size_t working; - size_t data_ready; -+ -+ /* A pb->progress_cb can stop the packing process by returning an error. -+ When that happens, all threads observe the error and stop voluntarily. 
*/ -+ bool stopped; - }; - - static void *threaded_find_deltas(void *arg) -@@ -1133,7 +1145,12 @@ static void *threaded_find_deltas(void *arg) - while (me->remaining) { - if (find_deltas(me->pb, me->list, &me->remaining, - me->window, me->depth) < 0) { -- ; /* TODO */ -+ me->stopped = true; -+ GIT_ASSERT_WITH_RETVAL(git_packbuilder__progress_lock(me->pb) == 0, NULL); -+ me->working = false; -+ git_cond_signal(&me->pb->progress_cond); -+ GIT_ASSERT_WITH_RETVAL(git_packbuilder__progress_unlock(me->pb) == 0, NULL); -+ return NULL; - } - - GIT_ASSERT_WITH_RETVAL(git_packbuilder__progress_lock(me->pb) == 0, NULL); -@@ -1175,8 +1192,7 @@ static int ll_find_deltas(git_packbuilder *pb, git_pobject **list, - pb->nr_threads = git__online_cpus(); - - if (pb->nr_threads <= 1) { -- find_deltas(pb, list, &list_size, window, depth); -- return 0; -+ return find_deltas(pb, list, &list_size, window, depth); - } - - p = git__mallocarray(pb->nr_threads, sizeof(*p)); -@@ -1195,6 +1211,7 @@ static int ll_find_deltas(git_packbuilder *pb, git_pobject **list, - p[i].depth = depth; - p[i].working = 1; - p[i].data_ready = 0; -+ p[i].stopped = 0; - - /* try to split chunks on "path" boundaries */ - while (sub_size && sub_size < list_size && -@@ -1262,7 +1279,7 @@ static int ll_find_deltas(git_packbuilder *pb, git_pobject **list, - (!victim || victim->remaining < p[i].remaining)) - victim = &p[i]; - -- if (victim) { -+ if (victim && !target->stopped) { - sub_size = victim->remaining / 2; - list = victim->list + victim->list_size - sub_size; - while (sub_size && list[0]->hash && -@@ -1286,7 +1303,7 @@ static int ll_find_deltas(git_packbuilder *pb, git_pobject **list, - } - target->list_size = sub_size; - target->remaining = sub_size; -- target->working = 1; -+ target->working = 1; /* even when target->stopped, so that we don't process this thread again */ - GIT_ASSERT(git_packbuilder__progress_unlock(pb) == 0); - - if (git_mutex_lock(&target->mutex)) { -@@ -1299,7 +1316,7 @@ static int 
ll_find_deltas(git_packbuilder *pb, git_pobject **list, - git_cond_signal(&target->cond); - git_mutex_unlock(&target->mutex); - -- if (!sub_size) { -+ if (target->stopped || !sub_size) { - git_thread_join(&target->thread, NULL); - git_cond_free(&target->cond); - git_mutex_free(&target->mutex); -@@ -1308,7 +1325,7 @@ static int ll_find_deltas(git_packbuilder *pb, git_pobject **list, - } - - git__free(p); -- return 0; -+ return pb->failure; - } - - #else -@@ -1319,6 +1336,7 @@ int git_packbuilder__prepare(git_packbuilder *pb) - { - git_pobject **delta_list; - size_t i, n = 0; -+ int error; - - if (pb->nr_objects == 0 || pb->done) - return 0; /* nothing to do */ -@@ -1327,8 +1345,10 @@ int git_packbuilder__prepare(git_packbuilder *pb) - * Although we do not report progress during deltafication, we - * at least report that we are in the deltafication stage - */ -- if (pb->progress_cb) -- pb->progress_cb(GIT_PACKBUILDER_DELTAFICATION, 0, pb->nr_objects, pb->progress_cb_payload); -+ if (pb->progress_cb) { -+ if ((error = pb->progress_cb(GIT_PACKBUILDER_DELTAFICATION, 0, pb->nr_objects, pb->progress_cb_payload)) < 0) -+ return git_error_set_after_callback(error); -+ } - - delta_list = git__mallocarray(pb->nr_objects, sizeof(*delta_list)); - GIT_ERROR_CHECK_ALLOC(delta_list); -@@ -1345,31 +1365,33 @@ int git_packbuilder__prepare(git_packbuilder *pb) - - if (n > 1) { - git__tsort((void **)delta_list, n, type_size_sort); -- if (ll_find_deltas(pb, delta_list, n, -+ if ((error = ll_find_deltas(pb, delta_list, n, - GIT_PACK_WINDOW + 1, -- GIT_PACK_DEPTH) < 0) { -+ GIT_PACK_DEPTH)) < 0) { - git__free(delta_list); -- return -1; -+ return error; - } - } - -- report_delta_progress(pb, pb->nr_objects, true); -+ error = report_delta_progress(pb, pb->nr_objects, true); - - pb->done = true; - git__free(delta_list); -- return 0; -+ return error; - } - --#define PREPARE_PACK if (git_packbuilder__prepare(pb) < 0) { return -1; } -+#define PREPARE_PACK error = git_packbuilder__prepare(pb); 
if (error < 0) { return error; } - - int git_packbuilder_foreach(git_packbuilder *pb, int (*cb)(void *buf, size_t size, void *payload), void *payload) - { -+ int error; - PREPARE_PACK; - return write_pack(pb, cb, payload); - } - - int git_packbuilder__write_buf(git_str *buf, git_packbuilder *pb) - { -+ int error; - PREPARE_PACK; - - return write_pack(pb, &write_pack_buf, buf); -diff --git a/src/libgit2/pack-objects.h b/src/libgit2/pack-objects.h -index bbc8b9430..380a28ebe 100644 ---- a/src/libgit2/pack-objects.h -+++ b/src/libgit2/pack-objects.h -@@ -100,6 +100,10 @@ struct git_packbuilder { - uint64_t last_progress_report_time; - - bool done; -+ -+ /* A non-zero error code in failure causes all threads to shut themselves -+ down. Some functions will return this error code. */ -+ volatile int failure; - }; - - int git_packbuilder__write_buf(git_str *buf, git_packbuilder *pb); -diff --git a/tests/libgit2/pack/cancel.c b/tests/libgit2/pack/cancel.c -new file mode 100644 -index 000000000..a0aa9716a ---- /dev/null -+++ b/tests/libgit2/pack/cancel.c -@@ -0,0 +1,240 @@ -+#include "clar_libgit2.h" -+#include "futils.h" -+#include "pack.h" -+#include "hash.h" -+#include "iterator.h" -+#include "vector.h" -+#include "posix.h" -+#include "hash.h" -+#include "pack-objects.h" -+ -+static git_repository *_repo; -+static git_revwalk *_revwalker; -+static git_packbuilder *_packbuilder; -+static git_indexer *_indexer; -+static git_vector _commits; -+static int _commits_is_initialized; -+static git_indexer_progress _stats; -+ -+extern bool git_disable_pack_keep_file_checks; -+ -+static void pack_packbuilder_init(const char *sandbox) { -+ _repo = cl_git_sandbox_init(sandbox); -+ /* cl_git_pass(p_chdir(sandbox)); */ -+ cl_git_pass(git_revwalk_new(&_revwalker, _repo)); -+ cl_git_pass(git_packbuilder_new(&_packbuilder, _repo)); -+ cl_git_pass(git_vector_init(&_commits, 0, NULL)); -+ _commits_is_initialized = 1; -+ memset(&_stats, 0, sizeof(_stats)); -+ p_fsync__cnt = 0; -+} -+ -+void 
test_pack_cancel__initialize(void) -+{ -+ pack_packbuilder_init("small.git"); -+} -+ -+void test_pack_cancel__cleanup(void) -+{ -+ git_oid *o; -+ unsigned int i; -+ -+ cl_git_pass(git_libgit2_opts(GIT_OPT_ENABLE_FSYNC_GITDIR, 0)); -+ cl_git_pass(git_libgit2_opts(GIT_OPT_DISABLE_PACK_KEEP_FILE_CHECKS, false)); -+ -+ if (_commits_is_initialized) { -+ _commits_is_initialized = 0; -+ git_vector_foreach(&_commits, i, o) { -+ git__free(o); -+ } -+ git_vector_free(&_commits); -+ } -+ -+ git_packbuilder_free(_packbuilder); -+ _packbuilder = NULL; -+ -+ git_revwalk_free(_revwalker); -+ _revwalker = NULL; -+ -+ git_indexer_free(_indexer); -+ _indexer = NULL; -+ -+ /* cl_git_pass(p_chdir("..")); */ -+ cl_git_sandbox_cleanup(); -+ _repo = NULL; -+} -+ -+static int seed_packbuilder(void) -+{ -+ int error; -+ git_oid oid, *o; -+ unsigned int i; -+ -+ git_revwalk_sorting(_revwalker, GIT_SORT_TIME); -+ cl_git_pass(git_revwalk_push_ref(_revwalker, "HEAD")); -+ -+ while (git_revwalk_next(&oid, _revwalker) == 0) { -+ o = git__malloc(sizeof(git_oid)); -+ cl_assert(o != NULL); -+ git_oid_cpy(o, &oid); -+ cl_git_pass(git_vector_insert(&_commits, o)); -+ } -+ -+ git_vector_foreach(&_commits, i, o) { -+ if((error = git_packbuilder_insert(_packbuilder, o, NULL)) < 0) -+ return error; -+ } -+ -+ git_vector_foreach(&_commits, i, o) { -+ git_object *obj; -+ cl_git_pass(git_object_lookup(&obj, _repo, o, GIT_OBJECT_COMMIT)); -+ error = git_packbuilder_insert_tree(_packbuilder, -+ git_commit_tree_id((git_commit *)obj)); -+ git_object_free(obj); -+ if (error < 0) -+ return error; -+ } -+ -+ return 0; -+} -+ -+static int fail_stage; -+ -+static int packbuilder_cancel_after_n_calls_cb(int stage, uint32_t current, uint32_t total, void *payload) -+{ -+ -+ /* Force the callback to run again on the next opportunity regardless -+ of how fast we're running. 
*/ -+ _packbuilder->last_progress_report_time = 0; -+ -+ if (stage == fail_stage) { -+ int *calls = (int *)payload; -+ int n = *calls; -+ /* Always decrement, including past zero. This way the error is only -+ triggered once, making sure it is picked up immediately. */ -+ --*calls; -+ if (n == 0) -+ return GIT_EUSER; -+ } -+ -+ return 0; -+} -+ -+static void test_cancel(int n) -+{ -+ -+ int calls_remaining = n; -+ int err; -+ git_buf buf = GIT_BUF_INIT; -+ -+ /* Switch to a small repository, so that `packbuilder_cancel_after_n_calls_cb` -+ can hack the time to call the callback on every opportunity. */ -+ -+ cl_git_pass(git_packbuilder_set_callbacks(_packbuilder, &packbuilder_cancel_after_n_calls_cb, &calls_remaining)); -+ err = seed_packbuilder(); -+ if (!err) -+ err = git_packbuilder_write_buf(&buf, _packbuilder); -+ -+ cl_assert_equal_i(GIT_EUSER, err); -+} -+void test_pack_cancel__cancel_after_add_0(void) -+{ -+ fail_stage = GIT_PACKBUILDER_ADDING_OBJECTS; -+ test_cancel(0); -+} -+ -+void test_pack_cancel__cancel_after_add_1(void) -+{ -+ cl_skip(); -+ fail_stage = GIT_PACKBUILDER_ADDING_OBJECTS; -+ test_cancel(1); -+} -+ -+void test_pack_cancel__cancel_after_delta_0(void) -+{ -+ fail_stage = GIT_PACKBUILDER_DELTAFICATION; -+ test_cancel(0); -+} -+ -+void test_pack_cancel__cancel_after_delta_1(void) -+{ -+ fail_stage = GIT_PACKBUILDER_DELTAFICATION; -+ test_cancel(1); -+} -+ -+void test_pack_cancel__cancel_after_delta_0_threaded(void) -+{ -+#ifdef GIT_THREADS -+ git_packbuilder_set_threads(_packbuilder, 8); -+ fail_stage = GIT_PACKBUILDER_DELTAFICATION; -+ test_cancel(0); -+#else -+ cl_skip(); -+#endif -+} -+ -+void test_pack_cancel__cancel_after_delta_1_threaded(void) -+{ -+#ifdef GIT_THREADS -+ git_packbuilder_set_threads(_packbuilder, 8); -+ fail_stage = GIT_PACKBUILDER_DELTAFICATION; -+ test_cancel(1); -+#else -+ cl_skip(); -+#endif -+} -+ -+static int foreach_cb(void *buf, size_t len, void *payload) -+{ -+ git_indexer *idx = (git_indexer *) payload; -+ 
cl_git_pass(git_indexer_append(idx, buf, len, &_stats)); -+ return 0; -+} -+ -+void test_pack_cancel__foreach(void) -+{ -+ git_indexer *idx; -+ -+ seed_packbuilder(); -+ -+#ifdef GIT_EXPERIMENTAL_SHA256 -+ cl_git_pass(git_indexer_new(&idx, ".", GIT_OID_SHA1, NULL)); -+#else -+ cl_git_pass(git_indexer_new(&idx, ".", 0, NULL, NULL)); -+#endif -+ -+ cl_git_pass(git_packbuilder_foreach(_packbuilder, foreach_cb, idx)); -+ cl_git_pass(git_indexer_commit(idx, &_stats)); -+ git_indexer_free(idx); -+} -+ -+static int foreach_cancel_cb(void *buf, size_t len, void *payload) -+{ -+ git_indexer *idx = (git_indexer *)payload; -+ cl_git_pass(git_indexer_append(idx, buf, len, &_stats)); -+ return (_stats.total_objects > 2) ? -1111 : 0; -+} -+ -+void test_pack_cancel__foreach_with_cancel(void) -+{ -+ git_indexer *idx; -+ -+ seed_packbuilder(); -+ -+#ifdef GIT_EXPERIMENTAL_SHA256 -+ cl_git_pass(git_indexer_new(&idx, ".", GIT_OID_SHA1, NULL)); -+#else -+ cl_git_pass(git_indexer_new(&idx, ".", 0, NULL, NULL)); -+#endif -+ -+ cl_git_fail_with( -+ git_packbuilder_foreach(_packbuilder, foreach_cancel_cb, idx), -1111); -+ git_indexer_free(idx); -+} -+ -+void test_pack_cancel__keep_file_check(void) -+{ -+ assert(!git_disable_pack_keep_file_checks); -+ cl_git_pass(git_libgit2_opts(GIT_OPT_DISABLE_PACK_KEEP_FILE_CHECKS, true)); -+ assert(git_disable_pack_keep_file_checks); -+} -diff --git a/tests/resources/small.git/HEAD b/tests/resources/small.git/HEAD -new file mode 100644 -index 0000000000000000000000000000000000000000..cb089cd89a7d7686d284d8761201649346b5aa1c -GIT binary patch -literal 23 -ecmXR)O|w!cN=+-)&qz&7Db~+TEG|hc;sO9;xClW2 - -literal 0 -HcmV?d00001 - -diff --git a/tests/resources/small.git/config b/tests/resources/small.git/config -new file mode 100644 -index 0000000000000000000000000000000000000000..07d359d07cf1ed0c0074fdad71ffff5942f0adfa -GIT binary patch -literal 66 -zcmaz}&M!)h<>D+#Eyypk5{uv*03B5png9R* - -literal 0 -HcmV?d00001 - -diff --git 
a/tests/resources/small.git/description b/tests/resources/small.git/description -new file mode 100644 -index 0000000000000000000000000000000000000000..498b267a8c7812490d6479839c5577eaaec79d62 -GIT binary patch -literal 73 -zcmWH|%S+5nO;IRHEyyp$t+PQ$;d2LNXyJgRZve!Elw`VEGWs$&r??@ -Q$yWgB0LrH#Y0~2Y0PnOK(EtDd - -literal 0 -HcmV?d00001 - -diff --git a/tests/resources/small.git/hooks/applypatch-msg.sample b/tests/resources/small.git/hooks/applypatch-msg.sample -new file mode 100755 -index 0000000000000000000000000000000000000000..dcbf8167fa503f96ff6a39c68409007eadc9b1f3 -GIT binary patch -literal 535 -zcmY+AX;Q;542A#a6e8^~FyI8r&I~hf2QJ{GO6(?HuvEG*+#R{4EI%zhfA8r{j%sh$ -zHE~E-UtQd8{bq4@*S%jq3@bmxwQDXGv#o!N`o3AHMw3xD)hy0#>&E&zzl%vRffomqo=v6>_2NRa#TwDdYvTVQyueO*15Nlo%=#DXgC0bhF3vTa`LQGaO9;jeD$OP?~ -za$G4Q{z+Q_{5V?5h;a-noM$P{<>Q~j4o7u%#P6^o^16{y*jU=-K8GYD_dUtdj4FSx -zSC0C!DvAnv%S!4dgk -XB^)11aoGMJPCqWs%IS0YSv(eBT&%T6 - -literal 0 -HcmV?d00001 - -diff --git a/tests/resources/small.git/hooks/commit-msg.sample b/tests/resources/small.git/hooks/commit-msg.sample -new file mode 100755 -index 0000000000000000000000000000000000000000..f3780f92349638ebe32f6baf24c7c3027675d7c9 -GIT binary patch -literal 953 -zcmaJy@-{3h^^Cx;#d0zEA@DDc$nY4ez&|=%jTg@_HU*ub=!!y$xW09TSjlj -z(`I@QCsM`!9&80$I98wsQ8yK#)Orb<8re8FjkKh630D$QUDwi~(gkX=RunYm$rDjk -zlp%RUSnzA#6yjdG5?T?2DcYKp+v_lts0ljn&bh3J0bD5@N@1UKZ190O6ZeWr-BuZ^ -zWRebCX%(%=Xoj#(xYk1Cjtr!=tyBesf@m6}8zY6Ijbz9i9ziI_jG9MvR -zDH*e>^ga9IR?2wrSrAVm;eButj4Y>7(E2?b~jsu>& -zRKCJ7bp#19sqYh627wD%D9R$8=Ml$TNlumDypl~$jBu*G>5fIR^FB0h0Ex&TGZNr> -zL5hs1_K>taRb!|ThN9ns7^@4MXKP+6aGI_UK)T-M#rcP$;kN(Vcf#P)+5GzWa{l@J -z>-E{`$1iiNVYxq27}j;uo%;)r3kJI2xCFF~Ux;$Q%) -wjbk6JlDCM`jU&P+UVOvg`|iYl<7~9k>HHB4I;pdlQ=I-^$DrHaN$@lH1?P!0U;qFB - -literal 0 -HcmV?d00001 - -diff --git a/tests/resources/small.git/hooks/fsmonitor-watchman.sample b/tests/resources/small.git/hooks/fsmonitor-watchman.sample -new file mode 100755 -index 
0000000000000000000000000000000000000000..41184ebc318c159f51cd1ebe2290559805df89d8 -GIT binary patch -literal 4777 -zcmbtYYi}F368$Xwipg4lq(BeHMvzvH-4;n7DGJBPqq#tw3aed8+IU5-m)yvL>;Cqh -z8FFRGj$`9CA8aoJ?j^$%==FV``-=rhLcPW`McSytRm~mEO7_&_cAVZrf1fFy*ha@8oe%*-aBYE -zcjzZg>LOkgxuUr-XJnHyD;zmPnRaSc#!k_P*d_BttRdc+J6G7za5#+^Y1nkc2Oowk`ya47uUR3Feu?B(w;S{(VYzxh}q-=#zP@uxSx{wbyPUMFU;K(06)$o{07&3yI?q{GqMcQ1c_^M<0< -zF4acAV)Il-V(rCTC1(;bsZ*}bl8dmejAk~yb`B}!^0;g^(o9kGUfZfDOvyp@x4OQt -zSgWh6T|3eq;9MFs8-#z+FDM1h(IjRUP|``PxupgJ7CUHOH90gbgl^2~97`?_X{P)) -zB*$r1cDlF-%azKND}?Gv`2K8-9v5e`gQoft=j?T<&a13c^!wY_$D`5z-X1g?ty&6- -zQN50{8?bUk9AI->^W@~~nkOghHIC2YN+AXkLQG_2-{Pq3%{`3KUMeG$iIn%%^6*NYb -zn|_BdV#C)n4565VccX;uT8&z3vSi!HXGbUj2B!R -zdz~&#fk#L-&k$fLwo$4?>12g@AXOKFekuo#6EHB%gmpD?1eyh%N8s{2wGoTu -z*@6cEZ^ZW!FAF_|JL`NkV7k}0ow|-2jHwbgH0;c@Dq*o?@&c*HnGdyx6^su8Qk%2{ -z*ye(dxO*6-&>qn1+zw}tc6;=sOX{4WB=VqjTS^))y1jlX2Q;=e!qMmFA5lC$#;BxC -z=Y%tRpWxb+_uQAvAw7Q{HGV#R$xb&udLCzZ+HN?kTyB};1EJ8UlQ5!>5eGW@)RX0n -zkjj>EF!3=0Gl^8dzv$B^NMGRxJoqN4A`xq-@wCbrx*u2NmIJ1xZ%H -zh;{|4T3(!E9sY#Ni(wUJYs1MmIc9bl)(4Nl3_wD_BWB>i<1S(LX7m*{Q7PU$muMS* -zM!%0EZx-Vw=Zey;erC?SNxF;pY@^A%-krqzfLV2meBp1vWdyArFYn`DD19T)Hw(?n -z)}{NP(Lk(o*?gl#B@pP7^*r|=;PIDT4|F#{2Hzh-AL0Rv$6uT;n|WzE4=slK?on@(fZeGhRgQCu56qB -z{+n81Az96qnQjMY*-*r-KV*7;Z#4QuJRJJV$M^KdldiMhj?ImK6~FvwJ*L5a){QoM=L5TYHkGO1$UrO3`a>{?Opw|b -zG(#59NQ#jFL9v~vgOVkM@^^(^A}onOE))yWEwhIlk&{ZyseZ^O0b=w8&O=BK{k<5B -k^Q-B@eG}LeHrquz%(SVEp_N)VhYZikCW__82JXfD17`J9Qvd(} - -literal 0 -HcmV?d00001 - -diff --git a/tests/resources/small.git/hooks/pre-applypatch.sample b/tests/resources/small.git/hooks/pre-applypatch.sample -new file mode 100755 -index 0000000000000000000000000000000000000000..625837e25f91421b8809a097f4a3103dd387ef31 -GIT binary patch -literal 481 -zcmY+ATTa6;5Jms9iouO45IBJXEg&Jm9@v1LPHMM_ZR|;#6tQh$71hSXq*MxP;V& -zj0cY7SCL=x4`a46sF)C>94Gk%=3q$W2s;j6iHtB2$R0%gix4oK@&T~=ALd_o*CKxt -I-`Pv{1Bpzc>;M1& - -literal 0 -HcmV?d00001 - -diff --git 
a/tests/resources/small.git/hooks/pre-commit.sample b/tests/resources/small.git/hooks/pre-commit.sample -new file mode 100755 -index 0000000000000000000000000000000000000000..10b39b2e26981b8f87ea424e735ef87359066dbb -GIT binary patch -literal 1706 -zcmZuxU2ohr5PY_N#pZ0-F<{-v&v-X^RA+u>k}E$4d&uD7=g_fA8+pNNV=4s0|iD3p<=DTXClTS -zXV23tJ;ECmN@M0j@zUAKEYW@3bv!SeYZ8ZH`YQNTApFVNc;F|9r5p4TqGs=>8E?6y -zi|gY{iM#PG1nL?UE9YCnWTk72kgZPG*Usqw!~Qd3c?~@w2?%eg@~)+VlSs6N5Yf2^ -zz;owF#K#r^&KMq1A`oqVGFpD&-!Pv|Rc -zO3KSqA@h9nSc%bm`0)Amk6*J}@14J*1-219l%%7D!Pl}UK>|lVi0Dfgu2jN3WC!uL -z0ej??b2iSehVgdnWHmZV4kUo*QL#aiIp}U=9x)IXk}JJ7VQ;CI9Rtn5e0VcjbYcVt+`x5D+svCGD;Z5hm*E$jSEQZ%SQ(}oLgslTvrKK@9Qf#b!hajVFnp9@oIix;NcI9Wk -xjnh0ya!AWet{I7YpD;y6HXyzI*lfSvH=o6*7mJZPkuaYpm>vzZ`wyGEBtOQPo|pgt - -literal 0 -HcmV?d00001 - -diff --git a/tests/resources/small.git/hooks/pre-push.sample b/tests/resources/small.git/hooks/pre-push.sample -new file mode 100755 -index 0000000000000000000000000000000000000000..02cbd80c287f959fe33975bb66c56293e3f5b396 -GIT binary patch -literal 1431 -zcmaJ>U60!~5PUX&#a1@z9B{IIZkjLT0t5kq9#8~D(I5{+8&J~9;#ndUk~-ZT`r|uG -z$#K$$J{TsKs*LP1}9!GoZ@4I4myMMG_di|of -z%?llx{O8TS-#^;(OioEmPy%kwWQBA1OMzV{hsQ8XFzS1k!~YQoLa5 -zhtP1fA$q6VmMbbAC_9)4I628k*O5J$NR19uHe4QYDK<==I~SQk)Nu%xQ~KH -z53w=!ke(FGb_PpnZfd*+hnXDTn;2*`u^~;?+5C~cn?bRka7NR%06%e6O91{MAgN6J -zmlO8{Biw4&wr&&(z4p3eln`E}XR9m9bNYZ7Ibrg(4yZIXrfgD7N*AFD7L3YSM#j}% -zo__rOS5fr;@8UM<6cl+cv_$YB$PQ&9dv($eM*))g!_cu!QcSh-mqE9i#QDZT)=o#` -z?8!RtE?w6p?GkGZ-6yt_p~5~4ecu|Sf^)6096%h*q-eNiEA1;Xwg)p~Q&iGSG7-IQ -z9aII&`ps$WOojFA`*bjGkFk|E@sHHuD}W^d`7YJ3YE^zrQnqR -zGoq?;YGKe)93o|_=^f%3U1KYZGPOXRRxK7w`UUbMMa3<86OmVH!EKP$8RCrn9mWX+ -zC?9yF!fRVLmud3hF<}x;;sR}f(*r}6Gap3fR6zLHR~kbMgD{98N`L+r&?3p~*0+FX -zcAL%j=(SO}xTJUTvA`&Lf`2mv4koPG9&|;2+68$XxiXKL@ma;l5d2^5Ba_rPh_DHI-u1#&_upttZXp;no03$20|NFiM -zK#D#xQ>!Z3JkX8T-LDVm!B5j7y_{;JDmmTTef+K1oIiPzeEr+Ai*<2PUgnG4^ZB>p -z_fkAvoR1emuf~ri^K$-px=4#D-vY9w& -z`bCv#2zVn=YnJyeNey(Y 
-zRh`9vtLw~A+5zsjp|W0Nsa|29Rm!B>OoG5a+vi;ari8O>KkU!KAWg_fa3btK2x*_@ -z0bEc7J;Ubghm}n9bOi(Sv_B66nQ7U)J7f0fO}8Wuf*uorcIgEG -zOHc|-V6+HlRhOP}?Cn?@5iwSl43abmBA^2lyL$+cpabCGVES+v^j^FO_}?FIp%En%Ll?Z*7*}TwrZyg5OSZ9rY-`aU~Mc-jjv{Ll)FLMgtB4ujktfQ`Xhqrka -zT=P!A;9w^;Z?PqpLwOLu=cj3L>TdUKw2;DMu)`oVkj}#bcDx4tYg=j%D`+i{W~fVM -zVmZ>W9VMyin9c-0KzI_;iZ-g|OyzuG`Yq%(%dvl;ifnVr0;jWE&S`z|rQu=!yHBBO -zx`OJ;oOQ(KKM<$(bC38o>pD0%|HA(E0TRw7qj$fJ_pRN+7Nm>dSC(gLg{(`t+5Z=?o+}wXU4tHy+&%F&aRhFebeEhR2R5|$#Ycbp^w@t -zTl%=f1t=w+WpJzF<|CE@?SCNAz)%9?w33lQ8vrHJqPfH9@}qs*QXOG71W=ylx;wOB -zcx!Bj^)Yy6WX$a^vBkBJ5CobqlaDx_B0c<3b+8)f84LCrt;e;qxc+7>VbwVK{skNv!wvBiTa^9Iu -zkwP;VK)jH$WJ{`MRwAA9fal!y0dtV;FWg8PTkWU>CwnqD>1ZX2B@;$DlX%C5MI+}{ -z9xQVnffR*~v2KAUj*hCdgul~`bk#mk`o>zk9)<2Uc8?hUZAEvd!`9em)~$Z)zev>w^8 -zyAgCP_$&Y)7HSQ84`xG}OeTavaEswwF|8Xpi5iZzZa@hCiv(J-%bfFC&)HLlO+Rhw -zG6g?9eL5&A!SuJnQ6}LxG%tU+@vZ`i+!+Rz6iYvsTdhnPo7lW{m-}{hya@viX4)XZ -zngaw+j;gloB#|UwI@8sOmQpc`h+bicQJnQIB5eifIMQNgD2+oai33m!34~xU|0Azj -zhu$8z+T5^;Pxx@d{N)pzOJLSa^e;aDf$W%N5XcOf!mGC9l9j$Ev2h6N+6ZQC+CJzl -zaM7?S!SrFLS2DASjj(h6y1WN3N?|bmqmyzm!&nLoE|`rKBOc_yDF$a#FsUn!IQf(t -zdC&Us(kQz*7mvH^j*^MC@>wTDb}g%~sx*ng#>{@lR=XG-Z5_ -z#<9*Oh0joMzt;nS)ObAp)347`D=}r-;nV!TbIq&xrGRGsF6fZg+!VkfUei@_&l-M& -zPqQ+Dw)RV}+)I8RuqAxa`Pv8e&!_gXS=e2-un>=Ktn}-;%lLZxaVn?Q>yZCb2R3Wk -z77zr%;Rq&h|2ncqyKYmFI0148JVY7Q$V5p=dWj+Qqpu%i|xp2C=WaOb2Wudn^h0EcD%$p9YVU1fnoRV9`(cy(vv6K>FXS!2jY>1GnU--7)4usH&K -zao*&P^@9~YmUe|ZdLW@C>H;!*Vt3>Nw4M*;=?j(TBD#O@XCv0|MEhA;z}kTFRv@`tPHhp=&Yh -zg%Zhg4i7o_k{a5i&f5;tZ==%}^Sn4aD_6%qs_XAuJt&EumdH4Yu`UjT<-+XHTuHss+b -YOmM2;hq8Egm*4=7_P9T{21QBYH*F=mfB*mh - -literal 0 -HcmV?d00001 - -diff --git a/tests/resources/small.git/hooks/prepare-commit-msg.sample b/tests/resources/small.git/hooks/prepare-commit-msg.sample -new file mode 100755 -index 0000000000000000000000000000000000000000..b1970da1b6d3f42f00069fd17c325de72cda812e -GIT binary patch -literal 1702 -zcmb_cTW{Mo6n>t6#i?x6xmZ$SFLf{QfG*3r0L?Pg?px55l8$UTGO3bO;spKi{V3XX 
-z))weX0X>M9bNMcZ-6yG%>(n}JI2|25dr}WZBP@ih?JX^+@ -zu#5O48P>yRX(mfDIhYP)doc1&TADZa@ZGpusJ$6G+e$ZMcmC -zoOosDQPS}l{H?YPsq(4;0SGkATa9eeqAaDcjq8n2wALbFwU@2i@FAaRV!=uw-nwx1gKn2SvY -z>Ff>;2sg!+Hxfkwv1lsiii=p6WenF=5)6LZcQaZ=aS_}+-4Y&?!@HWh|<^gJ21!|T@+%On#w6azxPHV}XsRbe*w -zR_TZ2XEsQa1lPK~biYqg@0-RW@5J1@=<87cFzEUABdCoFH2CZo?}l(Z*!OFqUxo>K -z_d`l#4d9|H6;VPT{X?^{VJ>oL|D7K{BJwwqB>`YcPoGk+9hbvHnoQ{EM|kPgD_`wk -zKm4#2xu;-y`RAm!=L_BnLvJ8$AZm8@?)v<%vwvsw8AF2x6!mTT;c72A_~U9nIq0ST -zv)N0!I!^1p=g8-RQfx5)E_Mb_4I2vtQpI30XZ&t-9h5!Hn - -literal 0 -HcmV?d00001 - -diff --git a/tests/resources/small.git/hooks/push-to-checkout.sample b/tests/resources/small.git/hooks/push-to-checkout.sample -new file mode 100755 -index 0000000000000000000000000000000000000000..a80611e18896f212c390d845e49a3f6d5693b41d -GIT binary patch -literal 2840 -zcmai0U31$u5PXh)#YOS7cE^-rw@uolNhe9&aUS|HtvhX>G$45tVUYj>fRdF?|9kfU -zNR~aG=E)WbEbeyq7JTw}ZuHIE2kUtL<AoeCNptd-NM1aZLhESzC;I`+Ns -zfmNNjdAp^W8#Q*}l>CT7RB9F5(BbI8ly2l~+E};JW|>&d1)=epZ-8vm8ppkbEVn#R -zt30a5A-c(YQR8eM5%;|UAnO>rt!&@x@G@yp+92%w-}%(5P_+P&Wf_zb$f-Qrl5(7z -z2ah(bkE;!DK(&aAMuQ%1TS>ai?wSXCOCSj=_}8x4IbCx^$}9q)whwv)SBt| -zg#MX4;;Oau`m=MI9(^&zPbueY@~>3*ixX%mvR5m_1&nAg@ZKvY1E$O}&EtLiG;mhV -z1xhMIm~fGjmf_#{62f`y;09?I7M1W2tWQvz<}i9lR>OpQyUJi45_&*pQus&EkwY<> -zI|ZAx=*3i9a-)g)hXkvO7>UJ5MNgL(Z+-wpXVcgbSgpmFmbf1~DPA(OVGI&FNLeIE -zNH!_aiH$vsif$_j7=T2{cS(!DOI`~bn@)vSd-0d7xL=DF;UNP|tW}4ih>DvHtu9tY_pbJ6x(6E*hxgC -zzNDao%qlr-IE%YGbS4hF!n!on7#W3$bX-_hbZAaws^nHu#)Dx=WzdbJ>AKzAy@T$x -zSWE^x9+|TEHVEPyaPYa0DOChp?AeHSBBDbZNokQpAY{lE!7geZI=jV)G^2@l)&91Zb1+`T+oq9wWF -zRV~kGTGce0O~p^6mj{kT5kL(pv>r;Lvd7VDX*P>A^Th`$3cWO0L81p4Ysdo3ZP1(SrR-peEdTo;-@bkB((G -zPHYQXUL!@Q$e(OQ;R9r%@Afz+50I7>*^^c&&|E*r-jN)LH=pM4AqMwWxSv|nqjddE -Z4{_hwv8!W(T -zYw`X3V>TCdnSD1ru8&`j=2DIPbCT@SnIgUw>$+lEYP}+x8(BMYnr=iT3*ndq)xzaV -z>I+qjv}vC#8_9M+b1p#uNS0M0)q

    8!3p_LRQ0MA3M`!2foxzRUjbFY@}O~(ki=S -zqscnq8cU*dY)D$$cqE}n)V0yIk>CNKHCrndOtSP*HbOb;nbwAHSb;R+gs^?^Dve%) -zoW}t(*D}$>O3ab0TS^-;J|u&sb-PkZzo#kn*#xYt(;FGuwzSb^g&RDiGcOz9TB;Hu`nJh)$W=C=XCSm2AY=$w3G3P-V#Oo+N*;#2 -z4ijJ-pBZ=;T(RTgp_HYrD!uW-dTMfkuqY5jwOy)~gM;#=P^i{!l7`pXTS^s(&^{RU -zydaw}OpS#^D1cXM8?FW+fh`t7D(g;yr6|}fdaNtZBx3hlK~IpkTu3!Qq%R+zAo#t}Bs8^3$vHD+-TGT@`F>H1Cc#WAVW;&$S6%fE2d6@kLS0g&ihIM{}0z -z8#XhD>b>3{(BH|Px7}&lJ4%y1v(CihZJx@8MPoGdl*BJGD;usf*iS7%;{Joe; -zNFuBa>*~o&qETDPo~u&~$FxE1xb^x&(CbE`Y3GfsibL2rl+L;>P6j&Y3U>K$mkp*6 -zd`Q{<^+^&;GskGjwD-%!boR&i-TCA9UOR|@=GYb5x#+dhd7fkaVIR^pol`Mv+rUbmZ43dVL6^S7g3{NsPiG$iy$5EDB% -z6KIgnb$H(n&t3e4E6d4V7w^B?JS}JkG)PM6+X3Co`SQs($O*AA+MG~{S7RJ=cy-l& -z>~%3y`tjfx2>uOutB_^s -ziwG=e=ch|FQ0IkN91US7rhdQkXhwwt$gU0WEVDjo=IPb+?6PC=s8}J*ua(Ms))`UL -fi$|vMHn?H_tSE3ettp-hLlsZCxaLX8(nU;bVRB;Ce6@s#eu2|WvLz>- -zvy(&>Gyfp@+BtKnpqWkKi^+v{4jn_pNw_zeuxETifiGO|)w}OANj2n2D^K=o3j6P6uOL70#cbA{uzWXDlk1wr9GV1X(2W{RuTvjXV -zCmd8u -zH%V`94=q3)Dk)PHNrnFC(T1)Om6f{Usj;u1R->&XoCYVK2V3ZlgZuF?N}1+33OER*x -z*9Z=L=zI8CN>A_^jYjt0F$psO$sL=38q5q|SG)qCN6{^>RFh5E&l5GZ$pEahnF&d+ -z5c>64t}uJPkf~_!VUj#&N%nC-gUMj%=@B=!V>&}xtj2%@-mOm#rQUSJ3(ccmc+fza -znZ#uxF>N?QN5UrIEd!5RgHEfW#;(nKYF+D<*rdshJ$X-z2OZ2X;)nn@KSVdVhaA?}@3;6gZxb4v -zozoWSr{{+!h}zGpumG3H`=AvWpm^9kW;J$Jp^Xl*?8ckr`fqN%c|Z;VC0|cM4vSrk -zH_O8Yvh85nvJp^;``wo8=z0f`FWg?`>gO#y1hjX1{}rTlg9rwIKia8eyGexA3GnuR -z`Rg~XZoW;0pA)vI8=p5!+6sIn#C^FCvR>ffv39h6SCNi9v);%WD;WZ`of_MgwyRWy -z-yY%n*Y>X89W-v4`Ff%bx$Vkn}$!Ay}rnY6F$m-Kg*KD_+;Lx#g4|^&N -I02NaX#p`nv=Kufz - -literal 0 -HcmV?d00001 - -diff --git a/tests/resources/small.git/objects/af/5626b4a114abcb82d63db7c8082c3c4756e51b b/tests/resources/small.git/objects/af/5626b4a114abcb82d63db7c8082c3c4756e51b -new file mode 100644 -index 0000000000000000000000000000000000000000..822bc151862ec3763cf2d3fa2372b93bbd3a4b65 -GIT binary patch -literal 30 -mcmb>0i}&W3IZ_@1U=^!a~EV1casc=c+{&un1qQN*i9hD|0|m(2n|iwp*q%W -z%N;b$hu%cM`$TMo*~EnC1BFP&Pfj~;jZVKXQ96s_PhV<-XAROi+@-v8dBLUa`!;GB 
-k^iXlEv8$>R)1G>9th&t3j;s7J{?^9n|7U^`%mXoWC24Q^m!3%@{ - -literal 0 -HcmV?d00001 - diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh index b013190f97a..5ff760a6143 100644 --- a/scripts/install-multi-user.sh +++ b/scripts/install-multi-user.sh @@ -714,7 +714,7 @@ EOF place_channel_configuration() { if [ -z "${NIX_INSTALLER_NO_CHANNEL_ADD:-}" ]; then - echo "https://nixos.org/channels/nixpkgs-unstable nixpkgs" > "$SCRATCH/.nix-channels" + echo "https://channels.nixos.org/nixpkgs-unstable nixpkgs" > "$SCRATCH/.nix-channels" _sudo "to set up the default system channel (part 1)" \ install -m 0644 "$SCRATCH/.nix-channels" "$ROOT_HOME/.nix-channels" fi diff --git a/scripts/install-nix-from-tarball.sh b/scripts/install-nix-from-tarball.sh index fd00460ec25..f17e4c2af3b 100644 --- a/scripts/install-nix-from-tarball.sh +++ b/scripts/install-nix-from-tarball.sh @@ -213,7 +213,7 @@ fi # Subscribe the user to the Nixpkgs channel and fetch it. if [ -z "$NIX_INSTALLER_NO_CHANNEL_ADD" ]; then if ! "$nix/bin/nix-channel" --list | grep -q "^nixpkgs "; then - "$nix/bin/nix-channel" --add https://nixos.org/channels/nixpkgs-unstable + "$nix/bin/nix-channel" --add https://channels.nixos.org/nixpkgs-unstable fi if [ -z "$_NIX_INSTALLER_TEST" ]; then if ! "$nix/bin/nix-channel" --update nixpkgs; then diff --git a/src/external-api-docs/README.md b/src/external-api-docs/README.md index 8760ac88b4c..1940cc1c0fc 100644 --- a/src/external-api-docs/README.md +++ b/src/external-api-docs/README.md @@ -15,7 +15,7 @@ programmatically: 1. Embedding the evaluator 2. Writing language plug-ins -Embedding means you link the Nix C libraries in your program and use them from +Embedding means you link the Nix C API libraries in your program and use them from there. Adding a plug-in means you make a library that gets loaded by the Nix language evaluator, specified through a configuration option. 
diff --git a/src/json-schema-checks/.version b/src/json-schema-checks/.version new file mode 120000 index 00000000000..b7badcd0cc8 --- /dev/null +++ b/src/json-schema-checks/.version @@ -0,0 +1 @@ +../../.version \ No newline at end of file diff --git a/src/json-schema-checks/build-result b/src/json-schema-checks/build-result new file mode 120000 index 00000000000..8010d0fddd3 --- /dev/null +++ b/src/json-schema-checks/build-result @@ -0,0 +1 @@ +../../src/libstore-tests/data/build-result \ No newline at end of file diff --git a/src/json-schema-checks/build-trace-entry b/src/json-schema-checks/build-trace-entry new file mode 120000 index 00000000000..9175e750eaf --- /dev/null +++ b/src/json-schema-checks/build-trace-entry @@ -0,0 +1 @@ +../../src/libstore-tests/data/realisation \ No newline at end of file diff --git a/src/json-schema-checks/content-address b/src/json-schema-checks/content-address new file mode 120000 index 00000000000..194a265a1f7 --- /dev/null +++ b/src/json-schema-checks/content-address @@ -0,0 +1 @@ +../../src/libstore-tests/data/content-address \ No newline at end of file diff --git a/src/json-schema-checks/derivation b/src/json-schema-checks/derivation new file mode 120000 index 00000000000..3dc1cbe0670 --- /dev/null +++ b/src/json-schema-checks/derivation @@ -0,0 +1 @@ +../../src/libstore-tests/data/derivation \ No newline at end of file diff --git a/src/json-schema-checks/derivation-options b/src/json-schema-checks/derivation-options new file mode 120000 index 00000000000..00c6cde65c5 --- /dev/null +++ b/src/json-schema-checks/derivation-options @@ -0,0 +1 @@ +../libstore-tests/data/derivation \ No newline at end of file diff --git a/src/json-schema-checks/deriving-path b/src/json-schema-checks/deriving-path new file mode 120000 index 00000000000..4f50b2ee915 --- /dev/null +++ b/src/json-schema-checks/deriving-path @@ -0,0 +1 @@ +../../src/libstore-tests/data/derived-path \ No newline at end of file diff --git 
a/src/json-schema-checks/file-system-object b/src/json-schema-checks/file-system-object new file mode 120000 index 00000000000..b26e030c9d3 --- /dev/null +++ b/src/json-schema-checks/file-system-object @@ -0,0 +1 @@ +../../src/libutil-tests/data/memory-source-accessor \ No newline at end of file diff --git a/src/json-schema-checks/hash b/src/json-schema-checks/hash new file mode 120000 index 00000000000..d6876387963 --- /dev/null +++ b/src/json-schema-checks/hash @@ -0,0 +1 @@ +../../src/libutil-tests/data/hash \ No newline at end of file diff --git a/src/json-schema-checks/meson.build b/src/json-schema-checks/meson.build new file mode 100644 index 00000000000..20acfe41102 --- /dev/null +++ b/src/json-schema-checks/meson.build @@ -0,0 +1,253 @@ +# Run with: +# meson test --suite json-schema +# Run with: (without shell / configure) +# nix build .#nix-json-schema-checks + +project( + 'nix-json-schema-checks', + version : files('.version'), + meson_version : '>= 1.1', + license : 'LGPL-2.1-or-later', +) + +fs = import('fs') + +# Note: The 'jsonschema' package provides the 'jv' command +jv = find_program('jv', required : true) + +# The schema directory is a committed symlink to the actual schema location +schema_dir = meson.current_source_dir() / 'schema' + +# Get all example files +schemas = [ + { + 'stem' : 'file-system-object', + 'schema' : schema_dir / 'file-system-object-v1.yaml', + 'files' : [ + 'simple.json', + 'complex.json', + ], + }, + { + 'stem' : 'hash', + 'schema' : schema_dir / 'hash-v1.yaml', + 'files' : [ + 'sha256.json', + 'blake3.json', + ], + }, + { + 'stem' : 'content-address', + 'schema' : schema_dir / 'content-address-v1.yaml', + 'files' : [ + 'text.json', + 'nar.json', + ], + }, + { + 'stem' : 'store-path', + 'schema' : schema_dir / 'store-path-v1.yaml', + 'files' : [ + 'simple.json', + ], + }, + { + 'stem' : 'deriving-path', + 'schema' : schema_dir / 'deriving-path-v1.yaml', + 'files' : [ + 'single_opaque.json', + 'single_built.json', + 
'single_built_built.json', + ], + }, + { + 'stem' : 'build-trace-entry', + 'schema' : schema_dir / 'build-trace-entry-v1.yaml', + 'files' : [ + 'simple.json', + 'with-dependent-realisations.json', + 'with-signature.json', + ], + }, + { + 'stem' : 'derivation-options', + 'schema' : schema_dir / 'derivation-options-v1.yaml', + 'files' : [ + 'ia' / 'derivation-options' / 'defaults.json', + 'ia' / 'derivation-options' / 'all_set.json', + 'ia' / 'derivation-options' / 'structuredAttrs_defaults.json', + 'ia' / 'derivation-options' / 'structuredAttrs_all_set.json', + 'ca' / 'derivation-options' / 'all_set.json', + 'ca' / 'derivation-options' / 'structuredAttrs_all_set.json', + ], + }, +] + +# Derivation and Derivation output +schemas += [ + # Match overall + { + 'stem' : 'derivation', + 'schema' : schema_dir / 'derivation-v4.yaml', + 'files' : [ + 'dyn-dep-derivation.json', + 'simple-derivation.json', + ], + }, + { + 'stem' : 'derivation', + 'schema' : schema_dir / 'derivation-v4.yaml#/$defs/output/overall', + 'files' : [ + 'output-caFixedFlat.json', + 'output-caFixedNAR.json', + 'output-caFixedText.json', + 'output-caFloating.json', + 'output-deferred.json', + 'output-impure.json', + 'output-inputAddressed.json', + ], + }, + # Match exact variant + { + 'stem' : 'derivation', + 'schema' : schema_dir / 'derivation-v4.yaml#/$defs/output/inputAddressed', + 'files' : [ + 'output-inputAddressed.json', + ], + }, + { + 'stem' : 'derivation', + 'schema' : schema_dir / 'derivation-v4.yaml#/$defs/output/caFixed', + 'files' : [ + 'output-caFixedFlat.json', + 'output-caFixedNAR.json', + 'output-caFixedText.json', + ], + }, + { + 'stem' : 'derivation', + 'schema' : schema_dir / 'derivation-v4.yaml#/$defs/output/caFloating', + 'files' : [ + 'output-caFloating.json', + ], + }, + { + 'stem' : 'derivation', + 'schema' : schema_dir / 'derivation-v4.yaml#/$defs/output/deferred', + 'files' : [ + 'output-deferred.json', + ], + }, + { + 'stem' : 'derivation', + 'schema' : schema_dir / 
'derivation-v4.yaml#/$defs/output/impure', + 'files' : [ + 'output-impure.json', + ], + }, +] + +# Store object info +schemas += [ + # Match overall + { + 'stem' : 'store-object-info', + 'schema' : schema_dir / 'store-object-info-v2.yaml', + 'files' : [ + 'json-2' / 'pure.json', + 'json-2' / 'impure.json', + 'json-2' / 'empty_pure.json', + 'json-2' / 'empty_impure.json', + ], + }, + { + 'stem' : 'nar-info', + 'schema' : schema_dir / 'store-object-info-v2.yaml', + 'files' : [ + 'json-2' / 'pure.json', + 'json-2' / 'impure.json', + ], + }, + { + 'stem' : 'build-result', + 'schema' : schema_dir / 'build-result-v1.yaml', + 'files' : [ + 'success.json', + 'output-rejected.json', + 'not-deterministic.json', + ], + }, + # Match exact variant + { + 'stem' : 'store-object-info', + 'schema' : schema_dir / 'store-object-info-v2.yaml#/$defs/base', + 'files' : [ + 'json-2' / 'pure.json', + 'json-2' / 'empty_pure.json', + ], + }, + { + 'stem' : 'store-object-info', + 'schema' : schema_dir / 'store-object-info-v2.yaml#/$defs/impure', + 'files' : [ + 'json-2' / 'impure.json', + 'json-2' / 'empty_impure.json', + ], + }, + { + 'stem' : 'nar-info', + 'schema' : schema_dir / 'store-object-info-v2.yaml#/$defs/base', + 'files' : [ + 'json-2' / 'pure.json', + ], + }, + { + 'stem' : 'nar-info', + 'schema' : schema_dir / 'store-object-info-v2.yaml#/$defs/narInfo', + 'files' : [ + 'json-2' / 'impure.json', + ], + }, +] + +# Dummy store +schemas += [ + { + 'stem' : 'store', + 'schema' : schema_dir / 'store-v1.yaml', + 'files' : [ + 'empty.json', + 'one-flat-file.json', + 'one-derivation.json', + ], + }, +] + +# Validate each example against the schema +foreach schema : schemas + stem = schema['stem'] + schema_file = schema['schema'] + if '#' not in schema_file + # Validate the schema itself against JSON Schema Draft 04 + test( + stem + '-schema-valid', + jv, + args : [ + 'http://json-schema.org/draft-04/schema', + schema_file, + ], + suite : 'json-schema', + ) + endif + foreach example : 
schema['files'] + test( + stem + '-example-' + fs.stem(example), + jv, + args : [ + schema_file, + files(stem / example), + ], + suite : 'json-schema', + ) + endforeach +endforeach diff --git a/src/json-schema-checks/nar-info b/src/json-schema-checks/nar-info new file mode 120000 index 00000000000..0ba4d5870e9 --- /dev/null +++ b/src/json-schema-checks/nar-info @@ -0,0 +1 @@ +../../src/libstore-tests/data/nar-info \ No newline at end of file diff --git a/src/json-schema-checks/package.nix b/src/json-schema-checks/package.nix new file mode 100644 index 00000000000..a5ee1f0592f --- /dev/null +++ b/src/json-schema-checks/package.nix @@ -0,0 +1,56 @@ +# Run with: nix build .#nix-json-schema-checks +{ + lib, + mkMesonDerivation, + + meson, + ninja, + jsonschema, + + # Configuration Options + + version, +}: + +mkMesonDerivation (finalAttrs: { + pname = "nix-json-schema-checks"; + inherit version; + + workDir = ./.; + fileset = lib.fileset.unions [ + ../../.version + ../../doc/manual/source/protocols/json/schema + ../../src/libutil-tests/data/memory-source-accessor + ../../src/libutil-tests/data/hash + ../../src/libstore-tests/data/content-address + ../../src/libstore-tests/data/store-path + ../../src/libstore-tests/data/realisation + ../../src/libstore-tests/data/derivation + ../../src/libstore-tests/data/derived-path + ../../src/libstore-tests/data/path-info + ../../src/libstore-tests/data/nar-info + ../../src/libstore-tests/data/build-result + ../../src/libstore-tests/data/dummy-store + ./. 
+ ]; + + outputs = [ "out" ]; + + nativeBuildInputs = [ + meson + ninja + jsonschema + ]; + + doCheck = true; + + mesonCheckFlags = [ "--print-errorlogs" ]; + + postInstall = '' + touch $out + ''; + + meta = { + platforms = lib.platforms.all; + }; +}) diff --git a/src/json-schema-checks/schema b/src/json-schema-checks/schema new file mode 120000 index 00000000000..473e47b1bb1 --- /dev/null +++ b/src/json-schema-checks/schema @@ -0,0 +1 @@ +../../doc/manual/source/protocols/json/schema \ No newline at end of file diff --git a/src/json-schema-checks/store b/src/json-schema-checks/store new file mode 120000 index 00000000000..442f0749a83 --- /dev/null +++ b/src/json-schema-checks/store @@ -0,0 +1 @@ +../../src/libstore-tests/data/dummy-store \ No newline at end of file diff --git a/src/json-schema-checks/store-object-info b/src/json-schema-checks/store-object-info new file mode 120000 index 00000000000..a3c9e07c4f8 --- /dev/null +++ b/src/json-schema-checks/store-object-info @@ -0,0 +1 @@ +../../src/libstore-tests/data/path-info \ No newline at end of file diff --git a/src/json-schema-checks/store-path b/src/json-schema-checks/store-path new file mode 120000 index 00000000000..003b1dbbb03 --- /dev/null +++ b/src/json-schema-checks/store-path @@ -0,0 +1 @@ +../../src/libstore-tests/data/store-path \ No newline at end of file diff --git a/src/kaitai-struct-checks/.version b/src/kaitai-struct-checks/.version new file mode 120000 index 00000000000..b7badcd0cc8 --- /dev/null +++ b/src/kaitai-struct-checks/.version @@ -0,0 +1 @@ +../../.version \ No newline at end of file diff --git a/src/kaitai-struct-checks/meson.build b/src/kaitai-struct-checks/meson.build new file mode 100644 index 00000000000..f705a6744c0 --- /dev/null +++ b/src/kaitai-struct-checks/meson.build @@ -0,0 +1,77 @@ +# Run with: +# meson test --suite kaitai-struct +# Run with: (without shell / configure) +# nix build .#nix-kaitai-struct-checks + +project( + 'nix-kaitai-struct-checks', + 'cpp', + version : 
files('.version'), + default_options : [ + 'cpp_std=c++23', + # TODO(Qyriad): increase the warning level + 'warning_level=1', + 'errorlogs=true', # Please print logs for tests that fail + ], + meson_version : '>= 1.1', + license : 'LGPL-2.1-or-later', +) + +kaitai_runtime_dep = dependency('kaitai-struct-cpp-stl-runtime', required : true) +gtest_dep = dependency('gtest') +gtest_main_dep = dependency('gtest_main', required : true) + +# Find the Kaitai Struct compiler +ksc = find_program('ksc', required : true) + +kaitai_generated_srcs = custom_target( + 'kaitai-generated-sources', + input : [ 'nar.ksy' ], + output : [ 'nix_nar.cpp', 'nix_nar.h' ], + command : [ + ksc, + '@INPUT@', + '--target', 'cpp_stl', + '--outdir', + meson.current_build_dir(), + ], +) + +nar_kaitai_lib = library( + 'nix-nar-kaitai-lib', + kaitai_generated_srcs, + dependencies : [ kaitai_runtime_dep ], + install : true, +) + +nar_kaitai_dep = declare_dependency( + link_with : nar_kaitai_lib, + sources : kaitai_generated_srcs[1], +) + +# The nar directory is a committed symlink to the actual nars location +nars_dir = meson.current_source_dir() / 'nars' + +# Get all example files +nars = [ + 'dot.nar', +] + +test_deps = [ + nar_kaitai_dep, + kaitai_runtime_dep, + gtest_main_dep, +] + +this_exe = executable( + meson.project_name(), + 'test-parse-nar.cc', + dependencies : test_deps, +) + +test( + meson.project_name(), + this_exe, + env : [ 'NIX_NARS_DIR=' + nars_dir ], + protocol : 'gtest', +) diff --git a/src/kaitai-struct-checks/nar.ksy b/src/kaitai-struct-checks/nar.ksy new file mode 120000 index 00000000000..c3a79a3b656 --- /dev/null +++ b/src/kaitai-struct-checks/nar.ksy @@ -0,0 +1 @@ +../../doc/manual/source/protocols/nix-archive/nar.ksy \ No newline at end of file diff --git a/src/kaitai-struct-checks/nars b/src/kaitai-struct-checks/nars new file mode 120000 index 00000000000..ed0b4ecc75b --- /dev/null +++ b/src/kaitai-struct-checks/nars @@ -0,0 +1 @@ +../libutil-tests/data/nars \ No newline at 
end of file diff --git a/src/kaitai-struct-checks/nix-meson-build-support b/src/kaitai-struct-checks/nix-meson-build-support new file mode 120000 index 00000000000..0b140f56bde --- /dev/null +++ b/src/kaitai-struct-checks/nix-meson-build-support @@ -0,0 +1 @@ +../../nix-meson-build-support \ No newline at end of file diff --git a/src/kaitai-struct-checks/package.nix b/src/kaitai-struct-checks/package.nix new file mode 100644 index 00000000000..4257ceb7681 --- /dev/null +++ b/src/kaitai-struct-checks/package.nix @@ -0,0 +1,70 @@ +# Run with: nix build .#nix-kaitai-struct-checks +# or: `nix develop .#nix-kaitai-struct-checks` to enter a dev shell +{ + lib, + mkMesonDerivation, + gtest, + meson, + ninja, + pkg-config, + kaitai-struct-compiler, + fetchzip, + kaitai-struct-cpp-stl-runtime, + # Configuration Options + version, +}: +let + inherit (lib) fileset; +in +mkMesonDerivation (finalAttrs: { + pname = "nix-kaitai-struct-checks"; + inherit version; + + workDir = ./.; + fileset = lib.fileset.unions [ + ../../nix-meson-build-support + ./nix-meson-build-support + ./.version + ../../.version + ../../doc/manual/source/protocols/nix-archive/nar.ksy + ./nars + ../../src/libutil-tests/data + ./meson.build + ./nar.ksy + (fileset.fileFilter (file: file.hasExt "cc") ./.) + (fileset.fileFilter (file: file.hasExt "hh") ./.) 
+ ]; + + outputs = [ "out" ]; + + buildInputs = [ + gtest + kaitai-struct-cpp-stl-runtime + ]; + + nativeBuildInputs = [ + meson + ninja + pkg-config + # This can go away when we bump up to 25.11 + (kaitai-struct-compiler.overrideAttrs (finalAttrs: { + version = "0.11"; + src = fetchzip { + url = "https://github.com/kaitai-io/kaitai_struct_compiler/releases/download/${version}/kaitai-struct-compiler-${version}.zip"; + sha256 = "sha256-j9TEilijqgIiD0GbJfGKkU1FLio9aTopIi1v8QT1b+A="; + }; + })) + ]; + + doCheck = true; + + mesonCheckFlags = [ "--print-errorlogs" ]; + + postInstall = '' + touch $out + ''; + + meta = { + platforms = lib.platforms.unix; + }; +}) diff --git a/src/kaitai-struct-checks/test-parse-nar.cc b/src/kaitai-struct-checks/test-parse-nar.cc new file mode 100644 index 00000000000..456ffb12741 --- /dev/null +++ b/src/kaitai-struct-checks/test-parse-nar.cc @@ -0,0 +1,48 @@ +#include +#include +#include +#include +#include + +#include + +#include +#include +#include + +#include "nix_nar.h" + +static const std::vector NarFiles = { + "empty.nar", + "dot.nar", + "dotdot.nar", + "executable-after-contents.nar", + "invalid-tag-instead-of-contents.nar", + "name-after-node.nar", + "nul-character.nar", + "slash.nar", +}; + +class NarParseTest : public ::testing::TestWithParam +{}; + +TEST_P(NarParseTest, ParseSucceeds) +{ + const auto nar_file = GetParam(); + + const char * nars_dir_env = std::getenv("NIX_NARS_DIR"); + if (nars_dir_env == nullptr) { + FAIL() << "NIX_NARS_DIR environment variable not set."; + } + + const std::filesystem::path nar_file_path = std::filesystem::path(nars_dir_env) / "dot.nar"; + ASSERT_TRUE(std::filesystem::exists(nar_file_path)) << "Missing test file: " << nar_file_path; + + std::ifstream ifs(nar_file_path, std::ifstream::binary); + ASSERT_TRUE(ifs.is_open()) << "Failed to open file: " << nar_file; + kaitai::kstream ks(&ifs); + nix_nar_t nar(&ks); + ASSERT_TRUE(nar.root_node() != nullptr) << "Failed to parse NAR file: " << nar_file; 
+} + +INSTANTIATE_TEST_SUITE_P(AllNarFiles, NarParseTest, ::testing::ValuesIn(NarFiles)); diff --git a/src/libcmd/built-path.cc b/src/libcmd/built-path.cc index 4d76dd6da39..fc7f1849384 100644 --- a/src/libcmd/built-path.cc +++ b/src/libcmd/built-path.cc @@ -117,10 +117,11 @@ RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const "the derivation '%s' has unrealised output '%s' (derived-path.cc/toRealisedPaths)", store.printStorePath(p.drvPath->outPath()), outputName); - auto thisRealisation = store.queryRealisation(DrvOutput{*drvOutput, outputName}); + DrvOutput key{*drvOutput, outputName}; + auto thisRealisation = store.queryRealisation(key); assert(thisRealisation); // We’ve built it, so we must // have the realisation - res.insert(*thisRealisation); + res.insert(Realisation{*thisRealisation, std::move(key)}); } else { res.insert(outputPath); } diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc index 077381eee43..226b65f4a7c 100644 --- a/src/libcmd/command.cc +++ b/src/libcmd/command.cc @@ -2,6 +2,7 @@ #include #include "nix/cmd/command.hh" +#include "nix/cmd/legacy.hh" #include "nix/cmd/markdown.hh" #include "nix/store/store-open.hh" #include "nix/store/local-fs-store.hh" @@ -14,6 +15,18 @@ namespace nix { +RegisterCommand::Commands & RegisterCommand::commands() +{ + static RegisterCommand::Commands commands; + return commands; +} + +RegisterLegacyCommand::Commands & RegisterLegacyCommand::commands() +{ + static RegisterLegacyCommand::Commands commands; + return commands; +} + nix::Commands RegisterCommand::getCommandsFor(const std::vector & prefix) { nix::Commands res; @@ -291,7 +304,7 @@ void MixProfile::updateProfile(const BuiltPaths & buildables) MixDefaultProfile::MixDefaultProfile() { - profile = getDefaultProfile(); + profile = getDefaultProfile().string(); } MixEnvironment::MixEnvironment() @@ -385,7 +398,7 @@ void createOutLinks(const std::filesystem::path & outLink, const BuiltPaths & bu auto symlink = outLink; if (i) symlink += 
fmt("-%d", i); - store.addPermRoot(bo.path, absPath(symlink.string())); + store.addPermRoot(bo.path, absPath(symlink).string()); }, [&](const BuiltPath::Built & bfd) { for (auto & output : bfd.outputs) { @@ -394,7 +407,7 @@ void createOutLinks(const std::filesystem::path & outLink, const BuiltPaths & bu symlink += fmt("-%d", i); if (output.first != "out") symlink += fmt("-%s", output.first); - store.addPermRoot(output.second, absPath(symlink.string())); + store.addPermRoot(output.second, absPath(symlink).string()); } }, }, diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc index 39e7139f945..865901febf4 100644 --- a/src/libcmd/common-eval-args.cc +++ b/src/libcmd/common-eval-args.cc @@ -29,7 +29,7 @@ EvalSettings evalSettings{ auto flakeRef = parseFlakeRef(fetchSettings, std::string{rest}, {}, true, false); debug("fetching flake search path element '%s''", rest); auto [accessor, lockedRef] = - flakeRef.resolve(fetchSettings, state.store).lazyFetch(fetchSettings, state.store); + flakeRef.resolve(fetchSettings, *state.store).lazyFetch(fetchSettings, *state.store); auto storePath = nix::fetchToStore( state.fetchSettings, *state.store, @@ -160,29 +160,30 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state) state.parseExprFromString( arg.expr, compatibilitySettings.nixShellShebangArgumentsRelativeToScript - ? state.rootPath(absPath(getCommandBaseDir())) + ? 
state.rootPath(absPath(getCommandBaseDir()).string()) : state.rootPath("."))); }, - [&](const AutoArgString & arg) { v->mkString(arg.s); }, - [&](const AutoArgFile & arg) { v->mkString(readFile(arg.path.string())); }, - [&](const AutoArgStdin & arg) { v->mkString(readFile(STDIN_FILENO)); }}, + [&](const AutoArgString & arg) { v->mkString(arg.s, state.mem); }, + [&](const AutoArgFile & arg) { v->mkString(readFile(arg.path.string()), state.mem); }, + [&](const AutoArgStdin & arg) { v->mkString(readFile(STDIN_FILENO), state.mem); }}, arg); res.insert(state.symbols.create(name), v); } return res.finish(); } -SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir) +SourcePath lookupFileArg(EvalState & state, std::string_view s, const std::filesystem::path * baseDir) { if (EvalSettings::isPseudoUrl(s)) { - auto accessor = fetchers::downloadTarball(state.store, state.fetchSettings, EvalSettings::resolvePseudoUrl(s)); + auto accessor = fetchers::downloadTarball(*state.store, state.fetchSettings, EvalSettings::resolvePseudoUrl(s)); auto storePath = fetchToStore(state.fetchSettings, *state.store, SourcePath(accessor), FetchMode::Copy); return state.storePath(storePath); } else if (hasPrefix(s, "flake:")) { auto flakeRef = parseFlakeRef(fetchSettings, std::string(s.substr(6)), {}, true, false); - auto [accessor, lockedRef] = flakeRef.resolve(fetchSettings, state.store).lazyFetch(fetchSettings, state.store); + auto [accessor, lockedRef] = + flakeRef.resolve(fetchSettings, *state.store).lazyFetch(fetchSettings, *state.store); auto storePath = nix::fetchToStore( state.fetchSettings, *state.store, SourcePath(accessor), FetchMode::Copy, lockedRef.input.getName()); state.allowPath(storePath); @@ -190,12 +191,13 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * bas } else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') { - Path p(s.substr(1, s.size() - 2)); + // Should perhaps be a `CanonPath`? 
+ std::string p(s.substr(1, s.size() - 2)); return state.findFile(p); } else - return state.rootPath(baseDir ? absPath(s, *baseDir) : absPath(s)); + return state.rootPath(absPath(std::filesystem::path{s}, baseDir).string()); } } // namespace nix diff --git a/src/libcmd/include/nix/cmd/command.hh b/src/libcmd/include/nix/cmd/command.hh index bd5786fcdea..c860f019260 100644 --- a/src/libcmd/include/nix/cmd/command.hh +++ b/src/libcmd/include/nix/cmd/command.hh @@ -134,7 +134,7 @@ struct MixFlakeOptions : virtual Args, EvalCommand struct SourceExprCommand : virtual Args, MixFlakeOptions { - std::optional file; + std::optional file; std::optional expr; SourceExprCommand(); @@ -288,11 +288,7 @@ struct RegisterCommand { typedef std::map, std::function()>> Commands; - static Commands & commands() - { - static Commands commands; - return commands; - } + static Commands & commands(); RegisterCommand(std::vector && name, std::function()> command) { @@ -316,7 +312,7 @@ static RegisterCommand registerCommand2(std::vector && name) struct MixProfile : virtual StoreCommand { - std::optional profile; + std::optional profile; MixProfile(); diff --git a/src/libcmd/include/nix/cmd/common-eval-args.hh b/src/libcmd/include/nix/cmd/common-eval-args.hh index dd8c34c1d9c..4f9ebb83df5 100644 --- a/src/libcmd/include/nix/cmd/common-eval-args.hh +++ b/src/libcmd/include/nix/cmd/common-eval-args.hh @@ -81,6 +81,6 @@ private: /** * @param baseDir Optional [base directory](https://nix.dev/manual/nix/development/glossary#gloss-base-directory) */ -SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir = nullptr); +SourcePath lookupFileArg(EvalState & state, std::string_view s, const std::filesystem::path * baseDir = nullptr); } // namespace nix diff --git a/src/libcmd/include/nix/cmd/installable-flake.hh b/src/libcmd/include/nix/cmd/installable-flake.hh index 935ea87799d..9f449ad48f2 100644 --- a/src/libcmd/include/nix/cmd/installable-flake.hh +++ 
b/src/libcmd/include/nix/cmd/installable-flake.hh @@ -69,7 +69,7 @@ struct InstallableFlake : InstallableValue */ std::vector> getCursors(EvalState & state) override; - std::shared_ptr getLockedFlake() const; + ref getLockedFlake() const; FlakeRef nixpkgsFlakeRef() const; }; @@ -87,6 +87,4 @@ static inline FlakeRef defaultNixpkgsFlakeRef() return FlakeRef::fromAttrs(fetchSettings, {{"type", "indirect"}, {"id", "nixpkgs"}}); } -ref openEvalCache(EvalState & state, std::shared_ptr lockedFlake); - } // namespace nix diff --git a/src/libcmd/include/nix/cmd/installable-value.hh b/src/libcmd/include/nix/cmd/installable-value.hh index 3521a415479..27a1fb9815d 100644 --- a/src/libcmd/include/nix/cmd/installable-value.hh +++ b/src/libcmd/include/nix/cmd/installable-value.hh @@ -17,7 +17,7 @@ class AttrCursor; struct App { std::vector context; - Path program; + std::filesystem::path program; // FIXME: add args, sandbox settings, metadata, ... }; diff --git a/src/libcmd/include/nix/cmd/legacy.hh b/src/libcmd/include/nix/cmd/legacy.hh index 54605718403..d408cde7ac4 100644 --- a/src/libcmd/include/nix/cmd/legacy.hh +++ b/src/libcmd/include/nix/cmd/legacy.hh @@ -13,11 +13,7 @@ struct RegisterLegacyCommand { typedef std::map Commands; - static Commands & commands() - { - static Commands commands; - return commands; - } + static Commands & commands(); RegisterLegacyCommand(const std::string & name, MainFunction fun) { diff --git a/src/libcmd/include/nix/cmd/repl.hh b/src/libcmd/include/nix/cmd/repl.hh index a2c905f86c4..b72a9b7d1d7 100644 --- a/src/libcmd/include/nix/cmd/repl.hh +++ b/src/libcmd/include/nix/cmd/repl.hh @@ -19,7 +19,16 @@ struct AbstractNixRepl typedef std::vector> AnnotatedValues; - using RunNix = void(Path program, const Strings & args, const std::optional & input); + /** + * Run a nix executable + * + * @todo this is a layer violation + * + * @param programName Name of the command, e.g. `nix` or `nix-env`. + * @param args arguments to the command. 
+ */ + using RunNix = + void(const std::string & programName, const Strings & args, const std::optional & input); /** * @param runNix Function to run the nix CLI to support various diff --git a/src/libcmd/installable-flake.cc b/src/libcmd/installable-flake.cc index 8ac80806235..70267a65c09 100644 --- a/src/libcmd/installable-flake.cc +++ b/src/libcmd/installable-flake.cc @@ -172,7 +172,7 @@ std::vector> InstallableFlake::getCursors(EvalState for (auto & attrPath : attrPaths) { debug("trying flake output attribute '%s'", attrPath); - auto attr = root->findAlongAttrPath(parseAttrPath(state, attrPath)); + auto attr = root->findAlongAttrPath(AttrPath::parse(state, attrPath)); if (attr) { res.push_back(ref(*attr)); } else { @@ -186,16 +186,16 @@ std::vector> InstallableFlake::getCursors(EvalState return res; } -std::shared_ptr InstallableFlake::getLockedFlake() const +ref InstallableFlake::getLockedFlake() const { if (!_lockedFlake) { flake::LockFlags lockFlagsApplyConfig = lockFlags; // FIXME why this side effect? 
lockFlagsApplyConfig.applyNixConfig = true; - _lockedFlake = - std::make_shared(lockFlake(flakeSettings, *state, flakeRef, lockFlagsApplyConfig)); + _lockedFlake = make_ref(lockFlake(flakeSettings, *state, flakeRef, lockFlagsApplyConfig)); } - return _lockedFlake; + // _lockedFlake is now non-null but still just a shared_ptr + return ref(_lockedFlake); } FlakeRef InstallableFlake::nixpkgsFlakeRef() const diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index 8293254db08..be64a429297 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -133,7 +133,7 @@ MixFlakeOptions::MixFlakeOptions() lockFlags.writeLockFile = false; lockFlags.inputOverrides.insert_or_assign( flake::parseInputAttrPath(inputAttrPath), - parseFlakeRef(fetchSettings, flakeRef, absPath(getCommandBaseDir()), true)); + parseFlakeRef(fetchSettings, flakeRef, absPath(getCommandBaseDir()).string(), true)); }}, .completer = {[&](AddCompletions & completions, size_t n, std::string_view prefix) { if (n == 0) { @@ -174,7 +174,7 @@ MixFlakeOptions::MixFlakeOptions() auto flake = flake::lockFlake( flakeSettings, *evalState, - parseFlakeRef(fetchSettings, flakeRef, absPath(getCommandBaseDir())), + parseFlakeRef(fetchSettings, flakeRef, absPath(getCommandBaseDir()).string()), {.writeLockFile = false}); for (auto & [inputName, input] : flake.lockFile.root->inputs) { auto input2 = flake.lockFile.findInput({inputName}); // resolve 'follows' nodes @@ -264,7 +264,7 @@ void SourceExprCommand::completeInstallable(AddCompletions & completions, std::s evalSettings.pureEval = false; auto state = getEvalState(); - auto e = state->parseExprFromFile(resolveExprPath(lookupFileArg(*state, *file))); + auto e = state->parseExprFromFile(resolveExprPath(lookupFileArg(*state, file->string()))); Value root; state->eval(e, root); @@ -343,8 +343,7 @@ void completeFlakeRefWithFragment( parseFlakeRef(fetchSettings, expandTilde(flakeRefS), std::filesystem::current_path().string()); auto evalCache 
= openEvalCache( - *evalState, - std::make_shared(lockFlake(flakeSettings, *evalState, flakeRef, lockFlags))); + *evalState, make_ref(lockFlake(flakeSettings, *evalState, flakeRef, lockFlags))); auto root = evalCache->getRoot(); @@ -357,9 +356,9 @@ void completeFlakeRefWithFragment( attrPathPrefixes.push_back(""); for (auto & attrPathPrefixS : attrPathPrefixes) { - auto attrPathPrefix = parseAttrPath(*evalState, attrPathPrefixS); + auto attrPathPrefix = AttrPath::parse(*evalState, attrPathPrefixS); auto attrPathS = attrPathPrefixS + std::string(fragment); - auto attrPath = parseAttrPath(*evalState, attrPathS); + auto attrPath = AttrPath::parse(*evalState, attrPathS); std::string lastAttr; if (!attrPath.empty() && !hasSuffix(attrPathS, ".")) { @@ -377,9 +376,7 @@ void completeFlakeRefWithFragment( /* Strip the attrpath prefix. */ attrPath2.erase(attrPath2.begin(), attrPath2.begin() + attrPathPrefix.size()); // FIXME: handle names with dots - completions.add( - flakeRefS + "#" + prefixRoot - + concatStringsSep(".", evalState->symbols.resolve(attrPath2))); + completions.add(flakeRefS + "#" + prefixRoot + attrPath2.to_string(*evalState)); } } } @@ -388,7 +385,7 @@ void completeFlakeRefWithFragment( attrpaths. */ if (fragment.empty()) { for (auto & attrPath : defaultFlakeAttrPaths) { - auto attr = root->findAlongAttrPath(parseAttrPath(*evalState, attrPath)); + auto attr = root->findAlongAttrPath(AttrPath::parse(*evalState, attrPath)); if (!attr) continue; completions.add(flakeRefS + "#" + prefixRoot); @@ -408,7 +405,7 @@ void completeFlakeRef(AddCompletions & completions, ref store, std::strin Args::completeDir(completions, 0, prefix); /* Look for registry entries that match the prefix. 
*/ - for (auto & registry : fetchers::getRegistries(fetchSettings, store)) { + for (auto & registry : fetchers::getRegistries(fetchSettings, *store)) { for (auto & entry : registry->entries) { auto from = entry.from.to_string(); if (!hasPrefix(prefix, "flake:") && hasPrefix(from, "flake:")) { @@ -441,42 +438,6 @@ static StorePath getDeriver(ref store, const Installable & i, const Store return *derivers.begin(); } -ref openEvalCache(EvalState & state, std::shared_ptr lockedFlake) -{ - auto fingerprint = evalSettings.useEvalCache && evalSettings.pureEval - ? lockedFlake->getFingerprint(state.store, state.fetchSettings) - : std::nullopt; - auto rootLoader = [&state, lockedFlake]() { - /* For testing whether the evaluation cache is - complete. */ - if (getEnv("NIX_ALLOW_EVAL").value_or("1") == "0") - throw Error("not everything is cached, but evaluation is not allowed"); - - auto vFlake = state.allocValue(); - flake::callFlake(state, *lockedFlake, *vFlake); - - state.forceAttrs(*vFlake, noPos, "while parsing cached flake data"); - - auto aOutputs = vFlake->attrs()->get(state.symbols.create("outputs")); - assert(aOutputs); - - return aOutputs->value; - }; - - if (fingerprint) { - auto search = state.evalCaches.find(fingerprint.value()); - if (search == state.evalCaches.end()) { - search = - state.evalCaches - .emplace(fingerprint.value(), make_ref(fingerprint, state, rootLoader)) - .first; - } - return search->second; - } else { - return make_ref(std::nullopt, state, rootLoader); - } -} - Installables SourceExprCommand::parseInstallables(ref store, std::vector ss) { Installables result; @@ -500,10 +461,10 @@ Installables SourceExprCommand::parseInstallables(ref store, std::vector< state->eval(e, *vFile); } else if (file) { auto dir = absPath(getCommandBaseDir()); - state->evalFile(lookupFileArg(*state, *file, &dir), *vFile); + state->evalFile(lookupFileArg(*state, file->string(), &dir), *vFile); } else { - Path dir = absPath(getCommandBaseDir()); - auto e = 
state->parseExprFromString(*expr, state->rootPath(dir)); + auto dir = absPath(getCommandBaseDir()); + auto e = state->parseExprFromString(*expr, state->rootPath(dir.string())); state->eval(e, *vFile); } @@ -869,7 +830,8 @@ std::vector RawInstallablesCommand::getFlakeRefsForCompletion() std::vector res; res.reserve(rawInstallables.size()); for (const auto & i : rawInstallables) - res.push_back(parseFlakeRefWithFragment(fetchSettings, expandTilde(i), absPath(getCommandBaseDir())).first); + res.push_back( + parseFlakeRefWithFragment(fetchSettings, expandTilde(i), absPath(getCommandBaseDir()).string()).first); return res; } @@ -888,7 +850,8 @@ void RawInstallablesCommand::run(ref store) std::vector InstallableCommand::getFlakeRefsForCompletion() { - return {parseFlakeRefWithFragment(fetchSettings, expandTilde(_installable), absPath(getCommandBaseDir())).first}; + return {parseFlakeRefWithFragment(fetchSettings, expandTilde(_installable), absPath(getCommandBaseDir()).string()) + .first}; } void InstallablesCommand::run(ref store, std::vector && rawInstallables) diff --git a/src/libcmd/meson.build b/src/libcmd/meson.build index 3833d7e0a9d..f553afa0ba1 100644 --- a/src/libcmd/meson.build +++ b/src/libcmd/meson.build @@ -67,7 +67,6 @@ config_priv_h = configure_file( ) subdir('nix-meson-build-support/common') -subdir('nix-meson-build-support/asan-options') sources = files( 'built-path.cc', diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index a88365c8d50..841c75737ff 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -58,8 +58,7 @@ struct NixRepl : AbstractNixRepl, detail::ReplCompleterMixin, gc { size_t debugTraceIndex; - // Arguments passed to :load, saved so they can be reloaded with :reload - Strings loadedFiles; + std::list loadedFiles; // Arguments passed to :load-flake, saved so they can be reloaded with :reload Strings loadedFlakes; std::function getValues; @@ -73,7 +72,7 @@ struct NixRepl : AbstractNixRepl, detail::ReplCompleterMixin, gc RunNix * 
runNixPtr; - void runNix(Path program, const Strings & args, const std::optional & input = {}); + void runNix(const std::string & program, const Strings & args, const std::optional & input = {}); std::unique_ptr interacter; @@ -92,7 +91,7 @@ struct NixRepl : AbstractNixRepl, detail::ReplCompleterMixin, gc StorePath getDerivationPath(Value & v); ProcessLineResult processLine(std::string line); - void loadFile(const Path & path); + void loadFile(const std::filesystem::path & path); void loadFlake(const std::string & flakeRef); void loadFiles(); void loadFlakes(); @@ -143,7 +142,7 @@ NixRepl::NixRepl( , getValues(getValues) , staticEnv(new StaticEnv(nullptr, state->staticBaseEnv)) , runNixPtr{runNix} - , interacter(make_unique(getDataDir() + "/repl-history")) + , interacter(make_unique((getDataDir() / "repl-history").string())) { } @@ -540,7 +539,9 @@ ProcessLineResult NixRepl::processLine(std::string line) Value v; evalString(arg, v); StorePath drvPath = getDerivationPath(v); - Path drvPathRaw = state->store->printStorePath(drvPath); + // N.B. This need not be a local / native file path. For + // example, we might be using an SSH store to a different OS. 
+ std::string drvPathRaw = state->store->printStorePath(drvPath); if (command == ":b" || command == ":bl") { state->store->buildPaths({ @@ -713,12 +714,12 @@ ProcessLineResult NixRepl::processLine(std::string line) return ProcessLineResult::PromptAgain; } -void NixRepl::loadFile(const Path & path) +void NixRepl::loadFile(const std::filesystem::path & path) { loadedFiles.remove(path); loadedFiles.push_back(path); Value v, v2; - state->evalFile(lookupFileArg(*state, path), v); + state->evalFile(lookupFileArg(*state, path.string()), v); state->autoCallFunction(*autoArgs, v, v2); addAttrsToScope(v2); } @@ -791,7 +792,7 @@ void NixRepl::reloadFilesAndFlakes() void NixRepl::loadFiles() { - Strings old = loadedFiles; + decltype(loadedFiles) old = loadedFiles; loadedFiles.clear(); for (auto & i : old) { @@ -889,7 +890,7 @@ void NixRepl::evalString(std::string s, Value & v) state->forceValue(v, v.determinePos(noPos)); } -void NixRepl::runNix(Path program, const Strings & args, const std::optional & input) +void NixRepl::runNix(const std::string & program, const Strings & args, const std::optional & input) { if (runNixPtr) (*runNixPtr)(program, args, input); diff --git a/src/libexpr-c/meson.build b/src/libexpr-c/meson.build index 03cee41a09a..c47704ce411 100644 --- a/src/libexpr-c/meson.build +++ b/src/libexpr-c/meson.build @@ -28,7 +28,6 @@ deps_public_maybe_subproject = [ subdir('nix-meson-build-support/subprojects') subdir('nix-meson-build-support/common') -subdir('nix-meson-build-support/asan-options') sources = files( 'nix_api_expr.cc', diff --git a/src/libexpr-c/nix_api_expr.cc b/src/libexpr-c/nix_api_expr.cc index 6ba5c07cf89..bfbd0a9c361 100644 --- a/src/libexpr-c/nix_api_expr.cc +++ b/src/libexpr-c/nix_api_expr.cc @@ -69,8 +69,8 @@ nix_err nix_expr_eval_from_string( context->last_err_code = NIX_OK; try { nix::Expr * parsedExpr = state->state.parseExprFromString(expr, state->state.rootPath(nix::CanonPath(path))); - state->state.eval(parsedExpr, value->value); - 
state->state.forceValue(value->value, nix::noPos); + state->state.eval(parsedExpr, *value->value); + state->state.forceValue(*value->value, nix::noPos); state->state.waitForAllPaths(); } NIXC_CATCH_ERRS @@ -81,8 +81,8 @@ nix_err nix_value_call(nix_c_context * context, EvalState * state, Value * fn, n if (context) context->last_err_code = NIX_OK; try { - state->state.callFunction(fn->value, arg->value, value->value, nix::noPos); - state->state.forceValue(value->value, nix::noPos); + state->state.callFunction(*fn->value, *arg->value, *value->value, nix::noPos); + state->state.forceValue(*value->value, nix::noPos); state->state.waitForAllPaths(); } NIXC_CATCH_ERRS @@ -93,9 +93,15 @@ nix_err nix_value_call_multi( { if (context) context->last_err_code = NIX_OK; + + std::vector internal_args; + internal_args.reserve(nargs); + for (size_t i = 0; i < nargs; i++) + internal_args.push_back(args[i]->value); + try { - state->state.callFunction(fn->value, {(nix::Value **) args, nargs}, value->value, nix::noPos); - state->state.forceValue(value->value, nix::noPos); + state->state.callFunction(*fn->value, {internal_args.data(), nargs}, *value->value, nix::noPos); + state->state.forceValue(*value->value, nix::noPos); state->state.waitForAllPaths(); } NIXC_CATCH_ERRS @@ -106,7 +112,7 @@ nix_err nix_value_force(nix_c_context * context, EvalState * state, nix_value * if (context) context->last_err_code = NIX_OK; try { - state->state.forceValue(value->value, nix::noPos); + state->state.forceValue(*value->value, nix::noPos); state->state.waitForAllPaths(); } NIXC_CATCH_ERRS @@ -117,7 +123,7 @@ nix_err nix_value_force_deep(nix_c_context * context, EvalState * state, nix_val if (context) context->last_err_code = NIX_OK; try { - state->state.forceValueDeep(value->value); + state->state.forceValueDeep(*value->value); state->state.waitForAllPaths(); } NIXC_CATCH_ERRS @@ -142,6 +148,8 @@ nix_eval_state_builder * nix_eval_state_builder_new(nix_c_context * context, Sto void 
nix_eval_state_builder_free(nix_eval_state_builder * builder) { + if (builder) + builder->~nix_eval_state_builder(); operator delete(builder, static_cast(alignof(nix_eval_state_builder))); } @@ -208,6 +216,8 @@ EvalState * nix_state_create(nix_c_context * context, const char ** lookupPath_c void nix_state_free(EvalState * state) { + if (state) + state->~EvalState(); operator delete(state, static_cast(alignof(EvalState))); } diff --git a/src/libexpr-c/nix_api_expr.h b/src/libexpr-c/nix_api_expr.h index 2be7399551e..3623ee076f6 100644 --- a/src/libexpr-c/nix_api_expr.h +++ b/src/libexpr-c/nix_api_expr.h @@ -4,11 +4,14 @@ * @brief Bindings to the Nix language evaluator * * See *[Embedding the Nix Evaluator](@ref nix_evaluator_example)* for an example. - * @{ */ /** @file * @brief Main entry for the libexpr C bindings */ +/** @defgroup libexpr_init Initialization + * @ingroup libexpr + * @{ + */ #include "nix_api_store.h" #include "nix_api_util.h" @@ -45,7 +48,10 @@ typedef struct nix_eval_state_builder nix_eval_state_builder; */ typedef struct EvalState EvalState; // nix::EvalState +/** @} */ + /** @brief A Nix language value, or thunk that may evaluate to a value. + * @ingroup value * * Values are the primary objects manipulated in the Nix language. * They are considered to be immutable from a user's perspective, but the process of evaluating a value changes its @@ -56,7 +62,8 @@ typedef struct EvalState EvalState; // nix::EvalState * * The evaluator manages its own memory, but your use of the C API must follow the reference counting rules. * - * @see value_manip + * @struct nix_value + * @see value_create, value_extract * @see nix_value_incref, nix_value_decref */ typedef struct nix_value nix_value; @@ -65,6 +72,7 @@ NIX_DEPRECATED("use nix_value instead") typedef nix_value Value; // Function prototypes /** * @brief Initialize the Nix language evaluator. 
+ * @ingroup libexpr_init * * This function must be called at least once, * at some point before constructing a EvalState for the first time. @@ -77,6 +85,7 @@ nix_err nix_libexpr_init(nix_c_context * context); /** * @brief Parses and evaluates a Nix expression from a string. + * @ingroup value_create * * @param[out] context Optional, stores error information * @param[in] state The state of the evaluation. @@ -93,6 +102,7 @@ nix_err nix_expr_eval_from_string( /** * @brief Calls a Nix function with an argument. + * @ingroup value_create * * @param[out] context Optional, stores error information * @param[in] state The state of the evaluation. @@ -107,6 +117,7 @@ nix_err nix_value_call(nix_c_context * context, EvalState * state, nix_value * f /** * @brief Calls a Nix function with multiple arguments. + * @ingroup value_create * * Technically these are functions that return functions. It is common for Nix * functions to be curried, so this function is useful for calling them. @@ -126,10 +137,12 @@ nix_err nix_value_call_multi( /** * @brief Calls a Nix function with multiple arguments. + * @ingroup value_create * * Technically these are functions that return functions. It is common for Nix * functions to be curried, so this function is useful for calling them. * + * @def NIX_VALUE_CALL * @param[out] context Optional, stores error information * @param[in] state The state of the evaluation. * @param[out] value The result of the function call. @@ -147,6 +160,7 @@ nix_err nix_value_call_multi( /** * @brief Forces the evaluation of a Nix value. + * @ingroup value_create * * The Nix interpreter is lazy, and not-yet-evaluated values can be * of type NIX_TYPE_THUNK instead of their actual value. @@ -180,18 +194,20 @@ nix_err nix_value_force_deep(nix_c_context * context, EvalState * state, nix_val /** * @brief Create a new nix_eval_state_builder + * @ingroup libexpr_init * * The settings are initialized to their default value. 
* Values can be sourced elsewhere with nix_eval_state_builder_load. * * @param[out] context Optional, stores error information * @param[in] store The Nix store to use. - * @return A new nix_eval_state_builder or NULL on failure. + * @return A new nix_eval_state_builder or NULL on failure. Call nix_eval_state_builder_free() when you're done. */ nix_eval_state_builder * nix_eval_state_builder_new(nix_c_context * context, Store * store); /** * @brief Read settings from the ambient environment + * @ingroup libexpr_init * * Settings are sourced from environment variables and configuration files, * as documented in the Nix manual. @@ -204,6 +220,7 @@ nix_err nix_eval_state_builder_load(nix_c_context * context, nix_eval_state_buil /** * @brief Set the lookup path for `<...>` expressions + * @ingroup libexpr_init * * @param[in] context Optional, stores error information * @param[in] builder The builder to modify. @@ -214,18 +231,21 @@ nix_err nix_eval_state_builder_set_lookup_path( /** * @brief Create a new Nix language evaluator state + * @ingroup libexpr_init * - * Remember to nix_eval_state_builder_free after building the state. + * The builder becomes unusable after this call. Remember to call nix_eval_state_builder_free() + * after building the state. * * @param[out] context Optional, stores error information * @param[in] builder The builder to use and free - * @return A new Nix state or NULL on failure. + * @return A new Nix state or NULL on failure. Call nix_state_free() when you're done. * @see nix_eval_state_builder_new, nix_eval_state_builder_free */ EvalState * nix_eval_state_build(nix_c_context * context, nix_eval_state_builder * builder); /** * @brief Free a nix_eval_state_builder + * @ingroup libexpr_init * * Does not fail. 
* @@ -235,19 +255,21 @@ void nix_eval_state_builder_free(nix_eval_state_builder * builder); /** * @brief Create a new Nix language evaluator state + * @ingroup libexpr_init * * For more control, use nix_eval_state_builder * * @param[out] context Optional, stores error information * @param[in] lookupPath Null-terminated array of strings corresponding to entries in NIX_PATH. * @param[in] store The Nix store to use. - * @return A new Nix state or NULL on failure. + * @return A new Nix state or NULL on failure. Call nix_state_free() when you're done. * @see nix_state_builder_new */ EvalState * nix_state_create(nix_c_context * context, const char ** lookupPath, Store * store); /** * @brief Frees a Nix state. + * @ingroup libexpr_init * * Does not fail. * @@ -256,6 +278,7 @@ EvalState * nix_state_create(nix_c_context * context, const char ** lookupPath, void nix_state_free(EvalState * state); /** @addtogroup GC + * @ingroup libexpr * @brief Reference counting and garbage collector operations * * The Nix language evaluator uses a garbage collector. To ease C interop, we implement @@ -286,6 +309,9 @@ nix_err nix_gc_incref(nix_c_context * context, const void * object); /** * @brief Decrement the garbage collector reference counter for the given object * + * @deprecated We are phasing out the general nix_gc_decref() in favor of type-specified free functions, such as + * nix_value_decref(). 
+ * * We also provide typed `nix_*_decref` functions, which are * - safer to use * - easier to integrate when deriving bindings @@ -314,12 +340,11 @@ void nix_gc_now(); */ void nix_gc_register_finalizer(void * obj, void * cd, void (*finalizer)(void * obj, void * cd)); -/** @} */ +/** @} */ // doxygen group GC + // cffi end #ifdef __cplusplus } #endif -/** @} */ - #endif // NIX_API_EXPR_H diff --git a/src/libexpr-c/nix_api_expr_internal.h b/src/libexpr-c/nix_api_expr_internal.h index 3aa1d993225..07c7a2194df 100644 --- a/src/libexpr-c/nix_api_expr_internal.h +++ b/src/libexpr-c/nix_api_expr_internal.h @@ -39,7 +39,13 @@ struct ListBuilder struct nix_value { - nix::Value value; + nix::Value * value; + /** + * As we move to a managed heap, we need EvalMemory in more places. Ideally, we would take in EvalState or + * EvalMemory as an argument when we need it, but we don't want to make changes to the stable C api, so we stuff it + * into the nix_value that will get passed in to the relevant functions. + */ + nix::EvalMemory * mem; }; struct nix_string_return diff --git a/src/libexpr-c/nix_api_external.h b/src/libexpr-c/nix_api_external.h index f4a32728100..96c479d5769 100644 --- a/src/libexpr-c/nix_api_external.h +++ b/src/libexpr-c/nix_api_external.h @@ -2,11 +2,12 @@ #define NIX_API_EXTERNAL_H /** @ingroup libexpr * @addtogroup Externals - * @brief Deal with external values + * @brief Externals let Nix expressions work with foreign values that aren't part of the normal Nix value data model * @{ */ /** @file * @brief libexpr C bindings dealing with external values + * @see Externals */ #include "nix_api_expr.h" @@ -115,7 +116,7 @@ typedef struct NixCExternalValueDesc * @brief Try to compare two external values * * Optional, the default is always false. 
- * If the other object was not a Nix C external value, this comparison will + * If the other object was not a Nix C API external value, this comparison will * also return false * @param[in] self the void* passed to nix_create_external_value * @param[in] other the void* passed to the other object's @@ -168,7 +169,7 @@ typedef struct NixCExternalValueDesc /** * @brief Create an external value, that can be given to nix_init_external * - * Owned by the GC. Use nix_gc_decref when you're done with the pointer. + * Call nix_gc_decref() when you're done with the pointer. * * @param[out] context Optional, stores error information * @param[in] desc a NixCExternalValueDesc, you should keep this alive as long @@ -180,10 +181,11 @@ typedef struct NixCExternalValueDesc ExternalValue * nix_create_external_value(nix_c_context * context, NixCExternalValueDesc * desc, void * v); /** - * @brief Extract the pointer from a nix c external value. + * @brief Extract the pointer from a Nix C API external value. * @param[out] context Optional, stores error information * @param[in] b The external value - * @returns The pointer, or null if the external value was not from nix c. + * @returns The pointer, valid while the external value is valid, or null if the external value was not from the Nix C + * API. 
* @see nix_get_external */ void * nix_get_external_value_content(nix_c_context * context, ExternalValue * b); diff --git a/src/libexpr-c/nix_api_value.cc b/src/libexpr-c/nix_api_value.cc index 5ad85128e1e..b6a838284ef 100644 --- a/src/libexpr-c/nix_api_value.cc +++ b/src/libexpr-c/nix_api_value.cc @@ -20,7 +20,7 @@ static const nix::Value & check_value_not_null(const nix_value * value) if (!value) { throw std::runtime_error("nix_value is null"); } - return *((const nix::Value *) value); + return *value->value; } static nix::Value & check_value_not_null(nix_value * value) @@ -28,7 +28,7 @@ static nix::Value & check_value_not_null(nix_value * value) if (!value) { throw std::runtime_error("nix_value is null"); } - return value->value; + return *value->value; } static const nix::Value & check_value_in(const nix_value * value) @@ -58,9 +58,14 @@ static nix::Value & check_value_out(nix_value * value) return v; } -static inline nix_value * as_nix_value_ptr(nix::Value * v) +static nix_value * new_nix_value(nix::Value * v, nix::EvalMemory & mem) { - return reinterpret_cast(v); + nix_value * ret = new (mem.allocBytes(sizeof(nix_value))) nix_value{ + .value = v, + .mem = &mem, + }; + nix_gc_incref(nullptr, ret); + return ret; } /** @@ -69,7 +74,13 @@ static inline nix_value * as_nix_value_ptr(nix::Value * v) * Deals with errors and converts arguments from C++ into C types. */ static void nix_c_primop_wrapper( - PrimOpFun f, void * userdata, nix::EvalState & state, const nix::PosIdx pos, nix::Value ** args, nix::Value & v) + PrimOpFun f, + void * userdata, + int arity, + nix::EvalState & state, + const nix::PosIdx pos, + nix::Value ** args, + nix::Value & v) { nix_c_context ctx; @@ -85,8 +96,15 @@ static void nix_c_primop_wrapper( // ok because we don't see a need for this yet (e.g. inspecting thunks, // or maybe something to make blackholes work better; we don't know). 
nix::Value vTmp; + nix_value * vTmpPtr = new_nix_value(&vTmp, state.mem); - f(userdata, &ctx, (EvalState *) &state, (nix_value **) args, (nix_value *) &vTmp); + std::vector external_args; + external_args.reserve(arity); + for (int i = 0; i < arity; i++) { + nix_value * external_arg = new_nix_value(args[i], state.mem); + external_args.push_back(external_arg); + } + f(userdata, &ctx, (EvalState *) &state, external_args.data(), vTmpPtr); if (ctx.last_err_code != NIX_OK) { /* TODO: Throw different errors depending on the error code */ @@ -135,7 +153,7 @@ PrimOp * nix_alloc_primop( .args = {}, .arity = (size_t) arity, .doc = doc, - .fun = std::bind(nix_c_primop_wrapper, fun, user_data, _1, _2, _3, _4)}; + .fun = std::bind(nix_c_primop_wrapper, fun, user_data, arity, _1, _2, _3, _4)}; if (args) for (size_t i = 0; args[i]; i++) p->args.emplace_back(*args); @@ -160,8 +178,7 @@ nix_value * nix_alloc_value(nix_c_context * context, EvalState * state) if (context) context->last_err_code = NIX_OK; try { - nix_value * res = as_nix_value_ptr(state->state.allocValue()); - nix_gc_incref(nullptr, res); + nix_value * res = new_nix_value(state->state.allocValue(), state->state.mem); return res; } NIXC_CATCH_ERRS_NULL @@ -237,7 +254,7 @@ nix_get_string(nix_c_context * context, const nix_value * value, nix_get_string_ try { auto & v = check_value_in(value); assert(v.type() == nix::nString); - call_nix_get_string_callback(v.c_str(), callback, user_data); + call_nix_get_string_callback(v.string_view(), callback, user_data); } NIXC_CATCH_ERRS } @@ -333,10 +350,10 @@ nix_value * nix_get_list_byidx(nix_c_context * context, const nix_value * value, return nullptr; } auto * p = v.listView()[ix]; - nix_gc_incref(nullptr, p); - if (p != nullptr) - state->state.forceValue(*p, nix::noPos); - return as_nix_value_ptr(p); + if (p == nullptr) + return nullptr; + state->state.forceValue(*p, nix::noPos); + return new_nix_value(p, state->state.mem); } NIXC_CATCH_ERRS_NULL } @@ -354,9 +371,8 @@ 
nix_get_list_byidx_lazy(nix_c_context * context, const nix_value * value, EvalSt return nullptr; } auto * p = v.listView()[ix]; - nix_gc_incref(nullptr, p); // Note: intentionally NOT calling forceValue() to keep the element lazy - return as_nix_value_ptr(p); + return new_nix_value(p, state->state.mem); } NIXC_CATCH_ERRS_NULL } @@ -371,10 +387,9 @@ nix_value * nix_get_attr_byname(nix_c_context * context, const nix_value * value nix::Symbol s = state->state.symbols.create(name); auto attr = v.attrs()->get(s); if (attr) { - nix_gc_incref(nullptr, attr->value); state->state.forceValue(*attr->value, nix::noPos); state->state.waitForAllPaths(); - return as_nix_value_ptr(attr->value); + return new_nix_value(attr->value, state->state.mem); } nix_set_err_msg(context, NIX_ERR_KEY, "missing attribute"); return nullptr; @@ -393,9 +408,8 @@ nix_get_attr_byname_lazy(nix_c_context * context, const nix_value * value, EvalS nix::Symbol s = state->state.symbols.create(name); auto attr = v.attrs()->get(s); if (attr) { - nix_gc_incref(nullptr, attr->value); // Note: intentionally NOT calling forceValue() to keep the attribute lazy - return as_nix_value_ptr(attr->value); + return new_nix_value(attr->value, state->state.mem); } nix_set_err_msg(context, NIX_ERR_KEY, "missing attribute"); return nullptr; @@ -443,9 +457,8 @@ nix_get_attr_byidx(nix_c_context * context, nix_value * value, EvalState * state } const nix::Attr & a = (*v.attrs())[i]; *name = state->state.symbols[a.name].c_str(); - nix_gc_incref(nullptr, a.value); state->state.forceValue(*a.value, nix::noPos); - return as_nix_value_ptr(a.value); + return new_nix_value(a.value, state->state.mem); } NIXC_CATCH_ERRS_NULL } @@ -464,9 +477,8 @@ nix_value * nix_get_attr_byidx_lazy( } const nix::Attr & a = (*v.attrs())[i]; *name = state->state.symbols[a.name].c_str(); - nix_gc_incref(nullptr, a.value); // Note: intentionally NOT calling forceValue() to keep the attribute lazy - return as_nix_value_ptr(a.value); + return 
new_nix_value(a.value, state->state.mem); } NIXC_CATCH_ERRS_NULL } @@ -506,7 +518,7 @@ nix_err nix_init_string(nix_c_context * context, nix_value * value, const char * context->last_err_code = NIX_OK; try { auto & v = check_value_out(value); - v.mkString(std::string_view(str)); + v.mkString(std::string_view(str), *value->mem); } NIXC_CATCH_ERRS } @@ -517,7 +529,7 @@ nix_err nix_init_path_string(nix_c_context * context, EvalState * s, nix_value * context->last_err_code = NIX_OK; try { auto & v = check_value_out(value); - v.mkPath(s->state.rootPath(nix::CanonPath(str))); + v.mkPath(s->state.rootPath(nix::CanonPath(str)), s->state.mem); } NIXC_CATCH_ERRS } diff --git a/src/libexpr-c/nix_api_value.h b/src/libexpr-c/nix_api_value.h index 7540ba77d22..a01bfb28059 100644 --- a/src/libexpr-c/nix_api_value.h +++ b/src/libexpr-c/nix_api_value.h @@ -1,9 +1,6 @@ #ifndef NIX_API_VALUE_H #define NIX_API_VALUE_H -/** @addtogroup libexpr - * @{ - */ /** @file * @brief libexpr C bindings dealing with values */ @@ -20,19 +17,92 @@ extern "C" { #endif // cffi start +/** @defgroup value Value + * @ingroup libexpr + * @brief nix_value type and core operations for working with Nix values + * @see value_create + * @see value_extract + */ + +/** @defgroup value_create Value Creation + * @ingroup libexpr + * @brief Functions for allocating and initializing Nix values + * + * Values are usually created with `nix_alloc_value` followed by `nix_init_*` functions. + * In primop callbacks, allocation is already done and only initialization is needed. + */ + +/** @defgroup value_extract Value Extraction + * @ingroup libexpr + * @brief Functions for extracting data from Nix values + */ + +/** @defgroup primops PrimOps and Builtins + * @ingroup libexpr + */ + // Type definitions +/** @brief Represents the state of a Nix value + * + * Thunk values (NIX_TYPE_THUNK) change to their final, unchanging type when forced. 
+ * + * @see https://nix.dev/manual/nix/latest/language/evaluation.html + * @enum ValueType + * @ingroup value + */ typedef enum { + /** Unevaluated expression + * + * Thunks often contain an expression and closure, but may contain other + * representations too. + * + * Their state is mutable, unlike that of the other types. + */ NIX_TYPE_THUNK, + /** + * A 64 bit signed integer. + */ NIX_TYPE_INT, + /** @brief IEEE 754 double precision floating point number + * @see https://nix.dev/manual/nix/latest/language/types.html#type-float + */ NIX_TYPE_FLOAT, + /** @brief Boolean true or false value + * @see https://nix.dev/manual/nix/latest/language/types.html#type-bool + */ NIX_TYPE_BOOL, + /** @brief String value with context + * + * String content may contain arbitrary bytes, not necessarily UTF-8. + * @see https://nix.dev/manual/nix/latest/language/types.html#type-string + */ NIX_TYPE_STRING, + /** @brief Filesystem path + * @see https://nix.dev/manual/nix/latest/language/types.html#type-path + */ NIX_TYPE_PATH, + /** @brief Null value + * @see https://nix.dev/manual/nix/latest/language/types.html#type-null + */ NIX_TYPE_NULL, + /** @brief Attribute set (key-value mapping) + * @see https://nix.dev/manual/nix/latest/language/types.html#type-attrs + */ NIX_TYPE_ATTRS, + /** @brief Ordered list of values + * @see https://nix.dev/manual/nix/latest/language/types.html#type-list + */ NIX_TYPE_LIST, + /** @brief Function (lambda or builtin) + * @see https://nix.dev/manual/nix/latest/language/types.html#type-function + */ NIX_TYPE_FUNCTION, + /** @brief External value from C++ plugins or C API + * @see Externals + */ NIX_TYPE_EXTERNAL, + /** @brief Failed value. Contains an exception that can be rethrown. 
+ */ NIX_TYPE_FAILED, } ValueType; @@ -40,22 +110,41 @@ typedef enum { typedef struct nix_value nix_value; typedef struct EvalState EvalState; +/** @deprecated Use nix_value instead */ [[deprecated("use nix_value instead")]] typedef nix_value Value; // type defs /** @brief Stores an under-construction set of bindings - * @ingroup value_manip + * @ingroup value_create + * + * Each builder can only be used once. After calling nix_make_attrs(), the builder + * becomes invalid and must not be used again. Call nix_bindings_builder_free() to release it. + * + * Typical usage pattern: + * 1. Create with nix_make_bindings_builder() + * 2. Insert attributes with nix_bindings_builder_insert() + * 3. Create final attribute set with nix_make_attrs() + * 4. Free builder with nix_bindings_builder_free() * - * Do not reuse. + * @struct BindingsBuilder * @see nix_make_bindings_builder, nix_bindings_builder_free, nix_make_attrs * @see nix_bindings_builder_insert */ typedef struct BindingsBuilder BindingsBuilder; /** @brief Stores an under-construction list - * @ingroup value_manip + * @ingroup value_create * - * Do not reuse. + * Each builder can only be used once. After calling nix_make_list(), the builder + * becomes invalid and must not be used again. Call nix_list_builder_free() to release it. + * + * Typical usage pattern: + * 1. Create with nix_make_list_builder() + * 2. Insert elements with nix_list_builder_insert() + * 3. Create final list with nix_make_list() + * 4. Free builder with nix_list_builder_free() + * + * @struct ListBuilder * @see nix_make_list_builder, nix_list_builder_free, nix_make_list * @see nix_list_builder_insert */ @@ -64,25 +153,28 @@ typedef struct ListBuilder ListBuilder; /** @brief PrimOp function * @ingroup primops * - * Owned by the GC - * @see nix_alloc_primop, nix_init_primop + * Can be released with nix_gc_decref() when necessary. 
+ * @struct PrimOp + * @see nix_alloc_primop, nix_init_primop, nix_register_primop */ typedef struct PrimOp PrimOp; /** @brief External Value * @ingroup Externals * - * Owned by the GC + * Can be released with nix_gc_decref() when necessary. + * @struct ExternalValue + * @see nix_create_external_value, nix_init_external, nix_get_external */ typedef struct ExternalValue ExternalValue; /** @brief String without placeholders, and realised store paths + * @struct nix_realised_string + * @see nix_string_realise, nix_realised_string_free */ typedef struct nix_realised_string nix_realised_string; -/** @defgroup primops Adding primops - * @{ - */ /** @brief Function pointer for primops + * @ingroup primops * * When you want to return an error, call nix_set_err_msg(context, NIX_ERR_UNKNOWN, "your error message here"). * @@ -98,9 +190,9 @@ typedef void (*PrimOpFun)( void * user_data, nix_c_context * context, EvalState * state, nix_value ** args, nix_value * ret); /** @brief Allocate a PrimOp + * @ingroup primops * - * Owned by the garbage collector. - * Use nix_gc_decref() when you're done with the returned PrimOp. + * Call nix_gc_decref() when you're done with the returned PrimOp. * * @param[out] context Optional, stores error information * @param[in] fun callback @@ -122,35 +214,38 @@ PrimOp * nix_alloc_primop( void * user_data); /** @brief add a primop to the `builtins` attribute set + * @ingroup primops * * Only applies to States created after this call. * - * Moves your PrimOp content into the global evaluator - * registry, meaning your input PrimOp pointer is no longer usable. - * You are free to remove your references to it, - * after which it will be garbage collected. + * Moves your PrimOp content into the global evaluator registry, meaning + * your input PrimOp pointer becomes invalid. The PrimOp must not be used + * with nix_init_primop() before or after this call, as this would cause + * undefined behavior. 
+ * You must call nix_gc_decref() on the original PrimOp pointer + * after this call to release your reference. * * @param[out] context Optional, stores error information - * @return primop, or null in case of errors - * + * @param[in] primOp PrimOp to register + * @return error code, NIX_OK on success */ nix_err nix_register_primop(nix_c_context * context, PrimOp * primOp); -/** @} */ // Function prototypes /** @brief Allocate a Nix value + * @ingroup value_create * - * Owned by the GC. Use nix_gc_decref() when you're done with the pointer + * Call nix_value_decref() when you're done with the pointer * @param[out] context Optional, stores error information * @param[in] state nix evaluator state * @return value, or null in case of errors - * */ nix_value * nix_alloc_value(nix_c_context * context, EvalState * state); /** * @brief Increment the garbage collector reference counter for the given `nix_value`. + * @ingroup value * * The Nix language evaluator C API keeps track of alive objects by reference counting. * When you're done with a refcounted pointer, call nix_value_decref(). @@ -162,21 +257,19 @@ nix_err nix_value_incref(nix_c_context * context, nix_value * value); /** * @brief Decrement the garbage collector reference counter for the given object + * @ingroup value + * + * When the counter reaches zero, the `nix_value` object becomes invalid. + * The data referenced by `nix_value` may not be deallocated until the memory + * garbage collector has run, but deallocation is not guaranteed. * * @param[out] context Optional, stores error information * @param[in] value The object to stop referencing */ nix_err nix_value_decref(nix_c_context * context, nix_value * value); -/** @addtogroup value_manip Manipulating values - * @brief Functions to inspect and change Nix language values, represented by nix_value. 
- * @{ - */ -/** @anchor getters - * @name Getters - */ -/**@{*/ /** @brief Get value type + * @ingroup value_extract * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @return type of nix value @@ -184,14 +277,15 @@ nix_err nix_value_decref(nix_c_context * context, nix_value * value); ValueType nix_get_type(nix_c_context * context, const nix_value * value); /** @brief Get type name of value as defined in the evaluator + * @ingroup value_extract * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect - * @return type name, owned string - * @todo way to free the result + * @return type name string, free with free() */ const char * nix_get_typename(nix_c_context * context, const nix_value * value); /** @brief Get boolean value + * @ingroup value_extract * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @return true or false, error info via context @@ -199,6 +293,7 @@ const char * nix_get_typename(nix_c_context * context, const nix_value * value); bool nix_get_bool(nix_c_context * context, const nix_value * value); /** @brief Get the raw string + * @ingroup value_extract * * This may contain placeholders. * @@ -206,21 +301,21 @@ bool nix_get_bool(nix_c_context * context, const nix_value * value); * @param[in] value Nix value to inspect * @param[in] callback Called with the string value. * @param[in] user_data optional, arbitrary data, passed to the callback when it's called. - * @return string * @return error code, NIX_OK on success. */ nix_err nix_get_string(nix_c_context * context, const nix_value * value, nix_get_string_callback callback, void * user_data); /** @brief Get path as string + * @ingroup value_extract * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect - * @return string, if the type is NIX_TYPE_PATH - * @return NULL in case of error. 
+ * @return string valid while value is valid, NULL in case of error */ const char * nix_get_path_string(nix_c_context * context, const nix_value * value); /** @brief Get the length of a list + * @ingroup value_extract * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @return length of list, error info via context @@ -228,6 +323,7 @@ const char * nix_get_path_string(nix_c_context * context, const nix_value * valu unsigned int nix_get_list_size(nix_c_context * context, const nix_value * value); /** @brief Get the element count of an attrset + * @ingroup value_extract * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @return attrset element count, error info via context @@ -235,6 +331,7 @@ unsigned int nix_get_list_size(nix_c_context * context, const nix_value * value) unsigned int nix_get_attrs_size(nix_c_context * context, const nix_value * value); /** @brief Get float value in 64 bits + * @ingroup value_extract * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @return float contents, error info via context @@ -242,6 +339,7 @@ unsigned int nix_get_attrs_size(nix_c_context * context, const nix_value * value double nix_get_float(nix_c_context * context, const nix_value * value); /** @brief Get int value + * @ingroup value_extract * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @return int contents, error info via context @@ -249,15 +347,18 @@ double nix_get_float(nix_c_context * context, const nix_value * value); int64_t nix_get_int(nix_c_context * context, const nix_value * value); /** @brief Get external reference + * @ingroup value_extract * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect - * @return reference to external, NULL in case of error + * @return reference valid while value is valid. 
Call nix_gc_incref() if you need it to live longer, then only in that + * case call nix_gc_decref() when done. NULL in case of error */ ExternalValue * nix_get_external(nix_c_context * context, nix_value * value); /** @brief Get the ix'th element of a list + * @ingroup value_extract * - * Owned by the GC. Use nix_gc_decref when you're done with the pointer + * Call nix_value_decref() when you're done with the pointer * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @param[in] state nix evaluator state @@ -267,11 +368,12 @@ ExternalValue * nix_get_external(nix_c_context * context, nix_value * value); nix_value * nix_get_list_byidx(nix_c_context * context, const nix_value * value, EvalState * state, unsigned int ix); /** @brief Get the ix'th element of a list without forcing evaluation of the element + * @ingroup value_extract * * Returns the list element without forcing its evaluation, allowing access to lazy values. * The list value itself must already be evaluated. * - * Owned by the GC. 
Use nix_gc_decref when you're done with the pointer + * Call nix_value_decref() when you're done with the pointer * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect (must be an evaluated list) * @param[in] state nix evaluator state @@ -282,8 +384,9 @@ nix_value * nix_get_list_byidx_lazy(nix_c_context * context, const nix_value * value, EvalState * state, unsigned int ix); /** @brief Get an attr by name + * @ingroup value_extract * - * Use nix_gc_decref when you're done with the pointer + * Call nix_value_decref() when you're done with the pointer * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @param[in] state nix evaluator state @@ -293,11 +396,12 @@ nix_get_list_byidx_lazy(nix_c_context * context, const nix_value * value, EvalSt nix_value * nix_get_attr_byname(nix_c_context * context, const nix_value * value, EvalState * state, const char * name); /** @brief Get an attribute value by attribute name, without forcing evaluation of the attribute's value + * @ingroup value_extract * * Returns the attribute value without forcing its evaluation, allowing access to lazy values. * The attribute set value itself must already be evaluated. 
* - * Use nix_gc_decref when you're done with the pointer + * Call nix_value_decref() when you're done with the pointer * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect (must be an evaluated attribute set) * @param[in] state nix evaluator state @@ -308,6 +412,7 @@ nix_value * nix_get_attr_byname_lazy(nix_c_context * context, const nix_value * value, EvalState * state, const char * name); /** @brief Check if an attribute name exists on a value + * @ingroup value_extract * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @param[in] state nix evaluator state @@ -317,6 +422,7 @@ nix_get_attr_byname_lazy(nix_c_context * context, const nix_value * value, EvalS bool nix_has_attr_byname(nix_c_context * context, const nix_value * value, EvalState * state, const char * name); /** @brief Get an attribute by index + * @ingroup value_extract * * Also gives you the name. * @@ -330,18 +436,19 @@ bool nix_has_attr_byname(nix_c_context * context, const nix_value * value, EvalS * lexicographic order by Unicode scalar value for valid UTF-8). We recommend * applying this same ordering for consistency. * - * Use nix_gc_decref when you're done with the pointer + * Call nix_value_decref() when you're done with the pointer * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @param[in] state nix evaluator state * @param[in] i attribute index - * @param[out] name will store a pointer to the attribute name + * @param[out] name will store a pointer to the attribute name, valid until state is freed * @return value, NULL in case of errors */ nix_value * nix_get_attr_byidx(nix_c_context * context, nix_value * value, EvalState * state, unsigned int i, const char ** name); /** @brief Get an attribute by index, without forcing evaluation of the attribute's value + * @ingroup value_extract * * Also gives you the name. 
* @@ -358,18 +465,19 @@ nix_get_attr_byidx(nix_c_context * context, nix_value * value, EvalState * state * lexicographic order by Unicode scalar value for valid UTF-8). We recommend * applying this same ordering for consistency. * - * Use nix_gc_decref when you're done with the pointer + * Call nix_value_decref() when you're done with the pointer * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect (must be an evaluated attribute set) * @param[in] state nix evaluator state * @param[in] i attribute index - * @param[out] name will store a pointer to the attribute name + * @param[out] name will store a pointer to the attribute name, valid until state is freed * @return value, NULL in case of errors */ nix_value * nix_get_attr_byidx_lazy( nix_c_context * context, nix_value * value, EvalState * state, unsigned int i, const char ** name); /** @brief Get an attribute name by index + * @ingroup value_extract * * Returns the attribute name without forcing evaluation of the attribute's value. * @@ -383,16 +491,14 @@ nix_value * nix_get_attr_byidx_lazy( * lexicographic order by Unicode scalar value for valid UTF-8). We recommend * applying this same ordering for consistency. * - * Owned by the nix EvalState * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @param[in] state nix evaluator state * @param[in] i attribute index - * @return name, NULL in case of errors + * @return name string valid until state is freed, NULL in case of errors */ const char * nix_get_attr_name_byidx(nix_c_context * context, nix_value * value, EvalState * state, unsigned int i); -/**@}*/ /** @name Initializers * * Values are typically "returned" by initializing already allocated memory that serves as the return value. 
@@ -402,6 +508,7 @@ const char * nix_get_attr_name_byidx(nix_c_context * context, nix_value * value, */ /**@{*/ /** @brief Set boolean value + * @ingroup value_create * @param[out] context Optional, stores error information * @param[out] value Nix value to modify * @param[in] b the boolean value @@ -410,6 +517,7 @@ const char * nix_get_attr_name_byidx(nix_c_context * context, nix_value * value, nix_err nix_init_bool(nix_c_context * context, nix_value * value, bool b); /** @brief Set a string + * @ingroup value_create * @param[out] context Optional, stores error information * @param[out] value Nix value to modify * @param[in] str the string, copied @@ -418,6 +526,7 @@ nix_err nix_init_bool(nix_c_context * context, nix_value * value, bool b); nix_err nix_init_string(nix_c_context * context, nix_value * value, const char * str); /** @brief Set a path + * @ingroup value_create * @param[out] context Optional, stores error information * @param[out] value Nix value to modify * @param[in] str the path string, copied @@ -426,6 +535,7 @@ nix_err nix_init_string(nix_c_context * context, nix_value * value, const char * nix_err nix_init_path_string(nix_c_context * context, EvalState * s, nix_value * value, const char * str); /** @brief Set a float + * @ingroup value_create * @param[out] context Optional, stores error information * @param[out] value Nix value to modify * @param[in] d the float, 64-bits @@ -434,6 +544,7 @@ nix_err nix_init_path_string(nix_c_context * context, EvalState * s, nix_value * nix_err nix_init_float(nix_c_context * context, nix_value * value, double d); /** @brief Set an int + * @ingroup value_create * @param[out] context Optional, stores error information * @param[out] value Nix value to modify * @param[in] i the int @@ -442,6 +553,7 @@ nix_err nix_init_float(nix_c_context * context, nix_value * value, double d); nix_err nix_init_int(nix_c_context * context, nix_value * value, int64_t i); /** @brief Set null + * @ingroup value_create * @param[out] 
context Optional, stores error information * @param[out] value Nix value to modify * @return error code, NIX_OK on success. @@ -449,6 +561,7 @@ nix_err nix_init_int(nix_c_context * context, nix_value * value, int64_t i); nix_err nix_init_null(nix_c_context * context, nix_value * value); /** @brief Set the value to a thunk that will perform a function application when needed. + * @ingroup value_create * * Thunks may be put into attribute sets and lists to perform some computation lazily; on demand. * However, note that in some places, a thunk must not be returned, such as in the return value of a PrimOp. @@ -465,6 +578,7 @@ nix_err nix_init_null(nix_c_context * context, nix_value * value); nix_err nix_init_apply(nix_c_context * context, nix_value * value, nix_value * fn, nix_value * arg); /** @brief Set an external value + * @ingroup value_create * @param[out] context Optional, stores error information * @param[out] value Nix value to modify * @param[in] val the external value to set. Will be GC-referenced by the value. @@ -473,18 +587,25 @@ nix_err nix_init_apply(nix_c_context * context, nix_value * value, nix_value * f nix_err nix_init_external(nix_c_context * context, nix_value * value, ExternalValue * val); /** @brief Create a list from a list builder + * @ingroup value_create + * + * After this call, the list builder becomes invalid and cannot be used again. + * The only necessary next step is to free it with nix_list_builder_free(). + * * @param[out] context Optional, stores error information - * @param[in] list_builder list builder to use. Make sure to unref this afterwards. + * @param[in] list_builder list builder to use * @param[out] value Nix value to modify * @return error code, NIX_OK on success. 
+ * @see nix_list_builder_free */ nix_err nix_make_list(nix_c_context * context, ListBuilder * list_builder, nix_value * value); /** @brief Create a list builder + * @ingroup value_create * @param[out] context Optional, stores error information * @param[in] state nix evaluator state * @param[in] capacity how many bindings you'll add. Don't exceed. - * @return owned reference to a list builder. Make sure to unref when you're done. + * @return list builder. Call nix_list_builder_free() when you're done. */ ListBuilder * nix_make_list_builder(nix_c_context * context, EvalState * state, size_t capacity); @@ -506,14 +627,21 @@ nix_list_builder_insert(nix_c_context * context, ListBuilder * list_builder, uns void nix_list_builder_free(ListBuilder * list_builder); /** @brief Create an attribute set from a bindings builder + * @ingroup value_create + * + * After this call, the bindings builder becomes invalid and cannot be used again. + * The only necessary next step is to free it with nix_bindings_builder_free(). + * * @param[out] context Optional, stores error information * @param[out] value Nix value to modify - * @param[in] b bindings builder to use. Make sure to unref this afterwards. + * @param[in] b bindings builder to use * @return error code, NIX_OK on success. 
+ * @see nix_bindings_builder_free */ nix_err nix_make_attrs(nix_c_context * context, nix_value * value, BindingsBuilder * b); /** @brief Set primop + * @ingroup value_create * @param[out] context Optional, stores error information * @param[out] value Nix value to modify * @param[in] op primop, will be gc-referenced by the value @@ -522,6 +650,7 @@ nix_err nix_make_attrs(nix_c_context * context, nix_value * value, BindingsBuild */ nix_err nix_init_primop(nix_c_context * context, nix_value * value, PrimOp * op); /** @brief Copy from another value + * @ingroup value_create * @param[out] context Optional, stores error information * @param[out] value Nix value to modify * @param[in] source value to copy from @@ -531,12 +660,11 @@ nix_err nix_copy_value(nix_c_context * context, nix_value * value, const nix_val /**@}*/ /** @brief Create a bindings builder -* @param[out] context Optional, stores error information -* @param[in] state nix evaluator state -* @param[in] capacity how many bindings you'll add. Don't exceed. -* @return owned reference to a bindings builder. Make sure to unref when you're -done. -*/ + * @param[out] context Optional, stores error information + * @param[in] state nix evaluator state + * @param[in] capacity how many bindings you'll add. Don't exceed. + * @return bindings builder. Call nix_bindings_builder_free() when you're done. + */ BindingsBuilder * nix_make_bindings_builder(nix_c_context * context, EvalState * state, size_t capacity); /** @brief Insert bindings into a builder @@ -555,7 +683,6 @@ nix_bindings_builder_insert(nix_c_context * context, BindingsBuilder * builder, * @param[in] builder the builder to free */ void nix_bindings_builder_free(BindingsBuilder * builder); -/**@}*/ /** @brief Realise a string context. * @@ -572,13 +699,13 @@ void nix_bindings_builder_free(BindingsBuilder * builder); * @param[in] isIFD If true, disallow derivation outputs if setting `allow-import-from-derivation` is false. 
You should set this to true when this call is part of a primop. You should set this to false when building for your application's purpose. - * @return NULL if failed, are a new nix_realised_string, which must be freed with nix_realised_string_free + * @return NULL if failed, or a new nix_realised_string, which must be freed with nix_realised_string_free */ nix_realised_string * nix_string_realise(nix_c_context * context, EvalState * state, nix_value * value, bool isIFD); /** @brief Start of the string * @param[in] realised_string - * @return pointer to the start of the string. It may not be null-terminated. + * @return pointer to the start of the string, valid until realised_string is freed. It may not be null-terminated. */ const char * nix_realised_string_get_buffer_start(nix_realised_string * realised_string); @@ -597,7 +724,7 @@ size_t nix_realised_string_get_store_path_count(nix_realised_string * realised_s /** @brief Get a store path. The store paths are stored in an arbitrary order. 
* @param[in] realised_string * @param[in] index index of the store path, must be less than the count - * @return store path + * @return store path valid until realised_string is freed */ const StorePath * nix_realised_string_get_store_path(nix_realised_string * realised_string, size_t index); @@ -611,5 +738,4 @@ void nix_realised_string_free(nix_realised_string * realised_string); } #endif -/** @} */ #endif // NIX_API_VALUE_H diff --git a/src/libexpr-test-support/include/nix/expr/tests/libexpr.hh b/src/libexpr-test-support/include/nix/expr/tests/libexpr.hh index a1320e14a25..658a6ffe0a3 100644 --- a/src/libexpr-test-support/include/nix/expr/tests/libexpr.hh +++ b/src/libexpr-test-support/include/nix/expr/tests/libexpr.hh @@ -104,9 +104,10 @@ MATCHER(IsAttrs, "") MATCHER_P(IsStringEq, s, fmt("The string is equal to \"%1%\"", s)) { if (arg.type() != nString) { + *result_listener << "Expected a string got " << arg.type(); return false; } - return std::string_view(arg.c_str()) == s; + return arg.string_view() == s; } MATCHER_P(IsIntEq, v, fmt("The string is equal to \"%1%\"", v)) diff --git a/src/libexpr-test-support/meson.build b/src/libexpr-test-support/meson.build index 01a3f3bcbbf..df28661b7e7 100644 --- a/src/libexpr-test-support/meson.build +++ b/src/libexpr-test-support/meson.build @@ -31,7 +31,6 @@ rapidcheck = dependency('rapidcheck') deps_public += rapidcheck subdir('nix-meson-build-support/common') -subdir('nix-meson-build-support/asan-options') sources = files( 'tests/value/context.cc', diff --git a/src/libexpr-tests/error_traces.cc b/src/libexpr-tests/error_traces.cc index 7e7b5eb846b..e722cc48499 100644 --- a/src/libexpr-tests/error_traces.cc +++ b/src/libexpr-tests/error_traces.cc @@ -139,63 +139,6 @@ TEST_F(ErrorTraceTest, NestedThrows) #define ASSERT_DERIVATION_TRACE3(args, type, message, context1, context2) \ ASSERT_TRACE4(args, type, message, context1, context2, DERIVATION_TRACE_HINTFMT("foo")) -TEST_F(ErrorTraceTest, genericClosure) -{ - 
ASSERT_TRACE2( - "genericClosure 1", - TypeError, - HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.genericClosure")); - - ASSERT_TRACE2( - "genericClosure {}", - TypeError, - HintFmt("attribute '%s' missing", "startSet"), - HintFmt("in the attrset passed as argument to builtins.genericClosure")); - - ASSERT_TRACE2( - "genericClosure { startSet = 1; }", - TypeError, - HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the 'startSet' attribute passed as argument to builtins.genericClosure")); - - ASSERT_TRACE2( - "genericClosure { startSet = [{ key = 1;}]; operator = true; }", - TypeError, - HintFmt("expected a function but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), - HintFmt("while evaluating the 'operator' attribute passed as argument to builtins.genericClosure")); - - ASSERT_TRACE2( - "genericClosure { startSet = [{ key = 1;}]; operator = item: true; }", - TypeError, - HintFmt("expected a list but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), - HintFmt("while evaluating the return value of the `operator` passed to builtins.genericClosure")); - - ASSERT_TRACE2( - "genericClosure { startSet = [{ key = 1;}]; operator = item: [ true ]; }", - TypeError, - HintFmt("expected a set but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), - HintFmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); - - ASSERT_TRACE2( - "genericClosure { startSet = [{ key = 1;}]; operator = item: [ {} ]; }", - TypeError, - HintFmt("attribute '%s' missing", "key"), - HintFmt("in one of the attrsets generated by (or initially passed to) builtins.genericClosure")); - - ASSERT_TRACE2( - "genericClosure { startSet = [{ key = 1;}]; operator = item: [{ key = ''a''; }]; }", - EvalError, - 
HintFmt("cannot compare %s with %s", "a string", "an integer"), - HintFmt("while comparing the `key` attributes of two genericClosure elements")); - - ASSERT_TRACE2( - "genericClosure { startSet = [ true ]; operator = item: [{ key = ''a''; }]; }", - TypeError, - HintFmt("expected a set but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), - HintFmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); -} - TEST_F(ErrorTraceTest, replaceStrings) { ASSERT_TRACE2( @@ -1050,17 +993,35 @@ TEST_F(ErrorTraceTest, bitXor) TEST_F(ErrorTraceTest, lessThan) { - ASSERT_TRACE1("lessThan 1 \"foo\"", EvalError, HintFmt("cannot compare %s with %s", "an integer", "a string")); + ASSERT_TRACE1( + "lessThan 1 \"foo\"", + EvalError, + HintFmt( + "cannot compare %s with %s; values are %s and %s", + "an integer", + "a string", + Uncolored(ANSI_CYAN "1" ANSI_NORMAL), + Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL))); ASSERT_TRACE1( "lessThan {} {}", EvalError, - HintFmt("cannot compare %s with %s; values of that type are incomparable", "a set", "a set")); + HintFmt( + "cannot compare %s with %s; values of that type are incomparable (values are %s and %s)", + "a set", + "a set", + Uncolored("{ }"), + Uncolored("{ }"))); ASSERT_TRACE2( "lessThan [ 1 2 ] [ \"foo\" ]", EvalError, - HintFmt("cannot compare %s with %s", "an integer", "a string"), + HintFmt( + "cannot compare %s with %s; values are %s and %s", + "an integer", + "a string", + Uncolored(ANSI_CYAN "1" ANSI_NORMAL), + Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), HintFmt("while comparing two list elements")); } diff --git a/src/libexpr-tests/json.cc b/src/libexpr-tests/json.cc index 8b1bd7d96d9..31e7a18c5bd 100644 --- a/src/libexpr-tests/json.cc +++ b/src/libexpr-tests/json.cc @@ -1,5 +1,6 @@ #include "nix/expr/tests/libexpr.hh" #include "nix/expr/value-to-json.hh" +#include "nix/expr/static-string-data.hh" namespace nix { // Testing the conversion to JSON @@ 
-54,7 +55,7 @@ TEST_F(JSONValueTest, IntNegative) TEST_F(JSONValueTest, String) { Value v; - v.mkStringNoCopy("test"); + v.mkStringNoCopy("test"_sds); ASSERT_EQ(getJSONValue(v), "\"test\""); } @@ -62,7 +63,7 @@ TEST_F(JSONValueTest, StringQuotes) { Value v; - v.mkStringNoCopy("test\""); + v.mkStringNoCopy("test\""_sds); ASSERT_EQ(getJSONValue(v), "\"test\\\"\""); } @@ -72,7 +73,7 @@ TEST_F(JSONValueTest, StringQuotes) TEST_F(JSONValueTest, DISABLED_Path) { Value v; - v.mkPath(state.rootPath(CanonPath("/test"))); + v.mkPath(state.rootPath(CanonPath("/test")), state.mem); ASSERT_EQ(getJSONValue(v), "\"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x\""); } } /* namespace nix */ diff --git a/src/libexpr-tests/meson.build b/src/libexpr-tests/meson.build index 7f7c08955c0..c5dafe0de84 100644 --- a/src/libexpr-tests/meson.build +++ b/src/libexpr-tests/meson.build @@ -45,7 +45,6 @@ config_priv_h = configure_file( ) subdir('nix-meson-build-support/common') -subdir('nix-meson-build-support/asan-options') sources = files( 'derived-path.cc', @@ -83,7 +82,7 @@ this_exe = executable( test( meson.project_name(), this_exe, - env : asan_test_options_env + { + env : { '_NIX_TEST_UNIT_DATA' : meson.current_source_dir() / 'data', }, protocol : 'gtest', diff --git a/src/libexpr-tests/nix_api_expr.cc b/src/libexpr-tests/nix_api_expr.cc index de508b4e40b..c7e246c727f 100644 --- a/src/libexpr-tests/nix_api_expr.cc +++ b/src/libexpr-tests/nix_api_expr.cc @@ -14,17 +14,17 @@ namespace nixC { -TEST_F(nix_api_store_test, nix_eval_state_lookup_path) +TEST_F(nix_api_expr_test, nix_eval_state_lookup_path) { auto tmpDir = nix::createTempDir(); auto delTmpDir = std::make_unique(tmpDir, true); - auto nixpkgs = tmpDir + "/pkgs"; - auto nixos = tmpDir + "/cfg"; + auto nixpkgs = tmpDir / "pkgs"; + auto nixos = tmpDir / "cfg"; std::filesystem::create_directories(nixpkgs); std::filesystem::create_directories(nixos); - std::string nixpkgsEntry = "nixpkgs=" + nixpkgs; - std::string nixosEntry = 
"nixos-config=" + nixos; + std::string nixpkgsEntry = "nixpkgs=" + nixpkgs.string(); + std::string nixosEntry = "nixos-config=" + nixos.string(); const char * lookupPath[] = {nixpkgsEntry.c_str(), nixosEntry.c_str(), nullptr}; auto builder = nix_eval_state_builder_new(ctx, store); @@ -42,12 +42,16 @@ TEST_F(nix_api_store_test, nix_eval_state_lookup_path) nix_expr_eval_from_string(ctx, state, "builtins.seq ", ".", value); assert_ctx_ok(); + nix_state_free(state); + ASSERT_EQ(nix_get_type(ctx, value), NIX_TYPE_PATH); assert_ctx_ok(); auto pathStr = nix_get_path_string(ctx, value); assert_ctx_ok(); - ASSERT_EQ(0, strcmp(pathStr, nixpkgs.c_str())); + ASSERT_EQ(0, strcmp(pathStr, nixpkgs.string().c_str())); + + nix_gc_decref(nullptr, value); } TEST_F(nix_api_expr_test, nix_expr_eval_from_string) @@ -228,22 +232,22 @@ TEST_F(nix_api_expr_test, nix_expr_realise_context) nix_realised_string_free(r); } -const char * SAMPLE_USER_DATA = "whatever"; +static const char SAMPLE_USER_DATA = 0; static void primop_square(void * user_data, nix_c_context * context, EvalState * state, nix_value ** args, nix_value * ret) { assert(context); assert(state); - assert(user_data == SAMPLE_USER_DATA); + assert(user_data == &SAMPLE_USER_DATA); auto i = nix_get_int(context, args[0]); nix_init_int(context, ret, i * i); } TEST_F(nix_api_expr_test, nix_expr_primop) { - PrimOp * primop = - nix_alloc_primop(ctx, primop_square, 1, "square", nullptr, "square an integer", (void *) SAMPLE_USER_DATA); + PrimOp * primop = nix_alloc_primop( + ctx, primop_square, 1, "square", nullptr, "square an integer", const_cast(&SAMPLE_USER_DATA)); assert_ctx_ok(); nix_value * primopValue = nix_alloc_value(ctx, state); assert_ctx_ok(); @@ -269,7 +273,7 @@ primop_repeat(void * user_data, nix_c_context * context, EvalState * state, nix_ { assert(context); assert(state); - assert(user_data == SAMPLE_USER_DATA); + assert(user_data == &SAMPLE_USER_DATA); // Get the string to repeat std::string s; @@ -291,8 +295,8 @@ 
primop_repeat(void * user_data, nix_c_context * context, EvalState * state, nix_ TEST_F(nix_api_expr_test, nix_expr_primop_arity_2_multiple_calls) { - PrimOp * primop = - nix_alloc_primop(ctx, primop_repeat, 2, "repeat", nullptr, "repeat a string", (void *) SAMPLE_USER_DATA); + PrimOp * primop = nix_alloc_primop( + ctx, primop_repeat, 2, "repeat", nullptr, "repeat a string", const_cast(&SAMPLE_USER_DATA)); assert_ctx_ok(); nix_value * primopValue = nix_alloc_value(ctx, state); assert_ctx_ok(); @@ -326,8 +330,8 @@ TEST_F(nix_api_expr_test, nix_expr_primop_arity_2_multiple_calls) TEST_F(nix_api_expr_test, nix_expr_primop_arity_2_single_call) { - PrimOp * primop = - nix_alloc_primop(ctx, primop_repeat, 2, "repeat", nullptr, "repeat a string", (void *) SAMPLE_USER_DATA); + PrimOp * primop = nix_alloc_primop( + ctx, primop_repeat, 2, "repeat", nullptr, "repeat a string", const_cast(&SAMPLE_USER_DATA)); assert_ctx_ok(); nix_value * primopValue = nix_alloc_value(ctx, state); assert_ctx_ok(); diff --git a/src/libexpr-tests/nix_api_value_internal.cc b/src/libexpr-tests/nix_api_value_internal.cc index 34db6ac81c8..085b0798ff5 100644 --- a/src/libexpr-tests/nix_api_value_internal.cc +++ b/src/libexpr-tests/nix_api_value_internal.cc @@ -14,12 +14,4 @@ namespace nixC { -TEST_F(nix_api_expr_test, as_nix_value_ptr) -{ - // nix_alloc_value casts nix::Value to nix_value - // It should be obvious from the decl that that works, but if it doesn't, - // the whole implementation would be utterly broken. 
- ASSERT_EQ(sizeof(nix::Value), sizeof(nix_value)); -} - } // namespace nixC diff --git a/src/libexpr-tests/package.nix b/src/libexpr-tests/package.nix index c36aa2dc725..51d52e935bf 100644 --- a/src/libexpr-tests/package.nix +++ b/src/libexpr-tests/package.nix @@ -62,7 +62,6 @@ mkMesonExecutable (finalAttrs: { mkdir -p "$HOME" '' + '' - export ASAN_OPTIONS=abort_on_error=1:print_summary=1:detect_leaks=0 export _NIX_TEST_UNIT_DATA=${resolvePath ./data} ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} touch $out diff --git a/src/libexpr-tests/primops.cc b/src/libexpr-tests/primops.cc index 74d676844b7..36e3fa59803 100644 --- a/src/libexpr-tests/primops.cc +++ b/src/libexpr-tests/primops.cc @@ -661,8 +661,14 @@ INSTANTIATE_TEST_SUITE_P( CASE(R"(null)", ""), CASE(R"({ v = "bar"; __toString = self: self.v; })", "bar"), CASE(R"({ v = "bar"; __toString = self: self.v; outPath = "foo"; })", "bar"), - CASE(R"({ outPath = "foo"; })", "foo"), - CASE(R"(./test)", "/test"))); + CASE(R"({ outPath = "foo"; })", "foo") +// this is broken on cygwin because canonPath("//./test", false) returns //./test +// FIXME: don't use canonPath +#ifndef __CYGWIN__ + , + CASE(R"(./test)", "/test") +#endif + )); #undef CASE TEST_F(PrimOpTest, substring) @@ -771,7 +777,7 @@ TEST_F(PrimOpTest, derivation) ASSERT_EQ(v.type(), nFunction); ASSERT_TRUE(v.isLambda()); ASSERT_NE(v.lambda().fun, nullptr); - ASSERT_TRUE(v.lambda().fun->hasFormals()); + ASSERT_TRUE(v.lambda().fun->getFormals()); } TEST_F(PrimOpTest, currentTime) diff --git a/src/libexpr-tests/trivial.cc b/src/libexpr-tests/trivial.cc index a287ce4d185..d112c269a5f 100644 --- a/src/libexpr-tests/trivial.cc +++ b/src/libexpr-tests/trivial.cc @@ -1,4 +1,5 @@ #include "nix/expr/tests/libexpr.hh" +#include "nix/util/tests/gmock-matchers.hh" namespace nix { // Testing of trivial expressions @@ -160,7 +161,8 @@ TEST_F(TrivialExpressionTest, assertPassed) ASSERT_THAT(v, IsIntEq(123)); } -class 
AttrSetMergeTrvialExpressionTest : public TrivialExpressionTest, public testing::WithParamInterface +class AttrSetMergeTrvialExpressionTest : public TrivialExpressionTest, + public ::testing::WithParamInterface {}; TEST_P(AttrSetMergeTrvialExpressionTest, attrsetMergeLazy) @@ -196,7 +198,7 @@ TEST_P(AttrSetMergeTrvialExpressionTest, attrsetMergeLazy) INSTANTIATE_TEST_SUITE_P( attrsetMergeLazy, AttrSetMergeTrvialExpressionTest, - testing::Values("{ a.b = 1; a.c = 2; }", "{ a = { b = 1; }; a = { c = 2; }; }")); + ::testing::Values("{ a.b = 1; a.c = 2; }", "{ a = { b = 1; }; a = { c = 2; }; }")); // The following macros ultimately define 48 tests (16 variations on three // templates). Each template tests an expression that can be written in 2^4 @@ -339,4 +341,18 @@ TEST_F(TrivialExpressionTest, orCantBeUsed) { ASSERT_THROW(eval("let or = 1; in or"), Error); } + +TEST_F(TrivialExpressionTest, tooManyFormals) +{ + std::string expr = "let f = { "; + for (uint32_t i = 0; i <= std::numeric_limits::max(); ++i) { + expr += fmt("arg%d, ", i); + } + expr += " }: 0 in; f {}"; + ASSERT_THAT( + [&]() { eval(expr); }, + ::testing::ThrowsMessage(::nix::testing::HasSubstrIgnoreANSIMatcher( + "too many formal arguments, implementation supports at most 65535"))); +} + } /* namespace nix */ diff --git a/src/libexpr-tests/value/print.cc b/src/libexpr-tests/value/print.cc index 6cadbc70ac1..d226062197d 100644 --- a/src/libexpr-tests/value/print.cc +++ b/src/libexpr-tests/value/print.cc @@ -1,4 +1,5 @@ #include "nix/expr/tests/libexpr.hh" +#include "nix/expr/static-string-data.hh" #include "nix/expr/value.hh" #include "nix/expr/print.hh" @@ -35,14 +36,14 @@ TEST_F(ValuePrintingTests, tBool) TEST_F(ValuePrintingTests, tString) { Value vString; - vString.mkStringNoCopy("some-string"); + vString.mkStringNoCopy("some-string"_sds); test(vString, "\"some-string\""); } TEST_F(ValuePrintingTests, tPath) { Value vPath; - vPath.mkStringNoCopy("/foo"); + vPath.mkStringNoCopy("/foo"_sds); test(vPath, 
"\"/foo\""); } @@ -110,9 +111,8 @@ TEST_F(ValuePrintingTests, vLambda) PosTable::Origin origin = state.positions.addOrigin(std::monostate(), 1); auto posIdx = state.positions.add(origin, 0); auto body = ExprInt(0); - auto formals = Formals{}; - ExprLambda eLambda(posIdx, createSymbol("a"), &formals, &body); + ExprLambda eLambda(posIdx, createSymbol("a"), &body); Value vLambda; vLambda.mkLambda(&env, &eLambda); @@ -268,7 +268,7 @@ struct StringPrintingTests : LibExprTest void test(std::string_view literal, std::string_view expected, unsigned int maxLength, A... args) { Value v; - v.mkString(literal); + v.mkString(literal, state.mem); std::stringstream out; printValue(state, out, v, PrintOptions{.maxStringLength = maxLength}); @@ -290,10 +290,10 @@ TEST_F(StringPrintingTests, maxLengthTruncation) TEST_F(ValuePrintingTests, attrsTypeFirst) { Value vType; - vType.mkStringNoCopy("puppy"); + vType.mkStringNoCopy("puppy"_sds); Value vApple; - vApple.mkStringNoCopy("apple"); + vApple.mkStringNoCopy("apple"_sds); BindingsBuilder builder = state.buildBindings(10); builder.insert(state.symbols.create("type"), &vType); @@ -334,7 +334,7 @@ TEST_F(ValuePrintingTests, ansiColorsBool) TEST_F(ValuePrintingTests, ansiColorsString) { Value v; - v.mkStringNoCopy("puppy"); + v.mkStringNoCopy("puppy"_sds); test(v, ANSI_MAGENTA "\"puppy\"" ANSI_NORMAL, PrintOptions{.ansiColors = true}); } @@ -342,7 +342,7 @@ TEST_F(ValuePrintingTests, ansiColorsString) TEST_F(ValuePrintingTests, ansiColorsStringElided) { Value v; - v.mkStringNoCopy("puppy"); + v.mkStringNoCopy("puppy"_sds); test( v, @@ -353,7 +353,7 @@ TEST_F(ValuePrintingTests, ansiColorsStringElided) TEST_F(ValuePrintingTests, ansiColorsPath) { Value v; - v.mkPath(state.rootPath(CanonPath("puppy"))); + v.mkPath(state.rootPath(CanonPath("puppy")), state.mem); test(v, ANSI_GREEN "/puppy" ANSI_NORMAL, PrintOptions{.ansiColors = true}); } @@ -390,7 +390,7 @@ TEST_F(ValuePrintingTests, ansiColorsAttrs) TEST_F(ValuePrintingTests, 
ansiColorsDerivation) { Value vDerivation; - vDerivation.mkStringNoCopy("derivation"); + vDerivation.mkStringNoCopy("derivation"_sds); BindingsBuilder builder = state.buildBindings(10); builder.insert(state.s.type, &vDerivation); @@ -413,7 +413,7 @@ TEST_F(ValuePrintingTests, ansiColorsError) { Value throw_ = state.getBuiltin("throw"); Value message; - message.mkStringNoCopy("uh oh!"); + message.mkStringNoCopy("uh oh!"_sds); Value vError; vError.mkApp(&throw_, &message); @@ -430,12 +430,12 @@ TEST_F(ValuePrintingTests, ansiColorsDerivationError) { Value throw_ = state.getBuiltin("throw"); Value message; - message.mkStringNoCopy("uh oh!"); + message.mkStringNoCopy("uh oh!"_sds); Value vError; vError.mkApp(&throw_, &message); Value vDerivation; - vDerivation.mkStringNoCopy("derivation"); + vDerivation.mkStringNoCopy("derivation"_sds); BindingsBuilder builder = state.buildBindings(10); builder.insert(state.s.type, &vDerivation); @@ -500,9 +500,8 @@ TEST_F(ValuePrintingTests, ansiColorsLambda) PosTable::Origin origin = state.positions.addOrigin(std::monostate(), 1); auto posIdx = state.positions.add(origin, 0); auto body = ExprInt(0); - auto formals = Formals{}; - ExprLambda eLambda(posIdx, createSymbol("a"), &formals, &body); + ExprLambda eLambda(posIdx, createSymbol("a"), &body); Value vLambda; vLambda.mkLambda(&env, &eLambda); diff --git a/src/libexpr-tests/value/value.cc b/src/libexpr-tests/value/value.cc index c6349436fb7..bd8f0da7121 100644 --- a/src/libexpr-tests/value/value.cc +++ b/src/libexpr-tests/value/value.cc @@ -1,6 +1,8 @@ #include "nix/expr/value.hh" +#include "nix/expr/static-string-data.hh" #include "nix/store/tests/libstore.hh" +#include namespace nix { @@ -21,4 +23,21 @@ TEST_F(ValueTest, vInt) ASSERT_EQ(true, vInt.isValid()); } +TEST_F(ValueTest, staticString) +{ + Value vStr1; + Value vStr2; + vStr1.mkStringNoCopy("foo"_sds); + vStr2.mkStringNoCopy("foo"_sds); + + auto & sd1 = vStr1.string_data(); + auto & sd2 = vStr2.string_data(); + + // The 
strings should be the same + ASSERT_EQ(sd1.view(), sd2.view()); + + // The strings should also be backed by the same (static) allocation + ASSERT_EQ(&sd1, &sd2); +} + } // namespace nix diff --git a/src/libexpr/attr-path.cc b/src/libexpr/attr-path.cc index 58705bfa1bd..575a135422a 100644 --- a/src/libexpr/attr-path.cc +++ b/src/libexpr/attr-path.cc @@ -1,5 +1,6 @@ #include "nix/expr/attr-path.hh" #include "nix/expr/eval-inline.hh" +#include "nix/util/strings-inline.hh" namespace nix { @@ -30,14 +31,24 @@ static Strings parseAttrPath(std::string_view s) return res; } -std::vector parseAttrPath(EvalState & state, std::string_view s) +AttrPath AttrPath::parse(EvalState & state, std::string_view s) { - std::vector res; + AttrPath res; for (auto & a : parseAttrPath(s)) res.push_back(state.symbols.create(a)); return res; } +std::string AttrPath::to_string(EvalState & state) const +{ + return dropEmptyInitThenConcatStringsSep(".", state.symbols.resolve({*this})); +} + +std::vector AttrPath::resolve(EvalState & state) const +{ + return state.symbols.resolve({*this}); +} + std::pair findAlongAttrPath(EvalState & state, const std::string & attrPath, Bindings & autoArgs, Value & vIn) { diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc index 821b426d84f..13bc1b8ea64 100644 --- a/src/libexpr/eval-cache.cc +++ b/src/libexpr/eval-cache.cc @@ -136,17 +136,19 @@ struct AttrDb }); } - AttrId setString(AttrKey key, std::string_view s, const char ** context = nullptr) + AttrId setString(AttrKey key, std::string_view s, const Value::StringWithContext::Context * context = nullptr) { return doSQLite([&]() { auto state(_state->lock()); if (context) { std::string ctx; - for (const char ** p = context; *p; ++p) { - if (p != context) + bool first = true; + for (auto * elem : *context) { + if (!first) ctx.push_back(' '); - ctx.append(*p); + ctx.append(elem->view()); + first = false; } state->insertAttributeWithContext.use()(key.first)(symbols[key.second])(AttrType::String) 
(s) (ctx) .exec(); @@ -362,7 +364,7 @@ void AttrCursor::fetchCachedValue() throw CachedEvalError(parent->first, parent->second); } -std::vector AttrCursor::getAttrPath() const +AttrPath AttrCursor::getAttrPath() const { if (parent) { auto attrPath = parent->first->getAttrPath(); @@ -372,7 +374,7 @@ std::vector AttrCursor::getAttrPath() const return {}; } -std::vector AttrCursor::getAttrPath(Symbol name) const +AttrPath AttrCursor::getAttrPath(Symbol name) const { auto attrPath = getAttrPath(); attrPath.push_back(name); @@ -386,12 +388,12 @@ std::string toAttrPathStr(EvalState & state, const AttrPath & attrPath) std::string AttrCursor::getAttrPathStr() const { - return toAttrPathStr(root->state, getAttrPath()); + return getAttrPath().to_string(root->state); } std::string AttrCursor::getAttrPathStr(Symbol name) const { - return toAttrPathStr(root->state, getAttrPath(name)); + return getAttrPath(name).to_string(root->state); } Value & AttrCursor::forceValue() @@ -411,7 +413,7 @@ Value & AttrCursor::forceValue() if (root->db && (!cachedValue || std::get_if(&cachedValue->second))) { if (v.type() == nString) - cachedValue = {root->db->setString(getKey(), v.c_str(), v.context()), string_t{v.c_str(), {}}}; + cachedValue = {root->db->setString(getKey(), v.string_view(), v.context()), string_t{v.string_view(), {}}}; else if (v.type() == nPath) { auto path = v.path().path; cachedValue = {root->db->setString(getKey(), path.abs()), string_t{path.abs(), {}}}; @@ -514,7 +516,7 @@ ref AttrCursor::getAttr(std::string_view name) return getAttr(root->state.symbols.create(name)); } -OrSuggestions> AttrCursor::findAlongAttrPath(const std::vector & attrPath) +OrSuggestions> AttrCursor::findAlongAttrPath(const AttrPath & attrPath) { auto res = shared_from_this(); for (auto & attr : attrPath) { @@ -546,7 +548,7 @@ std::string AttrCursor::getString() if (v.type() != nString && v.type() != nPath) root->state.error("'%s' is not a string but %s", getAttrPathStr(), showType(v)).debugThrow(); - 
return v.type() == nString ? v.c_str() : v.path().to_string(); + return v.type() == nString ? std::string(v.string_view()) : v.path().to_string(); } string_t AttrCursor::getStringWithContext() @@ -586,7 +588,7 @@ string_t AttrCursor::getStringWithContext() if (v.type() == nString) { NixStringContext context; copyContext(v, context); - return {v.c_str(), std::move(context)}; + return {std::string{v.string_view()}, std::move(context)}; } else if (v.type() == nPath) return {v.path().to_string(), {}}; else diff --git a/src/libexpr/eval-settings.cc b/src/libexpr/eval-settings.cc index c9e271b952f..27205864b8b 100644 --- a/src/libexpr/eval-settings.cc +++ b/src/libexpr/eval-settings.cc @@ -60,18 +60,18 @@ EvalSettings::EvalSettings(bool & readOnlyMode, EvalSettings::LookupPathHooks lo Strings EvalSettings::getDefaultNixPath() { Strings res; - auto add = [&](const Path & p, const std::string & s = std::string()) { + auto add = [&](const std::filesystem::path & p, const std::string & s = std::string()) { if (std::filesystem::exists(p)) { if (s.empty()) { - res.push_back(p); + res.push_back(p.string()); } else { - res.push_back(s + "=" + p); + res.push_back(s + "=" + p.string()); } } }; - add(getNixDefExpr() + "/channels"); - add(rootChannelsDir() + "/nixpkgs", "nixpkgs"); + add(std::filesystem::path{getNixDefExpr()} / "channels"); + add(rootChannelsDir() / "nixpkgs", "nixpkgs"); add(rootChannelsDir()); return res; @@ -92,7 +92,7 @@ bool EvalSettings::isPseudoUrl(std::string_view s) std::string EvalSettings::resolvePseudoUrl(std::string_view url) { if (hasPrefix(url, "channel:")) { - auto realUrl = "https://nixos.org/channels/" + std::string(url.substr(8)) + "/nixexprs.tar.xz"; + auto realUrl = "https://channels.nixos.org/" + std::string(url.substr(8)) + "/nixexprs.tar.xz"; static bool haveWarned = false; warnOnce( haveWarned, @@ -113,9 +113,9 @@ const std::string & EvalSettings::getCurrentSystem() const return evalSystem != "" ? 
evalSystem : settings.thisSystem.get(); } -Path getNixDefExpr() +std::filesystem::path getNixDefExpr() { - return settings.useXDGBaseDirectories ? getStateDir() + "/defexpr" : getHome() + "/.nix-defexpr"; + return settings.useXDGBaseDirectories ? getStateDir() / "defexpr" : getHome() / ".nix-defexpr"; } } // namespace nix diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index f49f0edf534..46393b79c5e 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -3,6 +3,7 @@ #include "nix/expr/primops.hh" #include "nix/expr/print-options.hh" #include "nix/expr/symbol-table.hh" +#include "nix/expr/value.hh" #include "nix/util/exit.hh" #include "nix/util/types.hh" #include "nix/util/util.hh" @@ -30,6 +31,8 @@ #include "parser-tab.hh" #include +#include +#include #include #include #include @@ -38,6 +41,7 @@ #include #include #include +#include #include #include @@ -54,6 +58,9 @@ using json = nlohmann::json; namespace nix { +/** + * Just for doc strings. Not for regular string values. + */ static char * allocString(size_t size) { char * t; @@ -67,6 +74,9 @@ static char * allocString(size_t size) // string allocations. // This function handles makeImmutableString(std::string_view()) by returning // the empty string. +/** + * Just for doc strings. Not for regular string values. 
+ */ static const char * makeImmutableString(std::string_view s) { const size_t size = s.size(); @@ -78,6 +88,25 @@ static const char * makeImmutableString(std::string_view s) return t; } +StringData & StringData::alloc(EvalMemory & mem, size_t size) +{ + void * t = mem.allocBytes(sizeof(StringData) + size + 1); + if (!t) + throw std::bad_alloc(); + auto res = new (t) StringData(size); + return *res; +} + +const StringData & StringData::make(EvalMemory & mem, std::string_view s) +{ + if (s.empty()) + return ""_sds; + auto & res = alloc(mem, s.size()); + std::memcpy(&res.data_, s.data(), s.size()); + res.data_[s.size()] = '\0'; + return res; +} + RootValue allocRootValue(Value * v) { return std::allocate_shared(traceable_allocator(), v); @@ -197,7 +226,7 @@ bool ValueStorage::isTrivial() const if (isApp) return false; auto expr = untagPointer(p1_); - return (dynamic_cast(expr) && ((ExprAttrs *) expr)->dynamicAttrs.empty()) + return (dynamic_cast(expr) && ((ExprAttrs *) expr)->dynamicAttrs->empty()) || dynamic_cast(expr) || dynamic_cast(expr); } @@ -518,15 +547,16 @@ Value * EvalState::addPrimOp(PrimOp && primOp) if (primOp.arity == 0) { primOp.arity = 1; auto vPrimOp = allocValue(); - vPrimOp->mkPrimOp(new PrimOp(primOp)); + vPrimOp->mkPrimOp(new PrimOp(std::move(primOp))); Value v; v.mkApp(vPrimOp, vPrimOp); + auto & primOp1 = *vPrimOp->primOp(); return addConstant( - primOp.name, + primOp1.name, v, { .type = nThunk, // FIXME - .doc = primOp.doc, + .doc = primOp1.doc ? 
primOp1.doc->c_str() : nullptr, }); } @@ -566,13 +596,14 @@ std::optional EvalState::getDoc(Value & v) { if (v.isPrimOp()) { auto v2 = &v; - if (auto * doc = v2->primOp()->doc) + auto & primOp = *v2->primOp(); + if (primOp.doc) return Doc{ .pos = {}, - .name = v2->primOp()->name, - .arity = v2->primOp()->arity, - .args = v2->primOp()->args, - .doc = doc, + .name = primOp.name, + .arity = primOp.arity, + .args = primOp.args, + .doc = primOp.doc->c_str(), }; } if (v.isLambda()) { @@ -614,7 +645,9 @@ std::optional EvalState::getDoc(Value & v) .name = name, .arity = 0, // FIXME: figure out how deep by syntax only? It's not semantically useful though... .args = {}, - .doc = makeImmutableString(s.view()), // NOTE: memory leak when compiled without GC + /* N.B. Can't use StringData here, because that would lead to an interior pointer. + NOTE: memory leak when compiled without GC. */ + .doc = makeImmutableString(s.view()), }; } if (isFunctor(v)) { @@ -846,38 +879,36 @@ DebugTraceStacker::DebugTraceStacker(EvalState & evalState, DebugTrace t) evalState.runDebugRepl(nullptr, trace.env, trace.expr); } -void Value::mkString(std::string_view s) +void Value::mkString(std::string_view s, EvalMemory & mem) { - mkStringNoCopy(makeImmutableString(s)); + mkStringNoCopy(StringData::make(mem, s)); } -static const char ** encodeContext(const NixStringContext & context) +Value::StringWithContext::Context * +Value::StringWithContext::Context::fromBuilder(const NixStringContext & context, EvalMemory & mem) { - if (!context.empty()) { - size_t n = 0; - auto ctx = (const char **) allocBytes((context.size() + 1) * sizeof(char *)); - for (auto & i : context) { - ctx[n++] = makeImmutableString({i.to_string()}); - } - ctx[n] = nullptr; - return ctx; - } else + if (context.empty()) return nullptr; + + auto ctx = new (mem.allocBytes(sizeof(Context) + context.size() * sizeof(value_type))) Context(context.size()); + std::ranges::transform( + context, ctx->elems, [&](const NixStringContextElem & elt) 
{ return &StringData::make(mem, elt.to_string()); }); + return ctx; } -void Value::mkString(std::string_view s, const NixStringContext & context) +void Value::mkString(std::string_view s, const NixStringContext & context, EvalMemory & mem) { - mkStringNoCopy(makeImmutableString(s), encodeContext(context)); + mkStringNoCopy(StringData::make(mem, s), Value::StringWithContext::Context::fromBuilder(context, mem)); } -void Value::mkStringMove(const char * s, const NixStringContext & context) +void Value::mkStringMove(const StringData & s, const NixStringContext & context, EvalMemory & mem) { - mkStringNoCopy(s, encodeContext(context)); + mkStringNoCopy(s, Value::StringWithContext::Context::fromBuilder(context, mem)); } -void Value::mkPath(const SourcePath & path) +void Value::mkPath(const SourcePath & path, EvalMemory & mem) { - mkPath(&*path.accessor, makeImmutableString(path.path.abs())); + mkPath(&*path.accessor, StringData::make(mem, path.path.abs())); } inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval) @@ -913,9 +944,9 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval) } } -ListBuilder::ListBuilder(size_t size) +ListBuilder::ListBuilder(EvalMemory & mem, size_t size) : size(size) - , elems(size <= 2 ? inlineElems : (Value **) allocBytes(size * sizeof(Value *))) + , elems(size <= 2 ? inlineElems : (Value **) mem.allocBytes(size * sizeof(Value *))) { } @@ -946,9 +977,10 @@ void EvalState::mkPos(Value & v, PosIdx p) // FIXME: only do this for virtual store paths? 
attrs.alloc(s.file).mkString( path->path.abs(), - {NixStringContextElem::Path{.storePath = store->toStorePath(path->path.abs()).first}}); + {NixStringContextElem::Path{.storePath = store->toStorePath(path->path.abs()).first}}, + mem); else - attrs.alloc(s.file).mkString(path->path.abs()); + attrs.alloc(s.file).mkString(path->path.abs(), mem); makePositionThunks(*this, p, attrs.alloc(s.line), attrs.alloc(s.column)); v.mkAttrs(attrs); } else @@ -961,7 +993,8 @@ void EvalState::mkStorePathString(const StorePath & p, Value & v) store->printStorePath(p), NixStringContext{ NixStringContextElem::Opaque{.path = p}, - }); + }, + mem); } std::string EvalState::mkOutputStringRaw( @@ -983,7 +1016,7 @@ void EvalState::mkOutputString( std::optional optStaticOutputPath, const ExperimentalFeatureSettings & xpSettings) { - value.mkString(mkOutputStringRaw(b, optStaticOutputPath, xpSettings), NixStringContext{b}); + value.mkString(mkOutputStringRaw(b, optStaticOutputPath, xpSettings), NixStringContext{b}, mem); } std::string EvalState::mkSingleDerivedPathStringRaw(const SingleDerivedPath & p) @@ -1019,7 +1052,8 @@ void EvalState::mkSingleDerivedPathString(const SingleDerivedPath & p, Value & v mkSingleDerivedPathStringRaw(p), NixStringContext{ std::visit([](auto && v) -> NixStringContextElem { return v; }, p), - }); + }, + mem); } Value * Expr::maybeThunk(EvalState & state, Env & env) @@ -1145,6 +1179,7 @@ void EvalState::resetFileCache() importResolutionCache->clear(); fileEvalCache->clear(); inputCache->clear(); + positions.clear(); } void EvalState::eval(Expr * e, Value & v) @@ -1224,26 +1259,26 @@ Env * ExprAttrs::buildInheritFromEnv(EvalState & state, Env & up) void ExprAttrs::eval(EvalState & state, Env & env, Value & v) { - auto bindings = state.buildBindings(attrs.size() + dynamicAttrs.size()); + auto bindings = state.buildBindings(attrs->size() + dynamicAttrs->size()); auto dynamicEnv = &env; bool sort = false; if (recursive) { /* Create a new environment that contains the 
attributes in this `rec'. */ - Env & env2(state.mem.allocEnv(attrs.size())); + Env & env2(state.mem.allocEnv(attrs->size())); env2.up = &env; dynamicEnv = &env2; Env * inheritEnv = inheritFromExprs ? buildInheritFromEnv(state, env2) : nullptr; - AttrDefs::iterator overrides = attrs.find(state.s.overrides); - bool hasOverrides = overrides != attrs.end(); + AttrDefs::iterator overrides = attrs->find(state.s.overrides); + bool hasOverrides = overrides != attrs->end(); /* The recursive attributes are evaluated in the new environment, while the inherited attributes are evaluated in the original environment. */ Displacement displ = 0; - for (auto & i : attrs) { + for (auto & i : *attrs) { Value * vAttr; if (hasOverrides && i.second.kind != AttrDef::Kind::Inherited) { vAttr = state.allocValue(); @@ -1270,8 +1305,8 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) "while evaluating the `__overrides` attribute"); bindings.grow(state.buildBindings(bindings.capacity() + vOverrides->attrs()->size())); for (auto & i : *vOverrides->attrs()) { - AttrDefs::iterator j = attrs.find(i.name); - if (j != attrs.end()) { + AttrDefs::iterator j = attrs->find(i.name); + if (j != attrs->end()) { (*bindings.bindings)[j->second.displ] = i; env2.values[j->second.displ] = i.value; } else @@ -1283,13 +1318,13 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) else { Env * inheritEnv = inheritFromExprs ? buildInheritFromEnv(state, env) : nullptr; - for (auto & i : attrs) + for (auto & i : *attrs) bindings.insert( i.first, i.second.e->maybeThunk(state, *i.second.chooseByKind(&env, &env, inheritEnv)), i.second.pos); } /* Dynamic attrs apply *after* rec and __overrides. 
*/ - for (auto & i : dynamicAttrs) { + for (auto & i : *dynamicAttrs) { Value nameVal; i.nameExpr->eval(state, *dynamicEnv, nameVal); state.forceValue(nameVal, i.pos); @@ -1323,7 +1358,7 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v) { /* Create a new environment that contains the attributes in this `let'. */ - Env & env2(state.mem.allocEnv(attrs->attrs.size())); + Env & env2(state.mem.allocEnv(attrs->attrs->size())); env2.up = &env; Env * inheritEnv = attrs->inheritFromExprs ? attrs->buildInheritFromEnv(state, env2) : nullptr; @@ -1332,7 +1367,7 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v) while the inherited attributes are evaluated in the original environment. */ Displacement displ = 0; - for (auto & i : attrs->attrs) { + for (auto & i : *attrs->attrs) { env2.values[displ++] = i.second.e->maybeThunk(state, *i.second.chooseByKind(&env2, &env, inheritEnv)); } @@ -1366,7 +1401,7 @@ void ExprVar::eval(EvalState & state, Env & env, Value & v) v = *v2; } -static std::string showAttrPath(EvalState & state, Env & env, std::span attrPath) +static std::string showAttrSelectionPath(EvalState & state, Env & env, std::span attrPath) { std::ostringstream out; bool first = true; @@ -1402,7 +1437,7 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v) env, getPos(), "while evaluating the attribute '%1%'", - showAttrPath(state, env, getAttrPath())) + showAttrSelectionPath(state, env, getAttrPath())) : nullptr; for (auto & i : getAttrPath()) { @@ -1443,7 +1478,7 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v) auto origin = std::get_if(&pos2r.origin); if (!(origin && *origin == state.derivationInternal)) state.addErrorTrace( - e, pos2, "while evaluating the attribute '%1%'", showAttrPath(state, env, getAttrPath())); + e, pos2, "while evaluating the attribute '%1%'", showAttrSelectionPath(state, env, getAttrPath())); } throw; } @@ -1531,15 +1566,13 @@ void EvalState::callFunction(Value & fun, std::span args, Value & 
vRes, ExprLambda & lambda(*vCur.lambda().fun); - auto size = (!lambda.arg ? 0 : 1) + (lambda.hasFormals() ? lambda.formals->formals.size() : 0); + auto size = (!lambda.arg ? 0 : 1) + (lambda.getFormals() ? lambda.getFormals()->formals.size() : 0); Env & env2(mem.allocEnv(size)); env2.up = vCur.lambda().env; Displacement displ = 0; - if (!lambda.hasFormals()) - env2.values[displ++] = args[0]; - else { + if (auto formals = lambda.getFormals()) { try { forceAttrs(*args[0], lambda.pos, "while evaluating the value passed for the lambda argument"); } catch (Error & e) { @@ -1555,7 +1588,7 @@ void EvalState::callFunction(Value & fun, std::span args, Value & vRes, there is no matching actual argument but the formal argument has a default, use the default. */ size_t attrsUsed = 0; - for (auto & i : lambda.formals->formals) { + for (auto & i : formals->formals) { auto j = args[0]->attrs()->get(i.name); if (!j) { if (!i.def) { @@ -1577,13 +1610,13 @@ void EvalState::callFunction(Value & fun, std::span args, Value & vRes, /* Check that each actual argument is listed as a formal argument (unless the attribute match specifies a `...'). */ - if (!lambda.formals->ellipsis && attrsUsed != args[0]->attrs()->size()) { + if (!formals->ellipsis && attrsUsed != args[0]->attrs()->size()) { /* Nope, so show the first unexpected argument to the user. 
*/ for (auto & i : *args[0]->attrs()) - if (!lambda.formals->has(i.name)) { + if (!formals->has(i.name)) { StringSet formalNames; - for (auto & formal : lambda.formals->formals) + for (auto & formal : formals->formals) formalNames.insert(std::string(symbols[formal.name])); auto suggestions = Suggestions::bestMatches(formalNames, symbols[i.name]); error( @@ -1598,6 +1631,8 @@ void EvalState::callFunction(Value & fun, std::span args, Value & vRes, } unreachable(); } + } else { + env2.values[displ++] = args[0]; } nrFunctionCalls++; @@ -1759,9 +1794,9 @@ void ExprCall::eval(EvalState & state, Env & env, Value & v) // 4: about 60 // 5: under 10 // This excluded attrset lambdas (`{...}:`). Contributions of mixed lambdas appears insignificant at ~150 total. - SmallValueVector<4> vArgs(args.size()); - for (size_t i = 0; i < args.size(); ++i) - vArgs[i] = args[i]->maybeThunk(state, env); + SmallValueVector<4> vArgs(args->size()); + for (size_t i = 0; i < args->size(); ++i) + vArgs[i] = (*args)[i]->maybeThunk(state, env); state.callFunction(vFun, vArgs, v, pos); } @@ -1789,14 +1824,15 @@ void EvalState::autoCallFunction(const Bindings & args, Value & fun, Value & res } } - if (!fun.isLambda() || !fun.lambda().fun->hasFormals()) { + if (!fun.isLambda() || !fun.lambda().fun->getFormals()) { res = fun; return; } + auto formals = fun.lambda().fun->getFormals(); - auto attrs = buildBindings(std::max(static_cast(fun.lambda().fun->formals->formals.size()), args.size())); + auto attrs = buildBindings(std::max(static_cast(formals->formals.size()), args.size())); - if (fun.lambda().fun->formals->ellipsis) { + if (formals->ellipsis) { // If the formals have an ellipsis (eg the function accepts extra args) pass // all available automatic arguments (which includes arguments specified on // the command line via --arg/--argstr) @@ -1804,7 +1840,7 @@ void EvalState::autoCallFunction(const Bindings & args, Value & fun, Value & res attrs.insert(v); } else { // Otherwise, only pass the 
arguments that the function accepts - for (auto & i : fun.lambda().fun->formals->formals) { + for (auto & i : formals->formals) { auto j = args.get(i.name); if (j) { attrs.insert(*j); @@ -2034,7 +2070,7 @@ void EvalState::concatLists( void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) { NixStringContext context; - std::vector s; + std::vector strings; size_t sSize = 0; NixInt n{0}; NixFloat nf = 0; @@ -2042,32 +2078,11 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) bool first = !forceString; ValueType firstType = nString; - const auto str = [&] { - std::string result; - result.reserve(sSize); - for (const auto & part : s) - result += *part; - return result; - }; - /* c_str() is not str().c_str() because we want to create a string - Value. allocating a GC'd string directly and moving it into a - Value lets us avoid an allocation and copy. */ - const auto c_str = [&] { - char * result = allocString(sSize + 1); - char * tmp = result; - for (const auto & part : s) { - memcpy(tmp, part->data(), part->size()); - tmp += part->size(); - } - *tmp = 0; - return result; - }; - // List of returned strings. References to these Values must NOT be persisted. 
- SmallTemporaryValueVector values(es->size()); + SmallTemporaryValueVector values(es.size()); Value * vTmpP = values.data(); - for (auto & [i_pos, i] : *es) { + for (auto & [i_pos, i] : es) { Value & vTmp = *vTmpP++; i->eval(state, env, vTmp); @@ -2110,33 +2125,46 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) .withFrame(env, *this) .debugThrow(); } else { - if (s.empty()) - s.reserve(es->size()); + if (strings.empty()) + strings.reserve(es.size()); /* skip canonization of first path, which would only be not canonized in the first place if it's coming from a ./${foo} type path */ auto part = state.coerceToString( i_pos, vTmp, context, "while evaluating a path segment", false, firstType == nString, !first); sSize += part->size(); - s.emplace_back(std::move(part)); + strings.emplace_back(std::move(part)); } first = false; } - if (firstType == nInt) + if (firstType == nInt) { v.mkInt(n); - else if (firstType == nFloat) + } else if (firstType == nFloat) { v.mkFloat(nf); - else if (firstType == nPath) { + } else if (firstType == nPath) { if (hasContext(context)) state.error("a string that refers to a store path cannot be appended to a path") .atPos(pos) .withFrame(env, *this) .debugThrow(); - v.mkPath(state.rootPath(CanonPath(str()))); - } else - v.mkStringMove(c_str(), context); + std::string resultStr; + resultStr.reserve(sSize); + for (const auto & part : strings) { + resultStr += *part; + } + v.mkPath(state.rootPath(CanonPath(resultStr)), state.mem); + } else { + auto & resultStr = StringData::alloc(state.mem, sSize); + auto * tmp = resultStr.data(); + for (const auto & part : strings) { + std::memcpy(tmp, part->data(), part->size()); + tmp += part->size(); + } + *tmp = '\0'; + v.mkStringMove(resultStr, context, state.mem); + } } void ExprPos::eval(EvalState & state, Env & env, Value & v) @@ -2164,42 +2192,47 @@ void EvalState::forceValueDeep(Value & v) { std::set seen; - std::function recurse; + [&, &state(*this)](this const auto & 
recurse, Value & v) { + auto _level = state.addCallDepth(v.determinePos(noPos)); - recurse = [&](Value & v) { if (!seen.insert(&v).second) return; - forceValue(v, v.determinePos(noPos)); + state.forceValue(v, v.determinePos(noPos)); if (v.type() == nAttrs) { for (auto & i : *v.attrs()) try { // If the value is a thunk, we're evaling. Otherwise no trace necessary. // FIXME: race, thunk might be updated by another thread - auto dts = debugRepl && i.value->isThunk() ? makeDebugTraceStacker( - *this, - *i.value->thunk().expr, - *i.value->thunk().env, - i.pos, - "while evaluating the attribute '%1%'", - symbols[i.name]) - : nullptr; + auto dts = state.debugRepl && i.value->isThunk() ? makeDebugTraceStacker( + state, + *i.value->thunk().expr, + *i.value->thunk().env, + i.pos, + "while evaluating the attribute '%1%'", + state.symbols[i.name]) + : nullptr; recurse(*i.value); } catch (Error & e) { - addErrorTrace(e, i.pos, "while evaluating the attribute '%1%'", symbols[i.name]); + state.addErrorTrace(e, i.pos, "while evaluating the attribute '%1%'", state.symbols[i.name]); throw; } } else if (v.isList()) { + size_t index = 0; for (auto v2 : v.listView()) - recurse(*v2); + try { + recurse(*v2); + index++; + } catch (Error & e) { + state.addErrorTrace(e, "while evaluating list element at index %1%", index); + throw; + } } - }; - - recurse(v); + }(v); } NixInt EvalState::forceInt(Value & v, const PosIdx pos, std::string_view errorCtx) @@ -2303,9 +2336,9 @@ std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string void copyContext(const Value & v, NixStringContext & context, const ExperimentalFeatureSettings & xpSettings) { - if (v.context()) - for (const char ** p = v.context(); *p; ++p) - context.insert(NixStringContextElem::parse(*p, xpSettings)); + if (auto * ctx = v.context()) + for (auto * elem : *ctx) + context.insert(NixStringContextElem::parse(elem->view(), xpSettings)); } std::string_view EvalState::forceString( @@ -2330,7 +2363,7 @@ 
std::string_view EvalState::forceStringNoCtx(Value & v, const PosIdx pos, std::s error( "the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string_view(), - v.context()[0]) + (*v.context()->begin())->view()) .withTrace(pos, errorCtx) .debugThrow(); } @@ -2390,18 +2423,19 @@ BackedStringView EvalState::coerceToString( // FIXME: instead of copying the path to the store, we could // return a virtual store path that lazily copies the path to // the store in devirtualize(). - return !canonicalizePath && !copyToStore - ? // FIXME: hack to preserve path literals that end in a - // slash, as in /foo/${x}. - v.pathStr() - : copyToStore ? store->printStorePath(copyPathToStore(context, v.path(), v.determinePos(pos))) : ({ - auto path = v.path(); - if (path.accessor == rootFS && store->isInStore(path.path.abs())) { - context.insert( - NixStringContextElem::Path{.storePath = store->toStorePath(path.path.abs()).first}); - } - std::string(path.path.abs()); - }); + if (!canonicalizePath && !copyToStore) { + // FIXME: hack to preserve path literals that end in a + // slash, as in /foo/${x}. 
+ return v.pathStrView(); + } else if (copyToStore) { + return store->printStorePath(copyPathToStore(context, v.path(), v.determinePos(pos))); + } else { + auto path = v.path(); + if (path.accessor == rootFS && store->isInStore(path.path.abs())) { + context.insert(NixStringContextElem::Path{.storePath = store->toStorePath(path.path.abs()).first}); + } + return std::string(path.path.abs()); + } } if (v.type() == nAttrs) { @@ -2659,7 +2693,7 @@ void EvalState::assertEqValues(Value & v1, Value & v2, const PosIdx pos, std::st return; case nString: - if (strcmp(v1.c_str(), v2.c_str()) != 0) { + if (v1.string_view() != v2.string_view()) { error( "string '%s' is not equal to string '%s'", ValuePrinter(*this, v1, errorPrintOptions), @@ -2676,7 +2710,7 @@ void EvalState::assertEqValues(Value & v1, Value & v2, const PosIdx pos, std::st ValuePrinter(*this, v2, errorPrintOptions)) .debugThrow(); } - if (strcmp(v1.pathStr(), v2.pathStr()) != 0) { + if (v1.pathStrView() != v2.pathStrView()) { error( "path '%s' is not equal to path '%s'", ValuePrinter(*this, v1, errorPrintOptions), @@ -2845,12 +2879,12 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v return v1.boolean() == v2.boolean(); case nString: - return strcmp(v1.c_str(), v2.c_str()) == 0; + return v1.string_view() == v2.string_view(); case nPath: return // FIXME: compare accessors by their fingerprint. 
- v1.pathAccessor() == v2.pathAccessor() && strcmp(v1.pathStr(), v2.pathStr()) == 0; + v1.pathAccessor() == v2.pathAccessor() && v1.pathStrView() == v2.pathStrView(); case nNull: return true; @@ -3102,7 +3136,7 @@ Expr * EvalState::parseExprFromFile(const SourcePath & path) return parseExprFromFile(path, staticBaseEnv); } -Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr & staticEnv) +Expr * EvalState::parseExprFromFile(const SourcePath & path, const std::shared_ptr & staticEnv) { auto buffer = path.resolveSymlinks().readFile(); // readFile hopefully have left some extra space for terminators @@ -3110,8 +3144,8 @@ Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr & staticEnv) +Expr * EvalState::parseExprFromString( + std::string s_, const SourcePath & basePath, const std::shared_ptr & staticEnv) { // NOTE this method (and parseStdin) must take care to *fully copy* their input // into their respective Pos::Origin until the parser stops overwriting its input @@ -3197,7 +3231,7 @@ std::optional EvalState::resolveLookupPathPath(const LookupPath::Pat if (EvalSettings::isPseudoUrl(value)) { try { - auto accessor = fetchers::downloadTarball(store, fetchSettings, EvalSettings::resolvePseudoUrl(value)); + auto accessor = fetchers::downloadTarball(*store, fetchSettings, EvalSettings::resolvePseudoUrl(value)); auto storePath = fetchToStore(fetchSettings, *store, SourcePath(accessor), FetchMode::Copy); return finish(this->storePath(storePath)); } catch (Error & e) { @@ -3245,7 +3279,11 @@ std::optional EvalState::resolveLookupPathPath(const LookupPath::Pat } Expr * EvalState::parse( - char * text, size_t length, Pos::Origin origin, const SourcePath & basePath, std::shared_ptr & staticEnv) + char * text, + size_t length, + Pos::Origin origin, + const SourcePath & basePath, + const std::shared_ptr & staticEnv) { DocCommentMap tmpDocComments; // Only used when not origin is not a SourcePath auto * docComments = 
&tmpDocComments; @@ -3255,8 +3293,8 @@ Expr * EvalState::parse( docComments = &*it->second; } - auto result = parseExprFromBuf( - text, length, origin, basePath, mem.exprs.alloc, symbols, settings, positions, *docComments, rootFS); + auto result = + parseExprFromBuf(text, length, origin, basePath, mem.exprs, symbols, settings, positions, *docComments, rootFS); result->bindVars(*this, staticEnv); diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index 5a7281b2b82..c4a2b00af3e 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -168,7 +168,7 @@ PackageInfo::Outputs PackageInfo::queryOutputs(bool withPaths, bool onlyOutputsT for (auto elem : outTI->listView()) { if (elem->type() != nString) throw errMsg; - auto out = outputs.find(elem->c_str()); + auto out = outputs.find(elem->string_view()); if (out == outputs.end()) throw errMsg; result.insert(*out); @@ -245,7 +245,7 @@ std::string PackageInfo::queryMetaString(const std::string & name) Value * v = queryMeta(name); if (!v || v->type() != nString) return ""; - return v->c_str(); + return std::string{v->string_view()}; } NixInt PackageInfo::queryMetaInt(const std::string & name, NixInt def) @@ -258,7 +258,7 @@ NixInt PackageInfo::queryMetaInt(const std::string & name, NixInt def) if (v->type() == nString) { /* Backwards compatibility with before we had support for integer meta fields. */ - if (auto n = string2Int(v->c_str())) + if (auto n = string2Int(v->string_view())) return NixInt{*n}; } return def; @@ -274,7 +274,7 @@ NixFloat PackageInfo::queryMetaFloat(const std::string & name, NixFloat def) if (v->type() == nString) { /* Backwards compatibility with before we had support for float meta fields. 
*/ - if (auto n = string2Float(v->c_str())) + if (auto n = string2Float(v->string_view())) return *n; } return def; diff --git a/src/libexpr/include/nix/expr/attr-path.hh b/src/libexpr/include/nix/expr/attr-path.hh index 10e3e300f00..fd48705b8b7 100644 --- a/src/libexpr/include/nix/expr/attr-path.hh +++ b/src/libexpr/include/nix/expr/attr-path.hh @@ -19,6 +19,15 @@ findAlongAttrPath(EvalState & state, const std::string & attrPath, Bindings & au */ std::pair findPackageFilename(EvalState & state, Value & v, std::string what); -std::vector parseAttrPath(EvalState & state, std::string_view s); +struct AttrPath : std::vector +{ + using std::vector::vector; + + static AttrPath parse(EvalState & state, std::string_view s); + + std::string to_string(EvalState & state) const; + + std::vector resolve(EvalState & state) const; +}; } // namespace nix diff --git a/src/libexpr/include/nix/expr/eval-cache.hh b/src/libexpr/include/nix/expr/eval-cache.hh index 61d5ff64521..6d82f8c7e35 100644 --- a/src/libexpr/include/nix/expr/eval-cache.hh +++ b/src/libexpr/include/nix/expr/eval-cache.hh @@ -4,6 +4,7 @@ #include "nix/util/sync.hh" #include "nix/util/hash.hh" #include "nix/expr/eval.hh" +#include "nix/expr/attr-path.hh" #include #include @@ -13,10 +14,6 @@ namespace nix::eval_cache { struct AttrDb; class AttrCursor; -using AttrPath = std::vector; - -std::string toAttrPathStr(EvalState & state, const AttrPath & attrPath); - struct CachedEvalError : EvalError { const ref cursor; @@ -128,9 +125,9 @@ public: Value * value = nullptr, std::optional> && cachedValue = {}); - std::vector getAttrPath() const; + AttrPath getAttrPath() const; - std::vector getAttrPath(Symbol name) const; + AttrPath getAttrPath(Symbol name) const; std::string getAttrPathStr() const; @@ -150,7 +147,7 @@ public: * Get an attribute along a chain of attrsets. Note that this does * not auto-call functors or functions. 
*/ - OrSuggestions> findAlongAttrPath(const std::vector & attrPath); + OrSuggestions> findAlongAttrPath(const AttrPath & attrPath); std::string getString(); diff --git a/src/libexpr/include/nix/expr/eval-inline.hh b/src/libexpr/include/nix/expr/eval-inline.hh index c6c81db8fdc..35b54926157 100644 --- a/src/libexpr/include/nix/expr/eval-inline.hh +++ b/src/libexpr/include/nix/expr/eval-inline.hh @@ -12,7 +12,7 @@ namespace nix { * Note: Various places expect the allocated memory to be zeroed. */ [[gnu::always_inline]] -inline void * allocBytes(size_t n) +inline void * EvalMemory::allocBytes(size_t n) { void * p; #if NIX_USE_BOEHMGC diff --git a/src/libexpr/include/nix/expr/eval-settings.hh b/src/libexpr/include/nix/expr/eval-settings.hh index 6383bb6cd41..f367541ec2f 100644 --- a/src/libexpr/include/nix/expr/eval-settings.hh +++ b/src/libexpr/include/nix/expr/eval-settings.hh @@ -404,11 +404,12 @@ struct EvalSettings : Config /** * Conventionally part of the default nix path in impure mode. */ -Path getNixDefExpr(); +std::filesystem::path getNixDefExpr(); /** - * Stack size for evaluator threads. + * Stack size for evaluator threads. This used to be 64 MiB, but macOS as deployed on GitHub Actions has a + * hard limit slightly under that, so we round it down a bit. */ -constexpr size_t evalStackSize = 64 * 1024 * 1024; +constexpr size_t evalStackSize = 60 * 1024 * 1024; } // namespace nix diff --git a/src/libexpr/include/nix/expr/eval.hh b/src/libexpr/include/nix/expr/eval.hh index c9db6f48e74..c9cfb1a573b 100644 --- a/src/libexpr/include/nix/expr/eval.hh +++ b/src/libexpr/include/nix/expr/eval.hh @@ -110,7 +110,7 @@ struct PrimOp /** * Optional free-form documentation about the primop. */ - const char * doc = nullptr; + std::optional doc; /** * Add a trace item, while calling the `` builtin. 
@@ -337,6 +337,7 @@ public: EvalMemory & operator=(const EvalMemory &) = delete; EvalMemory & operator=(EvalMemory &&) = delete; + inline void * allocBytes(size_t n); inline Value * allocValue(); inline Env & allocEnv(size_t size); @@ -350,7 +351,7 @@ public: ListBuilder buildList(size_t size) { stats.nrListElems += size; - return ListBuilder(size); + return ListBuilder(*this, size); } const Statistics & getStats() const & @@ -476,18 +477,18 @@ private: /* Cache for calls to addToStore(); maps source paths to the store paths. */ - ref> srcToStore; + const ref> srcToStore; /** * A cache that maps paths to "resolved" paths for importing Nix * expressions, i.e. `/foo` to `/foo/default.nix`. */ - ref> importResolutionCache; + const ref> importResolutionCache; /** * A cache from resolved paths to values. */ - ref, @@ -510,12 +511,19 @@ private: /** * Cache used by prim_match(). */ - ref regexCache; + const ref regexCache; public: + /** + * @param lookupPath Only used during construction. + * @param store The store to use for instantiation + * @param fetchSettings Must outlive the lifetime of this EvalState! + * @param settings Must outlive the lifetime of this EvalState! + * @param buildStore The store to use for builds ("import from derivation", C API `nix_string_realise`) + */ EvalState( - const LookupPath & _lookupPath, + const LookupPath & lookupPath, ref store, const fetchers::Settings & fetchSettings, const EvalSettings & settings, @@ -596,12 +604,13 @@ public: * Parse a Nix expression from the specified file. */ Expr * parseExprFromFile(const SourcePath & path); - Expr * parseExprFromFile(const SourcePath & path, std::shared_ptr & staticEnv); + Expr * parseExprFromFile(const SourcePath & path, const std::shared_ptr & staticEnv); /** * Parse a Nix expression from the specified string. 
*/ - Expr * parseExprFromString(std::string s, const SourcePath & basePath, std::shared_ptr & staticEnv); + Expr * + parseExprFromString(std::string s, const SourcePath & basePath, const std::shared_ptr & staticEnv); Expr * parseExprFromString(std::string s, const SourcePath & basePath); Expr * parseStdin(); @@ -791,7 +800,7 @@ public: #if NIX_USE_BOEHMGC /** A GC root for the baseEnv reference. */ - std::shared_ptr baseEnvP; + const std::shared_ptr baseEnvP; #endif public: @@ -805,7 +814,7 @@ public: /** * The same, but used during parsing to resolve variables. */ - std::shared_ptr staticBaseEnv; // !!! should be private + const std::shared_ptr staticBaseEnv; // !!! should be private /** * Internal primops not exposed to the user. @@ -887,7 +896,7 @@ private: size_t length, Pos::Origin origin, const SourcePath & basePath, - std::shared_ptr & staticEnv); + const std::shared_ptr & staticEnv); /** * Current Nix call stack depth, used with `max-call-depth` diff --git a/src/libexpr/include/nix/expr/get-drvs.hh b/src/libexpr/include/nix/expr/get-drvs.hh index 3d42188bfab..4beccabe2ad 100644 --- a/src/libexpr/include/nix/expr/get-drvs.hh +++ b/src/libexpr/include/nix/expr/get-drvs.hh @@ -15,7 +15,7 @@ namespace nix { struct PackageInfo { public: - typedef std::map> Outputs; + typedef std::map, std::less<>> Outputs; private: EvalState * state; diff --git a/src/libexpr/include/nix/expr/meson.build b/src/libexpr/include/nix/expr/meson.build index 58881506c91..9f676b230f1 100644 --- a/src/libexpr/include/nix/expr/meson.build +++ b/src/libexpr/include/nix/expr/meson.build @@ -32,6 +32,7 @@ headers = [ config_pub_h ] + files( 'print.hh', 'repl-exit-status.hh', 'search-path.hh', + 'static-string-data.hh', 'symbol-table.hh', 'value-to-json.hh', 'value-to-xml.hh', diff --git a/src/libexpr/include/nix/expr/nixexpr.hh b/src/libexpr/include/nix/expr/nixexpr.hh index 031ef508503..9bce1a9b91a 100644 --- a/src/libexpr/include/nix/expr/nixexpr.hh +++ 
b/src/libexpr/include/nix/expr/nixexpr.hh @@ -3,6 +3,7 @@ #include #include +#include #include #include #include @@ -10,8 +11,11 @@ #include "nix/expr/value.hh" #include "nix/expr/symbol-table.hh" #include "nix/expr/eval-error.hh" +#include "nix/expr/static-string-data.hh" #include "nix/util/pos-idx.hh" #include "nix/expr/counter.hh" +#include "nix/util/pos-table.hh" +#include "nix/util/error.hh" namespace nix { @@ -82,18 +86,9 @@ struct AttrName static_assert(std::is_trivially_copy_constructible_v); -typedef std::vector AttrPath; +typedef std::vector AttrSelectionPath; -std::string showAttrPath(const SymbolTable & symbols, std::span attrPath); - -class Exprs -{ - // FIXME: use std::pmr::monotonic_buffer_resource when parallel - // eval is disabled? - std::pmr::synchronized_pool_resource pool; -public: - std::pmr::polymorphic_allocator alloc{&pool}; -}; +std::string showAttrSelectionPath(const SymbolTable & symbols, std::span attrPath); /* Abstract syntax of Nix expressions. */ @@ -182,22 +177,18 @@ struct ExprString : Expr * This is only for strings already allocated in our polymorphic allocator, * or that live at least that long (e.g. 
c++ string literals) */ - ExprString(const char * s) + ExprString(const StringData & s) { v.mkStringNoCopy(s); }; ExprString(std::pmr::polymorphic_allocator & alloc, std::string_view sv) { - auto len = sv.length(); - if (len == 0) { - v.mkStringNoCopy(""); + if (sv.size() == 0) { + v.mkStringNoCopy(""_sds); return; } - char * s = alloc.allocate(len + 1); - sv.copy(s, len); - s[len] = '\0'; - v.mkStringNoCopy(s); + v.mkStringNoCopy(StringData::make(*alloc.resource(), sv)); }; Value * maybeThunk(EvalState & state, Env & env) override; @@ -212,11 +203,7 @@ struct ExprPath : Expr ExprPath(std::pmr::polymorphic_allocator & alloc, ref accessor, std::string_view sv) : accessor(accessor) { - auto len = sv.length(); - char * s = alloc.allocate(len + 1); - sv.copy(s, len); - s[len] = '\0'; - v.mkPath(&*accessor, s); + v.mkPath(&*accessor, StringData::make(*alloc.resource(), sv)); } Value * maybeThunk(EvalState & state, Env & env) override; @@ -341,7 +328,7 @@ struct ExprOpHasAttr : Expr Expr * e; std::span attrPath; - ExprOpHasAttr(std::pmr::polymorphic_allocator & alloc, Expr * e, std::vector attrPath) + ExprOpHasAttr(std::pmr::polymorphic_allocator & alloc, Expr * e, std::span attrPath) : e(e) , attrPath({alloc.allocate_object(attrPath.size()), attrPath.size()}) { @@ -397,9 +384,13 @@ struct ExprAttrs : Expr } }; - typedef std::map AttrDefs; - AttrDefs attrs; - std::unique_ptr> inheritFromExprs; + typedef std::pmr::map AttrDefs; + /** + * attrs will never be null. we use std::optional so that we can call emplace() to re-initialize the value with a + * new pmr::map using a different allocator (move assignment will copy into the old allocator) + */ + std::optional attrs; + std::unique_ptr> inheritFromExprs; struct DynamicAttrDef { @@ -411,13 +402,20 @@ struct ExprAttrs : Expr , pos(pos) {}; }; - typedef std::vector DynamicAttrDefs; - DynamicAttrDefs dynamicAttrs; + typedef std::pmr::vector DynamicAttrDefs; + /** + * dynamicAttrs will never be null. 
See comment on AttrDefs above. + */ + std::optional dynamicAttrs; ExprAttrs(const PosIdx & pos) : recursive(false) - , pos(pos) {}; + , pos(pos) + , attrs(AttrDefs{}) + , dynamicAttrs(DynamicAttrDefs{}) {}; ExprAttrs() - : recursive(false) {}; + : recursive(false) + , attrs(AttrDefs{}) + , dynamicAttrs(DynamicAttrDefs{}) {}; PosIdx getPos() const override { @@ -429,12 +427,19 @@ struct ExprAttrs : Expr std::shared_ptr bindInheritSources(EvalState & es, const std::shared_ptr & env); Env * buildInheritFromEnv(EvalState & state, Env & up); void showBindings(const SymbolTable & symbols, std::ostream & str) const; + void moveDataToAllocator(std::pmr::polymorphic_allocator & alloc); }; struct ExprList : Expr { - std::vector elems; - ExprList() {}; + std::span elems; + + ExprList(std::pmr::polymorphic_allocator & alloc, std::span exprs) + : elems({alloc.allocate_object(exprs.size()), exprs.size()}) + { + std::ranges::copy(exprs, elems.begin()); + }; + COMMON_METHODS Value * maybeThunk(EvalState & state, Env & env) override; @@ -451,7 +456,7 @@ struct Formal Expr * def; }; -struct Formals +struct FormalsBuilder { typedef std::vector Formals_; /** @@ -466,6 +471,23 @@ struct Formals formals.begin(), formals.end(), arg, [](const Formal & f, const Symbol & sym) { return f.name < sym; }); return it != formals.end() && it->name == arg; } +}; + +struct Formals +{ + std::span formals; + bool ellipsis; + + Formals(std::span formals, bool ellipsis) + : formals(formals) + , ellipsis(ellipsis) {}; + + bool has(Symbol arg) const + { + auto it = std::lower_bound( + formals.begin(), formals.end(), arg, [](const Formal & f, const Symbol & sym) { return f.name < sym; }); + return it != formals.end() && it->name == arg; + } std::vector lexicographicOrder(const SymbolTable & symbols) const { @@ -483,31 +505,71 @@ struct ExprLambda : Expr PosIdx pos; Symbol name; Symbol arg; - Formals * formals; + +private: + bool hasFormals; + bool ellipsis; + uint16_t nFormals; + Formal * formalsStart; 
+public: + + std::optional getFormals() const + { + if (hasFormals) + return Formals{{formalsStart, nFormals}, ellipsis}; + else + return std::nullopt; + } + Expr * body; DocComment docComment; - ExprLambda(PosIdx pos, Symbol arg, Formals * formals, Expr * body) + ExprLambda( + const PosTable & positions, + std::pmr::polymorphic_allocator & alloc, + PosIdx pos, + Symbol arg, + const FormalsBuilder & formals, + Expr * body) : pos(pos) , arg(arg) - , formals(formals) - , body(body) {}; - - ExprLambda(PosIdx pos, Formals * formals, Expr * body) - : pos(pos) - , formals(formals) + , hasFormals(true) + , ellipsis(formals.ellipsis) + , nFormals(formals.formals.size()) + , formalsStart(alloc.allocate_object(nFormals)) , body(body) { - } + if (formals.formals.size() > nFormals) [[unlikely]] { + auto err = Error( + "too many formal arguments, implementation supports at most %1%", + std::numeric_limits::max()); + if (pos) + err.atPos(positions[pos]); + throw err; + } + std::uninitialized_copy_n(formals.formals.begin(), nFormals, formalsStart); + }; + + ExprLambda(PosIdx pos, Symbol arg, Expr * body) + : pos(pos) + , arg(arg) + , hasFormals(false) + , ellipsis(false) + , nFormals(0) + , formalsStart(nullptr) + , body(body) {}; + + ExprLambda( + const PosTable & positions, + std::pmr::polymorphic_allocator & alloc, + PosIdx pos, + const FormalsBuilder & formals, + Expr * body) + : ExprLambda(positions, alloc, pos, Symbol(), formals, body) {}; void setName(Symbol name) override; std::string showNamePos(const EvalState & state) const; - inline bool hasFormals() const - { - return formals != nullptr; - } - PosIdx getPos() const override { return pos; @@ -520,11 +582,14 @@ struct ExprLambda : Expr struct ExprCall : Expr { Expr * fun; - std::vector args; + /** + * args will never be null. See comment on ExprAttrs::AttrDefs below. 
+ */ + std::optional> args; PosIdx pos; std::optional cursedOrEndPos; // used during parsing to warn about https://github.com/NixOS/nix/issues/11118 - ExprCall(const PosIdx & pos, Expr * fun, std::vector && args) + ExprCall(const PosIdx & pos, Expr * fun, std::pmr::vector && args) : fun(fun) , args(args) , pos(pos) @@ -532,7 +597,7 @@ struct ExprCall : Expr { } - ExprCall(const PosIdx & pos, Expr * fun, std::vector && args, PosIdx && cursedOrEndPos) + ExprCall(const PosIdx & pos, Expr * fun, std::pmr::vector && args, PosIdx && cursedOrEndPos) : fun(fun) , args(args) , pos(pos) @@ -547,6 +612,7 @@ struct ExprCall : Expr virtual void resetCursedOr() override; virtual void warnIfCursedOr(const SymbolTable & symbols, const PosTable & positions) override; + void moveDataToAllocator(std::pmr::polymorphic_allocator & alloc); COMMON_METHODS }; @@ -563,8 +629,8 @@ struct ExprLet : Expr struct ExprWith : Expr { PosIdx pos; + uint32_t prevWith; Expr *attrs, *body; - size_t prevWith; ExprWith * parentWith; ExprWith(const PosIdx & pos, Expr * attrs, Expr * body) : pos(pos) @@ -679,11 +745,31 @@ struct ExprConcatStrings : Expr { PosIdx pos; bool forceString; - std::vector> * es; - ExprConcatStrings(const PosIdx & pos, bool forceString, std::vector> * es) + std::span> es; + + ExprConcatStrings( + std::pmr::polymorphic_allocator & alloc, + const PosIdx & pos, + bool forceString, + std::span> es) : pos(pos) , forceString(forceString) - , es(es) {}; + , es({alloc.allocate_object>(es.size()), es.size()}) + { + std::ranges::copy(es, this->es.begin()); + }; + + ExprConcatStrings( + std::pmr::polymorphic_allocator & alloc, + const PosIdx & pos, + bool forceString, + std::initializer_list> es) + : pos(pos) + , forceString(forceString) + , es({alloc.allocate_object>(es.size()), es.size()}) + { + std::ranges::copy(es, this->es.begin()); + }; PosIdx getPos() const override { @@ -707,6 +793,63 @@ struct ExprPos : Expr COMMON_METHODS }; +class Exprs +{ + // FIXME: use 
std::pmr::monotonic_buffer_resource when parallel + // eval is disabled? + std::pmr::synchronized_pool_resource buffer; +public: + std::pmr::polymorphic_allocator alloc{&buffer}; + + template + [[gnu::always_inline]] + C * add(auto &&... args) + { + return alloc.new_object(std::forward(args)...); + } + + // we define some calls to add explicitly so that the argument can be passed in as initializer lists + template + [[gnu::always_inline]] + C * add(const PosIdx & pos, Expr * fun, std::pmr::vector && args) + requires(std::same_as) + { + return alloc.new_object(pos, fun, std::move(args)); + } + + template + [[gnu::always_inline]] + C * add(const PosIdx & pos, Expr * fun, std::pmr::vector && args, PosIdx && cursedOrEndPos) + requires(std::same_as) + { + return alloc.new_object(pos, fun, std::move(args), std::move(cursedOrEndPos)); + } + + template + [[gnu::always_inline]] + C * + add(std::pmr::polymorphic_allocator & alloc, + const PosIdx & pos, + bool forceString, + std::span> es) + requires(std::same_as) + { + return alloc.new_object(alloc, pos, forceString, es); + } + + template + [[gnu::always_inline]] + C * + add(std::pmr::polymorphic_allocator & alloc, + const PosIdx & pos, + bool forceString, + std::initializer_list> es) + requires(std::same_as) + { + return alloc.new_object(alloc, pos, forceString, es); + } +}; + /* Static environments are used to map variable names onto (level, displacement) pairs used to obtain the value of the variable at runtime. 
*/ diff --git a/src/libexpr/include/nix/expr/parser-state.hh b/src/libexpr/include/nix/expr/parser-state.hh index 55dce30470b..f9bd06589e4 100644 --- a/src/libexpr/include/nix/expr/parser-state.hh +++ b/src/libexpr/include/nix/expr/parser-state.hh @@ -4,6 +4,8 @@ #include #include "nix/expr/eval.hh" +#include "nix/expr/value.hh" +#include "nix/expr/static-string-data.hh" namespace nix { @@ -45,6 +47,79 @@ struct ParserLocation } }; +/** + * This represents a string-like parse that possibly has yet to be constructed. + * + * Examples: + * "foo" + * ${"foo" + "bar"} + * "foo.bar" + * "foo-${a}" + * + * Using this type allows us to avoid construction altogether in cases where what we actually need is the string + * contents. For example in foo."bar.baz", there is no need to construct an AST node for "bar.baz", but we don't know + * that until we bubble the value up during parsing and see that it's a node in an AttrPath. + */ +class ToBeStringyExpr +{ +private: + using Raw = std::variant; + Raw raw; + +public: + ToBeStringyExpr() = default; + + ToBeStringyExpr(std::string_view v) + : raw(v) + { + } + + ToBeStringyExpr(Expr * expr) + : raw(expr) + { + assert(expr); + } + + /** + * Visits the expression and invokes an overloaded functor object \ref f. + * If the underlying Expr has a dynamic type of ExprString the overload taking std::string_view + * is invoked. + * + * Used to consistently handle simple StringExpr ${"string"} as non-dynamic attributes. + * @see https://github.com/NixOS/nix/issues/14642 + */ + template + void visit(F && f) + { + std::visit( + overloaded{ + [&](std::string_view str) { f(str); }, + [&](Expr * expr) { + ExprString * str = dynamic_cast(expr); + if (str) + f(str->v.string_view()); + else + f(expr); + }, + [](std::monostate) { unreachable(); }}, + raw); + } + + /** + * Get or create an Expr from either an existing Expr or from a string. + * Delays the allocation or an AST node in case the parser only cares about string contents. 
+ */ + Expr * toExpr(Exprs & exprs) + { + return std::visit( + overloaded{ + [&](std::string_view str) -> Expr * { return exprs.add(exprs.alloc, str); }, + [&](Expr * expr) { return expr; }, + [](std::monostate) -> Expr * { unreachable(); }}, + raw); + } +}; + struct LexerState { /** @@ -78,7 +153,7 @@ struct LexerState struct ParserState { const LexerState & lexerState; - std::pmr::polymorphic_allocator & alloc; + Exprs & exprs; SymbolTable & symbols; PosTable & positions; Expr * result; @@ -88,20 +163,25 @@ struct ParserState static constexpr Expr::AstSymbols s = StaticEvalSymbols::create().exprSymbols; const EvalSettings & settings; - void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos); + void dupAttr(const AttrSelectionPath & attrPath, const PosIdx pos, const PosIdx prevPos); void dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos); void addAttr( - ExprAttrs * attrs, AttrPath && attrPath, const ParserLocation & loc, Expr * e, const ParserLocation & exprLoc); - void addAttr(ExprAttrs * attrs, AttrPath & attrPath, const Symbol & symbol, ExprAttrs::AttrDef && def); - Formals * validateFormals(Formals * formals, PosIdx pos = noPos, Symbol arg = {}); - Expr * stripIndentation(const PosIdx pos, std::vector>> && es); + ExprAttrs * attrs, + AttrSelectionPath && attrPath, + const ParserLocation & loc, + Expr * e, + const ParserLocation & exprLoc); + void addAttr(ExprAttrs * attrs, AttrSelectionPath & attrPath, const Symbol & symbol, ExprAttrs::AttrDef && def); + void validateFormals(FormalsBuilder & formals, PosIdx pos = noPos, Symbol arg = {}); + Expr * stripIndentation(const PosIdx pos, std::span>> es); PosIdx at(const ParserLocation & loc); }; -inline void ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos) +inline void ParserState::dupAttr(const AttrSelectionPath & attrPath, const PosIdx pos, const PosIdx prevPos) { throw ParseError( - {.msg = HintFmt("attribute '%1%' already defined at %2%", 
showAttrPath(symbols, attrPath), positions[prevPos]), + {.msg = HintFmt( + "attribute '%1%' already defined at %2%", showAttrSelectionPath(symbols, attrPath), positions[prevPos]), .pos = positions[pos]}); } @@ -113,9 +193,13 @@ inline void ParserState::dupAttr(Symbol attr, const PosIdx pos, const PosIdx pre } inline void ParserState::addAttr( - ExprAttrs * attrs, AttrPath && attrPath, const ParserLocation & loc, Expr * e, const ParserLocation & exprLoc) + ExprAttrs * attrs, + AttrSelectionPath && attrPath, + const ParserLocation & loc, + Expr * e, + const ParserLocation & exprLoc) { - AttrPath::iterator i; + AttrSelectionPath::iterator i; // All attrpaths have at least one attr assert(!attrPath.empty()); auto pos = at(loc); @@ -124,20 +208,20 @@ inline void ParserState::addAttr( for (i = attrPath.begin(); i + 1 < attrPath.end(); i++) { ExprAttrs * nested; if (i->symbol) { - ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol); - if (j != attrs->attrs.end()) { + ExprAttrs::AttrDefs::iterator j = attrs->attrs->find(i->symbol); + if (j != attrs->attrs->end()) { nested = dynamic_cast(j->second.e); if (!nested) { attrPath.erase(i + 1, attrPath.end()); dupAttr(attrPath, pos, j->second.pos); } } else { - nested = new ExprAttrs; - attrs->attrs[i->symbol] = ExprAttrs::AttrDef(nested, pos); + nested = exprs.add(); + (*attrs->attrs)[i->symbol] = ExprAttrs::AttrDef(nested, pos); } } else { - nested = new ExprAttrs; - attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, nested, pos)); + nested = exprs.add(); + attrs->dynamicAttrs->push_back(ExprAttrs::DynamicAttrDef(i->expr, nested, pos)); } attrs = nested; } @@ -146,7 +230,7 @@ inline void ParserState::addAttr( if (i->symbol) { addAttr(attrs, attrPath, i->symbol, ExprAttrs::AttrDef(e, pos)); } else { - attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, e, pos)); + attrs->dynamicAttrs->push_back(ExprAttrs::DynamicAttrDef(i->expr, e, pos)); } auto it = 
lexerState.positionToDocComment.find(pos); @@ -161,10 +245,10 @@ inline void ParserState::addAttr( * symbol as its last element. */ inline void -ParserState::addAttr(ExprAttrs * attrs, AttrPath & attrPath, const Symbol & symbol, ExprAttrs::AttrDef && def) +ParserState::addAttr(ExprAttrs * attrs, AttrSelectionPath & attrPath, const Symbol & symbol, ExprAttrs::AttrDef && def) { - ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(symbol); - if (j != attrs->attrs.end()) { + ExprAttrs::AttrDefs::iterator j = attrs->attrs->find(symbol); + if (j != attrs->attrs->end()) { // This attr path is already defined. However, if both // e and the expr pointed by the attr path are two attribute sets, // we want to merge them. @@ -179,8 +263,8 @@ ParserState::addAttr(ExprAttrs * attrs, AttrPath & attrPath, const Symbol & symb // See https://github.com/NixOS/nix/issues/9020. if (jAttrs && ae) { if (ae->inheritFromExprs && !jAttrs->inheritFromExprs) - jAttrs->inheritFromExprs = std::make_unique>(); - for (auto & ad : ae->attrs) { + jAttrs->inheritFromExprs = std::make_unique>(); + for (auto & ad : *ae->attrs) { if (ad.second.kind == ExprAttrs::AttrDef::Kind::InheritedFrom) { auto & sel = dynamic_cast(*ad.second.e); auto & from = dynamic_cast(*sel.e); @@ -190,12 +274,12 @@ ParserState::addAttr(ExprAttrs * attrs, AttrPath & attrPath, const Symbol & symb addAttr(jAttrs, attrPath, ad.first, std::move(ad.second)); attrPath.pop_back(); } - ae->attrs.clear(); - jAttrs->dynamicAttrs.insert( - jAttrs->dynamicAttrs.end(), - std::make_move_iterator(ae->dynamicAttrs.begin()), - std::make_move_iterator(ae->dynamicAttrs.end())); - ae->dynamicAttrs.clear(); + ae->attrs->clear(); + jAttrs->dynamicAttrs->insert( + jAttrs->dynamicAttrs->end(), + std::make_move_iterator(ae->dynamicAttrs->begin()), + std::make_move_iterator(ae->dynamicAttrs->end())); + ae->dynamicAttrs->clear(); if (ae->inheritFromExprs) { jAttrs->inheritFromExprs->insert( jAttrs->inheritFromExprs->end(), @@ -208,22 +292,22 @@ 
ParserState::addAttr(ExprAttrs * attrs, AttrPath & attrPath, const Symbol & symb } } else { // This attr path is not defined. Let's create it. - attrs->attrs.emplace(symbol, def); + attrs->attrs->emplace(symbol, def); def.e->setName(symbol); } } -inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Symbol arg) +inline void ParserState::validateFormals(FormalsBuilder & formals, PosIdx pos, Symbol arg) { - std::sort(formals->formals.begin(), formals->formals.end(), [](const auto & a, const auto & b) { + std::sort(formals.formals.begin(), formals.formals.end(), [](const auto & a, const auto & b) { return std::tie(a.name, a.pos) < std::tie(b.name, b.pos); }); std::optional> duplicate; - for (size_t i = 0; i + 1 < formals->formals.size(); i++) { - if (formals->formals[i].name != formals->formals[i + 1].name) + for (size_t i = 0; i + 1 < formals.formals.size(); i++) { + if (formals.formals[i].name != formals.formals[i + 1].name) continue; - std::pair thisDup{formals->formals[i].name, formals->formals[i + 1].pos}; + std::pair thisDup{formals.formals[i].name, formals.formals[i + 1].pos}; duplicate = std::min(thisDup, duplicate.value_or(thisDup)); } if (duplicate) @@ -231,18 +315,16 @@ inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Sym {.msg = HintFmt("duplicate formal function argument '%1%'", symbols[duplicate->first]), .pos = positions[duplicate->second]}); - if (arg && formals->has(arg)) + if (arg && formals.has(arg)) throw ParseError( {.msg = HintFmt("duplicate formal function argument '%1%'", symbols[arg]), .pos = positions[pos]}); - - return formals; } inline Expr * -ParserState::stripIndentation(const PosIdx pos, std::vector>> && es) +ParserState::stripIndentation(const PosIdx pos, std::span>> es) { if (es.empty()) - return new ExprString(""); + return exprs.add(""_sds); /* Figure out the minimum indentation. Note that by design whitespace-only final lines are not taken into account. 
(So @@ -282,7 +364,7 @@ ParserState::stripIndentation(const PosIdx pos, std::vector>; + std::vector> es2{}; atStartOfLine = true; size_t curDropped = 0; size_t n = es.size(); @@ -290,7 +372,7 @@ ParserState::stripIndentation(const PosIdx pos, std::vectoremplace_back(i->first, e); + es2.emplace_back(i->first, e); }; const auto trimString = [&](const StringToken & t) { std::string s2; @@ -324,7 +406,7 @@ ParserState::stripIndentation(const PosIdx pos, std::vectoremplace_back(i->first, new ExprString(alloc, s2)); + es2.emplace_back(i->first, exprs.add(exprs.alloc, s2)); } }; for (; i != es.end(); ++i, --n) { @@ -333,19 +415,17 @@ ParserState::stripIndentation(const PosIdx pos, std::vectorsize() == 0) { - auto * const result = new ExprString(""); - delete es2; + if (es2.size() == 0) { + auto * const result = exprs.add(""_sds); return result; } /* If this is a single string, then don't do a concatenation. */ - if (es2->size() == 1 && dynamic_cast((*es2)[0].second)) { - auto * const result = (*es2)[0].second; - delete es2; + if (es2.size() == 1 && dynamic_cast((es2)[0].second)) { + auto * const result = (es2)[0].second; return result; } - return new ExprConcatStrings(pos, true, es2); + return exprs.add(exprs.alloc, pos, true, es2); } inline PosIdx LexerState::at(const ParserLocation & loc) diff --git a/src/libexpr/include/nix/expr/primops.hh b/src/libexpr/include/nix/expr/primops.hh index 6407ba84e50..8854f6b0384 100644 --- a/src/libexpr/include/nix/expr/primops.hh +++ b/src/libexpr/include/nix/expr/primops.hh @@ -12,11 +12,7 @@ struct RegisterPrimOp { typedef std::vector PrimOps; - static PrimOps & primOps() - { - static PrimOps primOps; - return primOps; - } + static PrimOps & primOps(); /** * You can register a constant by passing an arity of 0. 
fun diff --git a/src/libexpr/include/nix/expr/print-options.hh b/src/libexpr/include/nix/expr/print-options.hh index ffb80abc3fc..600b96ba2f4 100644 --- a/src/libexpr/include/nix/expr/print-options.hh +++ b/src/libexpr/include/nix/expr/print-options.hh @@ -110,7 +110,7 @@ struct PrintOptions * `PrintOptions` for unknown and therefore potentially large values in error messages, * to avoid printing "too much" output. */ -static PrintOptions errorPrintOptions = PrintOptions{ +static constexpr PrintOptions errorPrintOptions = PrintOptions{ .ansiColors = true, .maxDepth = 10, .maxAttrs = 10, diff --git a/src/libexpr/include/nix/expr/static-string-data.hh b/src/libexpr/include/nix/expr/static-string-data.hh new file mode 100644 index 00000000000..93b5d46a0f1 --- /dev/null +++ b/src/libexpr/include/nix/expr/static-string-data.hh @@ -0,0 +1,44 @@ +#pragma once +///@file + +#include "nix/expr/value.hh" + +namespace nix { + +template +struct StringData::Static +{ + /** + * @note Must be first to make layout compatible with StringData. + */ + const size_t size = N - 1; + char data[N]; + + consteval Static(const char (&str)[N]) + { + static_assert(N > 0); + if (str[size] != '\0') + throw; + std::copy_n(str, N, data); + } + + operator const StringData &() const & + { + static_assert(sizeof(decltype(*this)) >= sizeof(StringData)); + static_assert(alignof(decltype(*this)) == alignof(StringData)); + /* NOTE: This cast is somewhat on the fence of what's legal in C++. + The question boils down to whether flexible array members are + layout compatible with fixed-size arrays. This is a gray area, since + FAMs are not standard anyway. 
+ */ + return *reinterpret_cast(this); + } +}; + +template +const StringData & operator""_sds() +{ + return S; +} + +} // namespace nix diff --git a/src/libexpr/include/nix/expr/symbol-table.hh b/src/libexpr/include/nix/expr/symbol-table.hh index 3d68def9919..23151082933 100644 --- a/src/libexpr/include/nix/expr/symbol-table.hh +++ b/src/libexpr/include/nix/expr/symbol-table.hh @@ -6,6 +6,7 @@ #include "nix/expr/value.hh" #include "nix/util/error.hh" #include "nix/util/sync.hh" +#include "nix/util/alignment.hh" #include #include @@ -19,8 +20,7 @@ class SymbolValue : protected Value operator std::string_view() const noexcept { - // The actual string is stored directly after the value. - return reinterpret_cast(this + 1); + return string_view(); } }; @@ -129,6 +129,12 @@ public: return *s == s2; } + [[gnu::always_inline]] + const StringData & string_data() const noexcept + { + return s->string_data(); + } + [[gnu::always_inline]] const char * c_str() const noexcept { @@ -145,13 +151,13 @@ public: [[gnu::always_inline]] bool empty() const noexcept { - return static_cast(*s).empty(); + return !s->string_data().size(); } [[gnu::always_inline]] size_t size() const noexcept { - return static_cast(*s).size(); + return s->string_data().size(); } [[gnu::always_inline]] @@ -200,8 +206,7 @@ public: constexpr static size_t computeSize(std::string_view s) { - auto rawSize = sizeof(Value) + s.size() + 1; - return ((rawSize + Symbol::alignment - 1) / Symbol::alignment) * Symbol::alignment; + return alignUp(sizeof(Value) + sizeof(StringData) + s.size() + 1, Symbol::alignment); } }; @@ -274,7 +279,7 @@ public: */ Symbol create(std::string_view s); - std::vector resolve(const std::vector & symbols) const + std::vector resolve(const std::span & symbols) const { std::vector result; result.reserve(symbols.size()); @@ -306,17 +311,11 @@ public: { std::string_view left{arena.data, arena.size}; left = left.substr(Symbol::alignment); - while (true) { - if (left.empty()) - break; - left = 
left.substr(sizeof(Value)); - auto p = left.find('\0'); - assert(p != left.npos); - auto sym = left.substr(0, p); - callback(sym); - // skip alignment padding - auto n = sym.size() + 1; - left = left.substr(n + (n % Symbol::alignment ? Symbol::alignment - (n % Symbol::alignment) : 0)); + while (!left.empty()) { + auto v = reinterpret_cast(left.data()); + callback(v->string_view()); + left = left.substr( + alignUp(sizeof(SymbolValue) + sizeof(StringData) + v->string_view().size() + 1, Symbol::alignment)); } } }; diff --git a/src/libexpr/include/nix/expr/value.hh b/src/libexpr/include/nix/expr/value.hh index fc6dc557456..10893347bd6 100644 --- a/src/libexpr/include/nix/expr/value.hh +++ b/src/libexpr/include/nix/expr/value.hh @@ -2,8 +2,14 @@ ///@file #include +#include #include +#include +#include +#include +#include #include +#include #include #include @@ -119,6 +125,7 @@ class PosIdx; struct Pos; class StorePath; class EvalState; +class EvalMemory; class XMLWriter; class Printer; @@ -192,7 +199,7 @@ class ListBuilder Value * inlineElems[2] = {nullptr, nullptr}; public: Value ** elems; - ListBuilder(size_t size); + ListBuilder(EvalMemory & mem, size_t size); // NOTE: Can be noexcept because we are just copying integral values and // raw pointers. @@ -223,6 +230,91 @@ public: friend struct Value; }; +class StringData +{ +public: + using size_type = std::size_t; + + size_type size_; + char data_[]; + + /* + * This in particular ensures that we cannot have a `StringData` + * that we use by value, which is just what we want! + * + * Dynamically sized types aren't a thing in C++ and even flexible array + * members are a language extension and beyond the realm of standard C++. + * Technically, sizeof data_ member is 0 and the intended way to use flexible + * array members is to allocate sizeof(StrindData) + count * sizeof(char) bytes + * and the compiler will consider alignment restrictions for the FAM. 
+ * + */ + + StringData(StringData &&) = delete; + StringData & operator=(StringData &&) = delete; + StringData(const StringData &) = delete; + StringData & operator=(const StringData &) = delete; + ~StringData() = default; + +private: + StringData() = delete; + + explicit StringData(size_type size) + : size_(size) + { + } + +public: + /** + * Allocate StringData on the (possibly) GC-managed heap and copy + * the contents of s to it. + */ + static const StringData & make(EvalMemory & mem, std::string_view s); + + /** + * Allocate StringData on the (possibly) GC-managed heap. + * @param size Length of the string (without the NUL terminator). + */ + static StringData & alloc(EvalMemory & mem, size_t size); + + size_t size() const + { + return size_; + } + + char * data() noexcept + { + return data_; + } + + const char * data() const noexcept + { + return data_; + } + + const char * c_str() const noexcept + { + return data_; + } + + constexpr std::string_view view() const noexcept + { + return std::string_view(data_, size_); + } + + template + struct Static; + + static StringData & make(std::pmr::memory_resource & resource, std::string_view s) + { + auto & res = + *new (resource.allocate(sizeof(StringData) + s.size() + 1, alignof(StringData))) StringData(s.size()); + std::memcpy(res.data_, s.data(), s.size()); + res.data_[s.size()] = '\0'; + return res; + } +}; + namespace detail { /** @@ -256,14 +348,73 @@ struct ValueBase */ struct StringWithContext { - const char * c_str; - const char ** context; // must be in sorted order + const StringData * str; + + /** + * The type of the context itself. + * + * Currently, it is length-prefixed array of pointers to + * null-terminated strings. The strings are specially formatted + * to represent a flattening of the recursive sum type that is a + * context element. + * + * @See NixStringContext for an more easily understood type, + * that of the "builder" for this data structure. 
+ */ + struct Context + { + using value_type = const StringData *; + using size_type = std::size_t; + using iterator = const value_type *; + + Context(size_type size) + : size_(size) + { + } + + private: + /** + * Number of items in the array + */ + size_type size_; + + /** + * @pre must be in sorted order + */ + value_type elems[]; + + public: + iterator begin() const + { + return elems; + } + + iterator end() const + { + return elems + size(); + } + + size_type size() const + { + return size_; + } + + /** + * @return null pointer when context.empty() + */ + static Context * fromBuilder(const NixStringContext & context, EvalMemory & mem); + }; + + /** + * May be null for a string without context. + */ + const Context * context; }; struct Path { SourceAccessor * accessor; - const char * path; + const StringData * path; }; struct Null @@ -683,13 +834,13 @@ protected: void getStorage(StringWithContext & string) const noexcept { string.context = untagPointer(p0); - string.c_str = std::bit_cast(p1); + string.str = std::bit_cast(p1); } void getStorage(Path & path) const noexcept { path.accessor = untagPointer(p0); - path.path = std::bit_cast(p1); + path.path = std::bit_cast(p1); } void getStorage(Failed *& failed) const noexcept @@ -739,7 +890,7 @@ protected: void setStorage(StringWithContext string) noexcept { - setUntaggablePayload(string.context, string.c_str); + setUntaggablePayload(string.context, string.str); } void setStorage(Path path) noexcept @@ -1181,22 +1332,22 @@ public: setStorage(b); } - void mkStringNoCopy(const char * s, const char ** context = 0) noexcept + void mkStringNoCopy(const StringData & s, const Value::StringWithContext::Context * context = nullptr) noexcept { - setStorage(StringWithContext{.c_str = s, .context = context}); + setStorage(StringWithContext{.str = &s, .context = context}); } - void mkString(std::string_view s); + void mkString(std::string_view s, EvalMemory & mem); - void mkString(std::string_view s, const NixStringContext & 
context); + void mkString(std::string_view s, const NixStringContext & context, EvalMemory & mem); - void mkStringMove(const char * s, const NixStringContext & context); + void mkStringMove(const StringData & s, const NixStringContext & context, EvalMemory & mem); - void mkPath(const SourcePath & path); + void mkPath(const SourcePath & path, EvalMemory & mem); - inline void mkPath(SourceAccessor * accessor, const char * path) noexcept + inline void mkPath(SourceAccessor * accessor, const StringData & path) noexcept { - setStorage(Path{.accessor = accessor, .path = path}); + setStorage(Path{.accessor = accessor, .path = &path}); } inline void mkNull() noexcept @@ -1290,20 +1441,26 @@ public: SourcePath path() const { - return SourcePath(ref(pathAccessor()->shared_from_this()), CanonPath(CanonPath::unchecked_t(), pathStr())); + return SourcePath( + ref(pathAccessor()->shared_from_this()), CanonPath(CanonPath::unchecked_t(), std::string(pathStrView()))); } - std::string_view string_view() const noexcept + const StringData & string_data() const noexcept { - return std::string_view(getStorage().c_str); + return *getStorage().str; } const char * c_str() const noexcept { - return getStorage().c_str; + return getStorage().str->data(); + } + + std::string_view string_view() const noexcept + { + return string_data().view(); } - const char ** context() const noexcept + const Value::StringWithContext::Context * context() const noexcept { return getStorage().context; } @@ -1362,7 +1519,12 @@ public: const char * pathStr() const noexcept { - return getStorage().path; + return getStorage().path->c_str(); + } + + std::string_view pathStrView() const noexcept + { + return getStorage().path->view(); } SourceAccessor * pathAccessor() const noexcept diff --git a/src/libexpr/include/nix/expr/value/context.hh b/src/libexpr/include/nix/expr/value/context.hh index bb7e8e72790..fa3d4e87c0f 100644 --- a/src/libexpr/include/nix/expr/value/context.hh +++ 
b/src/libexpr/include/nix/expr/value/context.hh @@ -24,6 +24,14 @@ public: } }; +/** + * @todo This should be renamed to `StringContextBuilderElem`, since: + * + * 1. We use `*Builder` for off-heap temporary data structures + * + * 2. The `Nix*` is totally redundant. (And my mistake from a long time + * ago.) + */ struct NixStringContextElem { /** @@ -101,6 +109,11 @@ struct NixStringContextElem std::string to_string() const; }; +/** + * @todo This should be renamed to `StringContextBuilder`. + * + * @see NixStringContextElem for explanation why. + */ typedef std::set NixStringContext; /** diff --git a/src/libexpr/json-to-value.cc b/src/libexpr/json-to-value.cc index 9c645e7fd83..4a68308c641 100644 --- a/src/libexpr/json-to-value.cc +++ b/src/libexpr/json-to-value.cc @@ -151,7 +151,7 @@ class JSONSax : nlohmann::json_sax bool string(string_t & val) override { forceNoNullByte(val); - rs->value(state).mkString(val); + rs->value(state).mkString(val, state.mem); rs->add(); return true; } diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index f420fc13f34..810503bdc5b 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -142,11 +142,11 @@ or { return OR_KW; } return PIPE_INTO; } -{ID} { yylval->id = {yytext, (size_t) yyleng}; return ID; } +{ID} { yylval->emplace(yytext, (size_t) yyleng); return ID; } {INT} { errno = 0; std::optional numMay = string2Int(yytext); if (numMay.has_value()) { - yylval->n = NixInt{*numMay}; + yylval->emplace(*numMay); } else { throw ParseError(ErrorInfo{ .msg = HintFmt("invalid integer '%1%'", yytext), @@ -156,7 +156,7 @@ or { return OR_KW; } return INT_LIT; } {FLOAT} { errno = 0; - yylval->nf = strtod(yytext, 0); + yylval->emplace(strtod(yytext, 0)); if (errno != 0) throw ParseError(ErrorInfo{ .msg = HintFmt("invalid float '%1%'", yytext), @@ -183,7 +183,7 @@ or { return OR_KW; } /* It is impossible to match strings ending with '$' with one regex because trailing contexts are only valid at the end of a rule. 
(A sane but undocumented limitation.) */ - yylval->str = unescapeStr(yytext, yyleng, [&]() { return state->positions[CUR_POS]; }); + yylval->emplace(unescapeStr(yytext, yyleng, [&]() { return state->positions[CUR_POS]; })); return STR; } \$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; } @@ -198,27 +198,27 @@ or { return OR_KW; } \'\'(\ *\n)? { PUSH_STATE(IND_STRING); return IND_STRING_OPEN; } ([^\$\']|\$[^\{\']|\'[^\'\$])+ { - yylval->str = {yytext, (size_t) yyleng, true}; - forceNoNullByte(yylval->str, [&]() { return state->positions[CUR_POS]; }); + yylval->emplace(yytext, (size_t) yyleng, true); + forceNoNullByte(yylval->as(), [&]() { return state->positions[CUR_POS]; }); return IND_STR; } \'\'\$ | \$ { - yylval->str = {"$", 1}; + yylval->emplace("$", 1); return IND_STR; } \'\'\' { - yylval->str = {"''", 2}; + yylval->emplace("''", 2); return IND_STR; } \'\'\\{ANY} { - yylval->str = unescapeStr(yytext + 2, yyleng - 2, [&]() { return state->positions[CUR_POS]; }); + yylval->emplace(unescapeStr(yytext + 2, yyleng - 2, [&]() { return state->positions[CUR_POS]; })); return IND_STR; } \$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; } \'\' { POP_STATE(); return IND_STRING_CLOSE; } \' { - yylval->str = {"'", 1}; + yylval->emplace("'", 1); return IND_STR; } @@ -232,23 +232,31 @@ or { return OR_KW; } {PATH_SEG} { POP_STATE(); PUSH_STATE(INPATH_SLASH); - yylval->path = {yytext, (size_t) yyleng}; + yylval->emplace(yytext, (size_t) yyleng); return PATH; } {HPATH_START} { POP_STATE(); PUSH_STATE(INPATH_SLASH); - yylval->path = {yytext, (size_t) yyleng}; + yylval->emplace(yytext, (size_t) yyleng); return HPATH; } +{ANY} | +<> { + /* This should be unreachable: PATH_START is only entered after matching + PATH_SEG or HPATH_START, and we rewind to re-parse those same patterns. + This rule exists to satisfy flex's %option nodefault requirement. 
*/ + unreachable(); +} + {PATH} { if (yytext[yyleng-1] == '/') PUSH_STATE(INPATH_SLASH); else PUSH_STATE(INPATH); - yylval->path = {yytext, (size_t) yyleng}; + yylval->emplace(yytext, (size_t) yyleng); return PATH; } {HPATH} { @@ -256,7 +264,7 @@ or { return OR_KW; } PUSH_STATE(INPATH_SLASH); else PUSH_STATE(INPATH); - yylval->path = {yytext, (size_t) yyleng}; + yylval->emplace(yytext, (size_t) yyleng); return HPATH; } @@ -272,7 +280,7 @@ or { return OR_KW; } PUSH_STATE(INPATH_SLASH); else PUSH_STATE(INPATH); - yylval->str = {yytext, (size_t) yyleng}; + yylval->emplace(yytext, (size_t) yyleng); return STR; } {ANY} | @@ -294,8 +302,8 @@ or { return OR_KW; } }); } -{SPATH} { yylval->path = {yytext, (size_t) yyleng}; return SPATH; } -{URI} { yylval->uri = {yytext, (size_t) yyleng}; return URI; } +{SPATH} { yylval->emplace(yytext, (size_t) yyleng); return SPATH; } +{URI} { yylval->emplace(yytext, (size_t) yyleng); return URI; } %{ // Doc comment rule diff --git a/src/libexpr/meson.build b/src/libexpr/meson.build index fc5acd118dc..3724db9e2cb 100644 --- a/src/libexpr/meson.build +++ b/src/libexpr/meson.build @@ -97,7 +97,6 @@ config_priv_h = configure_file( ) subdir('nix-meson-build-support/common') -subdir('nix-meson-build-support/asan-options') parser_tab = custom_target( input : 'parser.y', @@ -186,17 +185,62 @@ subdir('primops') subdir('nix-meson-build-support/export-all-symbols') subdir('nix-meson-build-support/windows-version') +# Turns out that Bison/Flex are particularly sensitive to compilers +# failing to inline functions. For that reason we crank up the inlining +# threshold manually for optimized builds. Yes, this can be considered 'ricing' +# the compiler, but it does pay off. +# +# NOTE: missed inlining can be spotted (for Clang) using -Rpass-missed=inline +# and -fdump-ipa-inline-missed (for GCC). 
+parser_library_cpp_args = [] + +if not get_option('debug') + if cxx.get_id() == 'clang' + # The default as of LLVM 21 is 225: + # llc --help-hidden | grep inline-threshold + parser_library_cpp_args += [ + '-mllvm', + '-inline-threshold=5000', + ] + elif cxx.get_id() == 'gcc' + parser_library_cpp_args += [ + '--param=max-inline-insns-single=1000', + '--param=max-inline-insns-auto=1000', + '--param=inline-unit-growth=400', + ] + endif +endif + +# Working around https://github.com/mesonbuild/meson/issues/1367. +parser_library = static_library( + 'nixexpr-parser', + parser_tab, + lexer_tab, + cpp_args : parser_library_cpp_args, + dependencies : deps_public + deps_private + deps_other, + include_directories : include_dirs, + # 1. Stdlib and regular assertions regress parser performance significantly, so build without + # them for this one library when building in a release configuration. + # 2. Disable LTO for GCC because then inlining flags won't apply, since LTO in GCC is done + # by plonking down GIMPLE in the archive. + override_options : [ + 'b_ndebug=@0@'.format(not get_option('debug')), + 'b_lto=@0@'.format(get_option('b_lto') and cxx.get_id() != 'gcc'), + ], +) + this_library = library( 'nixexpr', sources, config_priv_h, - parser_tab, - lexer_tab, + parser_tab[1], + lexer_tab[1], generated_headers, soversion : nix_soversion, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, + link_whole : [ parser_library ], prelink : true, # For C++ static initializers install : true, cpp_pch : do_pch ? 
[ 'pch/precompiled-headers.hh' ] : [], diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 93853fc86a7..b52370816f5 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -43,7 +43,7 @@ void ExprString::show(const SymbolTable & symbols, std::ostream & str) const void ExprPath::show(const SymbolTable & symbols, std::ostream & str) const { - str << v.pathStr(); + str << v.pathStrView(); } void ExprVar::show(const SymbolTable & symbols, std::ostream & str) const @@ -55,7 +55,7 @@ void ExprSelect::show(const SymbolTable & symbols, std::ostream & str) const { str << "("; e->show(symbols, str); - str << ")." << showAttrPath(symbols, getAttrPath()); + str << ")." << showAttrSelectionPath(symbols, getAttrPath()); if (def) { str << " or ("; def->show(symbols, str); @@ -67,14 +67,14 @@ void ExprOpHasAttr::show(const SymbolTable & symbols, std::ostream & str) const { str << "(("; e->show(symbols, str); - str << ") ? " << showAttrPath(symbols, attrPath) << ")"; + str << ") ? 
" << showAttrSelectionPath(symbols, attrPath) << ")"; } void ExprAttrs::showBindings(const SymbolTable & symbols, std::ostream & str) const { - typedef const decltype(attrs)::value_type * Attr; + typedef const AttrDefs::value_type * Attr; std::vector sorted; - for (auto & i : attrs) + for (auto & i : *attrs) sorted.push_back(&i); std::sort(sorted.begin(), sorted.end(), [&](Attr a, Attr b) { std::string_view sa = symbols[a->first], sb = symbols[b->first]; @@ -120,7 +120,7 @@ void ExprAttrs::showBindings(const SymbolTable & symbols, std::ostream & str) co str << "; "; } } - for (auto & i : dynamicAttrs) { + for (auto & i : *dynamicAttrs) { str << "\"${"; i.nameExpr->show(symbols, str); str << "}\" = "; @@ -152,7 +152,7 @@ void ExprList::show(const SymbolTable & symbols, std::ostream & str) const void ExprLambda::show(const SymbolTable & symbols, std::ostream & str) const { str << "("; - if (hasFormals()) { + if (auto formals = getFormals()) { str << "{ "; bool first = true; // the natural Symbol ordering is by creation time, which can lead to the @@ -169,7 +169,7 @@ void ExprLambda::show(const SymbolTable & symbols, std::ostream & str) const i.def->show(symbols, str); } } - if (formals->ellipsis) { + if (ellipsis) { if (!first) str << ", "; str << "..."; @@ -189,7 +189,7 @@ void ExprCall::show(const SymbolTable & symbols, std::ostream & str) const { str << '('; fun->show(symbols, str); - for (auto e : args) { + for (auto e : *args) { str << ' '; e->show(symbols, str); } @@ -244,7 +244,7 @@ void ExprConcatStrings::show(const SymbolTable & symbols, std::ostream & str) co { bool first = true; str << "("; - for (auto & i : *es) { + for (auto & i : es) { if (first) first = false; else @@ -259,7 +259,7 @@ void ExprPos::show(const SymbolTable & symbols, std::ostream & str) const str << "__curPos"; } -std::string showAttrPath(const SymbolTable & symbols, std::span attrPath) +std::string showAttrSelectionPath(const SymbolTable & symbols, std::span attrPath) { 
std::ostringstream out; bool first = true; @@ -397,17 +397,29 @@ ExprAttrs::bindInheritSources(EvalState & es, const std::shared_ptr & alloc) +{ + AttrDefs newAttrs{std::move(*attrs), alloc}; + attrs.emplace(std::move(newAttrs), alloc); + DynamicAttrDefs newDynamicAttrs{std::move(*dynamicAttrs), alloc}; + dynamicAttrs.emplace(std::move(newDynamicAttrs), alloc); + if (inheritFromExprs) + inheritFromExprs = std::make_unique>(std::move(*inheritFromExprs), alloc); +} + void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr & env) { + moveDataToAllocator(es.mem.exprs.alloc); + if (es.debugRepl) es.exprEnvs.insert(std::make_pair(this, env)); if (recursive) { auto newEnv = [&]() -> std::shared_ptr { - auto newEnv = std::make_shared(nullptr, env, attrs.size()); + auto newEnv = std::make_shared(nullptr, env, attrs->size()); Displacement displ = 0; - for (auto & i : attrs) + for (auto & i : *attrs) newEnv->vars.emplace_back(i.first, i.second.displ = displ++); return newEnv; }(); @@ -415,20 +427,20 @@ void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr // No need to sort newEnv since attrs is in sorted order. auto inheritFromEnv = bindInheritSources(es, newEnv); - for (auto & i : attrs) + for (auto & i : *attrs) i.second.e->bindVars(es, i.second.chooseByKind(newEnv, env, inheritFromEnv)); - for (auto & i : dynamicAttrs) { + for (auto & i : *dynamicAttrs) { i.nameExpr->bindVars(es, newEnv); i.valueExpr->bindVars(es, newEnv); } } else { auto inheritFromEnv = bindInheritSources(es, env); - for (auto & i : attrs) + for (auto & i : *attrs) i.second.e->bindVars(es, i.second.chooseByKind(env, env, inheritFromEnv)); - for (auto & i : dynamicAttrs) { + for (auto & i : *dynamicAttrs) { i.nameExpr->bindVars(es, env); i.valueExpr->bindVars(es, env); } @@ -450,14 +462,14 @@ void ExprLambda::bindVars(EvalState & es, const std::shared_ptr es.exprEnvs.insert(std::make_pair(this, env)); auto newEnv = - std::make_shared(nullptr, env, (hasFormals() ? 
formals->formals.size() : 0) + (!arg ? 0 : 1)); + std::make_shared(nullptr, env, (getFormals() ? getFormals()->formals.size() : 0) + (!arg ? 0 : 1)); Displacement displ = 0; if (arg) newEnv->vars.emplace_back(arg, displ++); - if (hasFormals()) { + if (auto formals = getFormals()) { for (auto & i : formals->formals) newEnv->vars.emplace_back(i.name, displ++); @@ -471,23 +483,31 @@ void ExprLambda::bindVars(EvalState & es, const std::shared_ptr body->bindVars(es, newEnv); } +void ExprCall::moveDataToAllocator(std::pmr::polymorphic_allocator & alloc) +{ + std::pmr::vector newArgs{std::move(*args), alloc}; + args.emplace(std::move(newArgs), alloc); +} + void ExprCall::bindVars(EvalState & es, const std::shared_ptr & env) { + moveDataToAllocator(es.mem.exprs.alloc); if (es.debugRepl) es.exprEnvs.insert(std::make_pair(this, env)); fun->bindVars(es, env); - for (auto e : args) + for (auto e : *args) e->bindVars(es, env); } void ExprLet::bindVars(EvalState & es, const std::shared_ptr & env) { + attrs->moveDataToAllocator(es.mem.exprs.alloc); auto newEnv = [&]() -> std::shared_ptr { - auto newEnv = std::make_shared(nullptr, env, attrs->attrs.size()); + auto newEnv = std::make_shared(nullptr, env, attrs->attrs->size()); Displacement displ = 0; - for (auto & i : attrs->attrs) + for (auto & i : *attrs->attrs) newEnv->vars.emplace_back(i.first, i.second.displ = displ++); return newEnv; }(); @@ -495,7 +515,7 @@ void ExprLet::bindVars(EvalState & es, const std::shared_ptr & // No need to sort newEnv since attrs->attrs is in sorted order. 
auto inheritFromEnv = attrs->bindInheritSources(es, newEnv); - for (auto & i : attrs->attrs) + for (auto & i : *attrs->attrs) i.second.e->bindVars(es, i.second.chooseByKind(newEnv, env, inheritFromEnv)); if (es.debugRepl) @@ -521,6 +541,7 @@ void ExprWith::bindVars(EvalState & es, const std::shared_ptr & prevWith = 0; for (curEnv = env.get(), level = 1; curEnv; curEnv = curEnv->up.get(), level++) if (curEnv->isWith) { + assert(level <= std::numeric_limits::max()); prevWith = level; break; } @@ -562,7 +583,7 @@ void ExprConcatStrings::bindVars(EvalState & es, const std::shared_ptres) + for (auto & i : this->es) i.second->bindVars(es, env); } diff --git a/src/libexpr/package.nix b/src/libexpr/package.nix index 683e63d2cd7..c82e56de8ff 100644 --- a/src/libexpr/package.nix +++ b/src/libexpr/package.nix @@ -70,11 +70,6 @@ mkMesonLibrary (finalAttrs: { nix-util nix-store nix-fetchers - ] - ++ finalAttrs.passthru.externalPropagatedBuildInputs; - - # Hack for sake of the dev shell - passthru.externalPropagatedBuildInputs = [ boost nlohmann_json ] diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 9186fcf4b3f..c9ad3040771 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -14,6 +14,10 @@ %code requires { +// bison adds a bunch of switch statements with default: +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wswitch-enum" + #ifndef BISON_HEADER #define BISON_HEADER @@ -59,7 +63,7 @@ Expr * parseExprFromBuf( size_t length, Pos::Origin origin, const SourcePath & basePath, - std::pmr::polymorphic_allocator & alloc, + Exprs & exprs, SymbolTable & symbols, const EvalSettings & settings, PosTable & positions, @@ -109,57 +113,39 @@ static void setDocPosition(const LexerState & lexerState, ExprLambda * lambda, P } } -static Expr * makeCall(PosIdx pos, Expr * fn, Expr * arg) { +static Expr * makeCall(Exprs & exprs, PosIdx pos, Expr * fn, Expr * arg) { if (auto e2 = dynamic_cast(fn)) { - e2->args.push_back(arg); + e2->args->push_back(arg); 
return fn; } - return new ExprCall(pos, fn, {arg}); + return exprs.add(pos, fn, {arg}); } %} -%union { - // !!! We're probably leaking stuff here. - nix::Expr * e; - nix::ExprList * list; - nix::ExprAttrs * attrs; - nix::Formals * formals; - nix::Formal * formal; - nix::NixInt n; - nix::NixFloat nf; - nix::StringToken id; // !!! -> Symbol - nix::StringToken path; - nix::StringToken uri; - nix::StringToken str; - std::vector * attrNames; - std::vector> * inheritAttrs; - std::vector> * string_parts; - std::variant * to_be_string; - std::vector>> * ind_string_parts; -} - -%type start expr expr_function expr_if expr_op -%type expr_select expr_simple expr_app -%type expr_pipe_from expr_pipe_into -%type expr_list -%type binds binds1 -%type formals formal_set -%type formal -%type attrpath -%type attrs -%type string_parts_interpolated -%type ind_string_parts -%type path_start -%type string_parts string_attr -%type attr -%token ID -%token STR IND_STR -%token INT_LIT -%token FLOAT_LIT -%token PATH HPATH SPATH PATH_END -%token URI +%define api.value.type variant + +%type start expr expr_function expr_if expr_op +%type expr_select expr_simple expr_app +%type expr_pipe_from expr_pipe_into +%type > list +%type binds binds1 +%type formals formal_set +%type formal +%type > attrpath +%type >> attrs +%type >> string_parts_interpolated +%type >>> ind_string_parts +%type path_start +%type string_parts string_attr +%type attr +%token ID +%token STR IND_STR +%token INT_LIT +%token FLOAT_LIT +%token PATH HPATH SPATH PATH_END +%token URI %token IF THEN ELSE ASSERT WITH LET IN_KW REC INHERIT EQ NEQ AND OR IMPL OR_KW %token PIPE_FROM PIPE_INTO /* <| and |> */ %token DOLLAR_CURLY /* == ${ */ @@ -193,86 +179,90 @@ expr: expr_function; expr_function : ID ':' expr_function - { auto me = new ExprLambda(CUR_POS, state->symbols.create($1), 0, $3); + { auto me = state->exprs.add(CUR_POS, state->symbols.create($1), $3); $$ = me; SET_DOC_POS(me, @1); } | formal_set ':' expr_function[body] - { auto me 
= new ExprLambda(CUR_POS, state->validateFormals($formal_set), $body); + { + state->validateFormals($formal_set); + auto me = state->exprs.add(state->positions, state->exprs.alloc, CUR_POS, $formal_set, $body); $$ = me; SET_DOC_POS(me, @1); } | formal_set '@' ID ':' expr_function[body] { auto arg = state->symbols.create($ID); - auto me = new ExprLambda(CUR_POS, arg, state->validateFormals($formal_set, CUR_POS, arg), $body); + state->validateFormals($formal_set, CUR_POS, arg); + auto me = state->exprs.add(state->positions, state->exprs.alloc, CUR_POS, arg, $formal_set, $body); $$ = me; SET_DOC_POS(me, @1); } | ID '@' formal_set ':' expr_function[body] { auto arg = state->symbols.create($ID); - auto me = new ExprLambda(CUR_POS, arg, state->validateFormals($formal_set, CUR_POS, arg), $body); + state->validateFormals($formal_set, CUR_POS, arg); + auto me = state->exprs.add(state->positions, state->exprs.alloc, CUR_POS, arg, $formal_set, $body); $$ = me; SET_DOC_POS(me, @1); } | ASSERT expr ';' expr_function - { $$ = new ExprAssert(CUR_POS, $2, $4); } + { $$ = state->exprs.add(CUR_POS, $2, $4); } | WITH expr ';' expr_function - { $$ = new ExprWith(CUR_POS, $2, $4); } + { $$ = state->exprs.add(CUR_POS, $2, $4); } | LET binds IN_KW expr_function - { if (!$2->dynamicAttrs.empty()) + { if (!$2->dynamicAttrs->empty()) throw ParseError({ .msg = HintFmt("dynamic attributes not allowed in let"), .pos = state->positions[CUR_POS] }); - $$ = new ExprLet($2, $4); + $$ = state->exprs.add($2, $4); } | expr_if ; expr_if - : IF expr THEN expr ELSE expr { $$ = new ExprIf(CUR_POS, $2, $4, $6); } + : IF expr THEN expr ELSE expr { $$ = state->exprs.add(CUR_POS, $2, $4, $6); } | expr_pipe_from | expr_pipe_into | expr_op ; expr_pipe_from - : expr_op PIPE_FROM expr_pipe_from { $$ = makeCall(state->at(@2), $1, $3); } - | expr_op PIPE_FROM expr_op { $$ = makeCall(state->at(@2), $1, $3); } + : expr_op PIPE_FROM expr_pipe_from { $$ = makeCall(state->exprs, state->at(@2), $1, $3); } + | expr_op 
PIPE_FROM expr_op { $$ = makeCall(state->exprs, state->at(@2), $1, $3); } ; expr_pipe_into - : expr_pipe_into PIPE_INTO expr_op { $$ = makeCall(state->at(@2), $3, $1); } - | expr_op PIPE_INTO expr_op { $$ = makeCall(state->at(@2), $3, $1); } + : expr_pipe_into PIPE_INTO expr_op { $$ = makeCall(state->exprs, state->at(@2), $3, $1); } + | expr_op PIPE_INTO expr_op { $$ = makeCall(state->exprs, state->at(@2), $3, $1); } ; expr_op - : '!' expr_op %prec NOT { $$ = new ExprOpNot($2); } - | '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(state->s.sub), {new ExprInt(0), $2}); } - | expr_op EQ expr_op { $$ = new ExprOpEq($1, $3); } - | expr_op NEQ expr_op { $$ = new ExprOpNEq($1, $3); } - | expr_op '<' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$1, $3}); } - | expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$3, $1})); } - | expr_op '>' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$3, $1}); } - | expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$1, $3})); } - | expr_op AND expr_op { $$ = new ExprOpAnd(state->at(@2), $1, $3); } - | expr_op OR expr_op { $$ = new ExprOpOr(state->at(@2), $1, $3); } - | expr_op IMPL expr_op { $$ = new ExprOpImpl(state->at(@2), $1, $3); } - | expr_op UPDATE expr_op { $$ = new ExprOpUpdate(state->at(@2), $1, $3); } - | expr_op '?' attrpath { $$ = new ExprOpHasAttr(state->alloc, $1, std::move(*$3)); delete $3; } + : '!' 
expr_op %prec NOT { $$ = state->exprs.add($2); } + | '-' expr_op %prec NEGATE { $$ = state->exprs.add(CUR_POS, state->exprs.add(state->s.sub), {state->exprs.add(0), $2}); } + | expr_op EQ expr_op { $$ = state->exprs.add($1, $3); } + | expr_op NEQ expr_op { $$ = state->exprs.add($1, $3); } + | expr_op '<' expr_op { $$ = state->exprs.add(state->at(@2), state->exprs.add(state->s.lessThan), {$1, $3}); } + | expr_op LEQ expr_op { $$ = state->exprs.add(state->exprs.add(state->at(@2), state->exprs.add(state->s.lessThan), {$3, $1})); } + | expr_op '>' expr_op { $$ = state->exprs.add(state->at(@2), state->exprs.add(state->s.lessThan), {$3, $1}); } + | expr_op GEQ expr_op { $$ = state->exprs.add(state->exprs.add(state->at(@2), state->exprs.add(state->s.lessThan), {$1, $3})); } + | expr_op AND expr_op { $$ = state->exprs.add(state->at(@2), $1, $3); } + | expr_op OR expr_op { $$ = state->exprs.add(state->at(@2), $1, $3); } + | expr_op IMPL expr_op { $$ = state->exprs.add(state->at(@2), $1, $3); } + | expr_op UPDATE expr_op { $$ = state->exprs.add(state->at(@2), $1, $3); } + | expr_op '?' 
attrpath { $$ = state->exprs.add(state->exprs.alloc, $1, $3); } | expr_op '+' expr_op - { $$ = new ExprConcatStrings(state->at(@2), false, new std::vector >({{state->at(@1), $1}, {state->at(@3), $3}})); } - | expr_op '-' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.sub), {$1, $3}); } - | expr_op '*' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.mul), {$1, $3}); } - | expr_op '/' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.div), {$1, $3}); } - | expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(state->at(@2), $1, $3); } + { $$ = state->exprs.add(state->exprs.alloc, state->at(@2), false, {{state->at(@1), $1}, {state->at(@3), $3}}); } + | expr_op '-' expr_op { $$ = state->exprs.add(state->at(@2), state->exprs.add(state->s.sub), {$1, $3}); } + | expr_op '*' expr_op { $$ = state->exprs.add(state->at(@2), state->exprs.add(state->s.mul), {$1, $3}); } + | expr_op '/' expr_op { $$ = state->exprs.add(state->at(@2), state->exprs.add(state->s.div), {$1, $3}); } + | expr_op CONCAT expr_op { $$ = state->exprs.add(state->at(@2), $1, $3); } | expr_app ; expr_app - : expr_app expr_select { $$ = makeCall(CUR_POS, $1, $2); $2->warnIfCursedOr(state->symbols, state->positions); } + : expr_app expr_select { $$ = makeCall(state->exprs, CUR_POS, $1, $2); $2->warnIfCursedOr(state->symbols, state->positions); } | /* Once a ‘cursed or’ reaches this nonterminal, it is no longer cursed, because the uncursed parse would also produce an expr_app. But we need to remove the cursed status in order to prevent valid things like @@ -282,9 +272,9 @@ expr_app expr_select : expr_simple '.' attrpath - { $$ = new ExprSelect(state->alloc, CUR_POS, $1, std::move(*$3), nullptr); delete $3; } + { $$ = state->exprs.add(state->exprs.alloc, CUR_POS, $1, $3, nullptr); } | expr_simple '.' 
attrpath OR_KW expr_select - { $$ = new ExprSelect(state->alloc, CUR_POS, $1, std::move(*$3), $5); delete $3; $5->warnIfCursedOr(state->symbols, state->positions); } + { $$ = state->exprs.add(state->exprs.alloc, CUR_POS, $1, $3, $5); $5->warnIfCursedOr(state->symbols, state->positions); } | /* Backwards compatibility: because Nixpkgs has a function named ‘or’, allow stuff like ‘map or [...]’. This production is problematic (see https://github.com/NixOS/nix/issues/11118) and will be refactored in the @@ -293,7 +283,7 @@ expr_select the ExprCall with data (establishing that it is a ‘cursed or’) that can be used to emit a warning when an affected expression is parsed. */ expr_simple OR_KW - { $$ = new ExprCall(CUR_POS, $1, {new ExprVar(CUR_POS, state->s.or_)}, state->positions.add(state->origin, @$.endOffset)); } + { $$ = state->exprs.add(CUR_POS, $1, {state->exprs.add(CUR_POS, state->s.or_)}, state->positions.add(state->origin, @$.endOffset)); } | expr_simple ; @@ -301,34 +291,27 @@ expr_simple : ID { std::string_view s = "__curPos"; if ($1.l == s.size() && strncmp($1.p, s.data(), s.size()) == 0) - $$ = new ExprPos(CUR_POS); + $$ = state->exprs.add(CUR_POS); else - $$ = new ExprVar(CUR_POS, state->symbols.create($1)); - } - | INT_LIT { $$ = new ExprInt($1); } - | FLOAT_LIT { $$ = new ExprFloat($1); } - | '"' string_parts '"' { - std::visit(overloaded{ - [&](std::string_view str) { $$ = new ExprString(state->alloc, str); }, - [&](Expr * expr) { $$ = expr; }}, - *$2); - delete $2; + $$ = state->exprs.add(CUR_POS, state->symbols.create($1)); } + | INT_LIT { $$ = state->exprs.add($1); } + | FLOAT_LIT { $$ = state->exprs.add($1); } + | '"' string_parts '"' { $$ = $2.toExpr(state->exprs); } | IND_STRING_OPEN ind_string_parts IND_STRING_CLOSE { - $$ = state->stripIndentation(CUR_POS, std::move(*$2)); - delete $2; + $$ = state->stripIndentation(CUR_POS, $2); } | path_start PATH_END | path_start string_parts_interpolated PATH_END { - $2->insert($2->begin(), {state->at(@1), 
$1}); - $$ = new ExprConcatStrings(CUR_POS, false, $2); + $2.insert($2.begin(), {state->at(@1), $1}); + $$ = state->exprs.add(state->exprs.alloc, CUR_POS, false, $2); } | SPATH { std::string_view path($1.p + 1, $1.l - 2); - $$ = new ExprCall(CUR_POS, - new ExprVar(state->s.findFile), - {new ExprVar(state->s.nixPath), - new ExprString(state->alloc, path)}); + $$ = state->exprs.add(CUR_POS, + state->exprs.add(state->s.findFile), + {state->exprs.add(state->s.nixPath), + state->exprs.add(state->exprs.alloc, path)}); } | URI { static bool noURLLiterals = experimentalFeatureSettings.isEnabled(Xp::NoUrlLiterals); @@ -337,37 +320,36 @@ expr_simple .msg = HintFmt("URL literals are disabled"), .pos = state->positions[CUR_POS] }); - $$ = new ExprString(state->alloc, $1); + $$ = state->exprs.add(state->exprs.alloc, $1); } | '(' expr ')' { $$ = $2; } /* Let expressions `let {..., body = ...}' are just desugared into `(rec {..., body = ...}).body'. */ | LET '{' binds '}' - { $3->recursive = true; $3->pos = CUR_POS; $$ = new ExprSelect(state->alloc, noPos, $3, state->s.body); } + { $3->recursive = true; $3->pos = CUR_POS; $$ = state->exprs.add(state->exprs.alloc, noPos, $3, state->s.body); } | REC '{' binds '}' { $3->recursive = true; $3->pos = CUR_POS; $$ = $3; } | '{' binds1 '}' { $2->pos = CUR_POS; $$ = $2; } | '{' '}' - { $$ = new ExprAttrs(CUR_POS); } - | '[' expr_list ']' { $$ = $2; } + { $$ = state->exprs.add(CUR_POS); } + | '[' list ']' { $$ = state->exprs.add(state->exprs.alloc, $2); } ; string_parts - : STR { $$ = new std::variant($1); } - | string_parts_interpolated { $$ = new std::variant(new ExprConcatStrings(CUR_POS, true, $1)); } - | { $$ = new std::variant(std::string_view()); } + : STR { $$ = {$1}; } + | string_parts_interpolated { $$ = {state->exprs.add(state->exprs.alloc, CUR_POS, true, $1)}; } + | { $$ = {std::string_view()}; } ; string_parts_interpolated : string_parts_interpolated STR - { $$ = $1; $1->emplace_back(state->at(@2), new ExprString(state->alloc, 
$2)); } - | string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->at(@2), $3); } - | DOLLAR_CURLY expr '}' { $$ = new std::vector>; $$->emplace_back(state->at(@1), $2); } + { $$ = std::move($1); $$.emplace_back(state->at(@2), state->exprs.add(state->exprs.alloc, $2)); } + | string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = std::move($1); $$.emplace_back(state->at(@2), $3); } + | DOLLAR_CURLY expr '}' { $$.emplace_back(state->at(@1), $2); } | STR DOLLAR_CURLY expr '}' { - $$ = new std::vector>; - $$->emplace_back(state->at(@1), new ExprString(state->alloc, $1)); - $$->emplace_back(state->at(@2), $3); + $$.emplace_back(state->at(@1), state->exprs.add(state->exprs.alloc, $1)); + $$.emplace_back(state->at(@2), $3); } ; @@ -392,8 +374,8 @@ path_start root filesystem accessor, rather than the accessor of the current Nix expression. */ literal.front() == '/' - ? new ExprPath(state->alloc, state->rootFS, path) - : new ExprPath(state->alloc, state->basePath.accessor, path); + ? 
state->exprs.add(state->exprs.alloc, state->rootFS, path) + : state->exprs.add(state->exprs.alloc, state->basePath.accessor, path); } | HPATH { if (state->settings.pureEval) { @@ -402,100 +384,92 @@ path_start std::string_view($1.p, $1.l) ); } - Path path(getHome() + std::string($1.p + 1, $1.l - 1)); - $$ = new ExprPath(state->alloc, ref(state->rootFS), path); + Path path(getHome().string() + std::string($1.p + 1, $1.l - 1)); + $$ = state->exprs.add(state->exprs.alloc, ref(state->rootFS), path); } ; ind_string_parts - : ind_string_parts IND_STR { $$ = $1; $1->emplace_back(state->at(@2), $2); } - | ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->at(@2), $3); } - | { $$ = new std::vector>>; } + : ind_string_parts IND_STR { $$ = std::move($1); $$.emplace_back(state->at(@2), $2); } + | ind_string_parts DOLLAR_CURLY expr '}' { $$ = std::move($1); $$.emplace_back(state->at(@2), $3); } + | { } ; binds : binds1 - | { $$ = new ExprAttrs; } + | { $$ = state->exprs.add(); } ; binds1 : binds1[accum] attrpath '=' expr ';' { $$ = $accum; - state->addAttr($$, std::move(*$attrpath), @attrpath, $expr, @expr); - delete $attrpath; + state->addAttr($$, std::move($attrpath), @attrpath, $expr, @expr); } | binds[accum] INHERIT attrs ';' { $$ = $accum; - for (auto & [i, iPos] : *$attrs) { - if ($accum->attrs.find(i.symbol) != $accum->attrs.end()) - state->dupAttr(i.symbol, iPos, $accum->attrs[i.symbol].pos); - $accum->attrs.emplace( + for (auto & [i, iPos] : $attrs) { + if ($accum->attrs->find(i.symbol) != $accum->attrs->end()) + state->dupAttr(i.symbol, iPos, (*$accum->attrs)[i.symbol].pos); + $accum->attrs->emplace( i.symbol, - ExprAttrs::AttrDef(new ExprVar(iPos, i.symbol), iPos, ExprAttrs::AttrDef::Kind::Inherited)); + ExprAttrs::AttrDef(state->exprs.add(iPos, i.symbol), iPos, ExprAttrs::AttrDef::Kind::Inherited)); } - delete $attrs; } | binds[accum] INHERIT '(' expr ')' attrs ';' { $$ = $accum; if (!$accum->inheritFromExprs) - $accum->inheritFromExprs = 
std::make_unique>(); + $accum->inheritFromExprs = std::make_unique>(); $accum->inheritFromExprs->push_back($expr); - auto from = new nix::ExprInheritFrom(state->at(@expr), $accum->inheritFromExprs->size() - 1); - for (auto & [i, iPos] : *$attrs) { - if ($accum->attrs.find(i.symbol) != $accum->attrs.end()) - state->dupAttr(i.symbol, iPos, $accum->attrs[i.symbol].pos); - $accum->attrs.emplace( + auto from = state->exprs.add(state->at(@expr), $accum->inheritFromExprs->size() - 1); + for (auto & [i, iPos] : $attrs) { + if ($accum->attrs->find(i.symbol) != $accum->attrs->end()) + state->dupAttr(i.symbol, iPos, (*$accum->attrs)[i.symbol].pos); + $accum->attrs->emplace( i.symbol, ExprAttrs::AttrDef( - new ExprSelect(state->alloc, iPos, from, i.symbol), + state->exprs.add(state->exprs.alloc, iPos, from, i.symbol), iPos, ExprAttrs::AttrDef::Kind::InheritedFrom)); } - delete $attrs; } | attrpath '=' expr ';' - { $$ = new ExprAttrs; - state->addAttr($$, std::move(*$attrpath), @attrpath, $expr, @expr); - delete $attrpath; + { $$ = state->exprs.add(); + state->addAttr($$, std::move($attrpath), @attrpath, $expr, @expr); } ; attrs - : attrs attr { $$ = $1; $1->emplace_back(AttrName(state->symbols.create($2)), state->at(@2)); } + : attrs attr { $$ = std::move($1); $$.emplace_back(state->symbols.create($2), state->at(@2)); } | attrs string_attr - { $$ = $1; - std::visit(overloaded { - [&](std::string_view str) { $$->emplace_back(AttrName(state->symbols.create(str)), state->at(@2)); }, + { $$ = std::move($1); + $2.visit(overloaded{ + [&](std::string_view str) { $$.emplace_back(state->symbols.create(str), state->at(@2)); }, [&](Expr * expr) { - throw ParseError({ - .msg = HintFmt("dynamic attributes not allowed in inherit"), - .pos = state->positions[state->at(@2)] - }); - } - }, *$2); - delete $2; + throw ParseError({ + .msg = HintFmt("dynamic attributes not allowed in inherit"), + .pos = state->positions[state->at(@2)] + }); + }} + ); } - | { $$ = new std::vector>; } + | { } ; 
attrpath - : attrpath '.' attr { $$ = $1; $1->push_back(AttrName(state->symbols.create($3))); } + : attrpath '.' attr { $$ = std::move($1); $$.emplace_back(state->symbols.create($3)); } | attrpath '.' string_attr - { $$ = $1; - std::visit(overloaded { - [&](std::string_view str) { $$->push_back(AttrName(state->symbols.create(str))); }, - [&](Expr * expr) { $$->push_back(AttrName(expr)); } - }, *$3); - delete $3; + { $$ = std::move($1); + $3.visit(overloaded{ + [&](std::string_view str) { $$.emplace_back(state->symbols.create(str)); }, + [&](Expr * expr) { $$.emplace_back(expr); }} + ); } - | attr { $$ = new std::vector; $$->push_back(AttrName(state->symbols.create($1))); } + | attr { $$.emplace_back(state->symbols.create($1)); } | string_attr - { $$ = new std::vector; - std::visit(overloaded { - [&](std::string_view str) { $$->push_back(AttrName(state->symbols.create(str))); }, - [&](Expr * expr) { $$->push_back(AttrName(expr)); } - }, *$1); - delete $1; + { $1.visit(overloaded{ + [&](std::string_view str) { $$.emplace_back(state->symbols.create(str)); }, + [&](Expr * expr) { $$.emplace_back(expr); }} + ); } ; @@ -505,33 +479,33 @@ attr ; string_attr - : '"' string_parts '"' { $$ = $2; } - | DOLLAR_CURLY expr '}' { $$ = new std::variant($2); } + : '"' string_parts '"' { $$ = std::move($2); } + | DOLLAR_CURLY expr '}' { $$ = {$2}; } ; -expr_list - : expr_list expr_select { $$ = $1; $1->elems.push_back($2); /* !!! dangerous */; $2->warnIfCursedOr(state->symbols, state->positions); } - | { $$ = new ExprList; } +list + : list expr_select { $$ = std::move($1); $$.push_back($2); /* !!! 
dangerous */; $2->warnIfCursedOr(state->symbols, state->positions); } + | { } ; formal_set - : '{' formals ',' ELLIPSIS '}' { $$ = $formals; $$->ellipsis = true; } - | '{' ELLIPSIS '}' { $$ = new Formals; $$->ellipsis = true; } - | '{' formals ',' '}' { $$ = $formals; $$->ellipsis = false; } - | '{' formals '}' { $$ = $formals; $$->ellipsis = false; } - | '{' '}' { $$ = new Formals; $$->ellipsis = false; } + : '{' formals ',' ELLIPSIS '}' { $$ = std::move($formals); $$.ellipsis = true; } + | '{' ELLIPSIS '}' { $$.ellipsis = true; } + | '{' formals ',' '}' { $$ = std::move($formals); $$.ellipsis = false; } + | '{' formals '}' { $$ = std::move($formals); $$.ellipsis = false; } + | '{' '}' { $$.ellipsis = false; } ; formals : formals[accum] ',' formal - { $$ = $accum; $$->formals.emplace_back(*$formal); delete $formal; } + { $$ = std::move($accum); $$.formals.emplace_back(std::move($formal)); } | formal - { $$ = new Formals; $$->formals.emplace_back(*$formal); delete $formal; } + { $$.formals.emplace_back(std::move($formal)); } ; formal - : ID { $$ = new Formal{CUR_POS, state->symbols.create($1), 0}; } - | ID '?' expr { $$ = new Formal{CUR_POS, state->symbols.create($1), $3}; } + : ID { $$ = Formal{CUR_POS, state->symbols.create($1), 0}; } + | ID '?' 
expr { $$ = Formal{CUR_POS, state->symbols.create($1), $3}; } ; %% @@ -546,7 +520,7 @@ Expr * parseExprFromBuf( size_t length, Pos::Origin origin, const SourcePath & basePath, - std::pmr::polymorphic_allocator & alloc, + Exprs & exprs, SymbolTable & symbols, const EvalSettings & settings, PosTable & positions, @@ -561,7 +535,7 @@ Expr * parseExprFromBuf( }; ParserState state { .lexerState = lexerState, - .alloc = alloc, + .exprs = exprs, .symbols = symbols, .positions = positions, .basePath = basePath, @@ -582,3 +556,4 @@ Expr * parseExprFromBuf( } +#pragma GCC diagnostic pop // end ignored "-Wswitch-enum" diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 52fbab0aceb..68e9d0f5f84 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -5,6 +5,7 @@ #include "nix/expr/eval-settings.hh" #include "nix/expr/gc-small-vector.hh" #include "nix/expr/json-to-value.hh" +#include "nix/expr/static-string-data.hh" #include "nix/store/globals.hh" #include "nix/store/names.hh" #include "nix/store/path-references.hh" @@ -40,6 +41,12 @@ namespace nix { +RegisterPrimOp::PrimOps & RegisterPrimOp::primOps() +{ + static RegisterPrimOp::PrimOps primOps; + return primOps; +} + /************************************************************* * Miscellaneous *************************************************************/ @@ -47,7 +54,7 @@ namespace nix { static inline Value * mkString(EvalState & state, const std::csub_match & match) { Value * v = state.allocValue(); - v->mkString({match.first, match.second}); + v->mkString({match.first, match.second}, state.mem); return v; } @@ -229,13 +236,14 @@ void derivationToValue( path2, { NixStringContextElem::DrvDeep{.drvPath = storePath}, - }); - attrs.alloc(state.s.name).mkString(drv.env["name"]); + }, + state.mem); + attrs.alloc(state.s.name).mkString(drv.env["name"], state.mem); auto list = state.buildList(drv.outputs.size()); for (const auto & [i, o] : enumerate(drv.outputs)) { mkOutputString(state, attrs, storePath, 
o); - (list[i] = state.allocValue())->mkString(o.first); + (list[i] = state.allocValue())->mkString(o.first, state.mem); } attrs.alloc(state.s.outputs).mkList(list); @@ -318,10 +326,51 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v } static RegisterPrimOp primop_scopedImport( - PrimOp{ - .name = "scopedImport", .arity = 2, .fun = [](EvalState & state, const PosIdx pos, Value ** args, Value & v) { - import(state, pos, *args[1], args[0], v); - }}); + {.name = "scopedImport", + .args = {"scope", "path"}, + .doc = R"( + Load, parse, and return the Nix expression in the file *path*, with the attributes from *scope* available as variables in the lexical scope of the imported file. + + This function is similar to [`import`](#builtins-import), but allows you to provide additional variables that will be available in the scope of the imported expression. + The *scope* argument must be an attribute set; each attribute becomes a variable available in the imported file. + Built-in functions and values remain accessible unless shadowed by *scope* attributes. + + > **Note** + > + > Variables from *scope* shadow built-ins with the same name, allowing you to override built-ins for the imported expression. + + > **Note** + > + > Unlike [`import`](#builtins-import), `scopedImport` does not memoize evaluation results. + > While the parsing result may be reused, each call produces a distinct value. + > This is observable through performance and side effects such as [`builtins.trace`](#builtins-trace). + + The *path* argument must meet the same criteria as an [interpolated expression](@docroot@/language/string-interpolation.md#interpolated-expression). + + If *path* is a directory, the file `default.nix` in that directory is used if it exists. + + > **Example** + > + > Create a file `greet.nix`: + > + > ```nix + > # greet.nix + > "${greeting}, ${name}!" 
+ > ``` + > + > Import it with additional variables in scope: + > + > ```nix + > scopedImport { greeting = "Hello"; name = "World"; } ./greet.nix + > ``` + > + > "Hello, World!" + + Evaluation aborts if the file doesn't exist or contains an invalid Nix expression. + )", + .fun = [](EvalState & state, const PosIdx pos, Value ** args, Value & v) { + import(state, pos, *args[1], args[0], v); + }}); static RegisterPrimOp primop_import( {.name = "import", @@ -496,34 +545,34 @@ static void prim_typeOf(EvalState & state, const PosIdx pos, Value ** args, Valu state.forceValue(*args[0], pos); switch (args[0]->type()) { case nInt: - v.mkStringNoCopy("int"); + v.mkStringNoCopy("int"_sds); break; case nBool: - v.mkStringNoCopy("bool"); + v.mkStringNoCopy("bool"_sds); break; case nString: - v.mkStringNoCopy("string"); + v.mkStringNoCopy("string"_sds); break; case nPath: - v.mkStringNoCopy("path"); + v.mkStringNoCopy("path"_sds); break; case nNull: - v.mkStringNoCopy("null"); + v.mkStringNoCopy("null"_sds); break; case nAttrs: - v.mkStringNoCopy("set"); + v.mkStringNoCopy("set"_sds); break; case nList: - v.mkStringNoCopy("list"); + v.mkStringNoCopy("list"_sds); break; case nFunction: - v.mkStringNoCopy("lambda"); + v.mkStringNoCopy("lambda"_sds); break; case nExternal: - v.mkString(args[0]->external()->typeOf()); + v.mkString(args[0]->external()->typeOf(), state.mem); break; case nFloat: - v.mkStringNoCopy("float"); + v.mkStringNoCopy("float"_sds); break; case nThunk: case nFailed: @@ -691,7 +740,14 @@ struct CompareValues if (v1->type() == nInt && v2->type() == nFloat) return v1->integer().value < v2->fpoint(); if (v1->type() != v2->type()) - state.error("cannot compare %s with %s", showType(*v1), showType(*v2)).debugThrow(); + state + .error( + "cannot compare %s with %s; values are %s and %s", + showType(*v1), + showType(*v2), + ValuePrinter(state, *v1, errorPrintOptions), + ValuePrinter(state, *v2, errorPrintOptions)) + .debugThrow(); // Allow selecting a subset of enum 
values #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wswitch-enum" @@ -701,12 +757,12 @@ struct CompareValues case nFloat: return v1->fpoint() < v2->fpoint(); case nString: - return strcmp(v1->c_str(), v2->c_str()) < 0; + return v1->string_view() < v2->string_view(); case nPath: // Note: we don't take the accessor into account // since it's not obvious how to compare them in a // reproducible way. - return strcmp(v1->pathStr(), v2->pathStr()) < 0; + return v1->pathStrView() < v2->pathStrView(); case nList: // Lexicographic comparison for (size_t i = 0;; i++) { @@ -721,7 +777,11 @@ struct CompareValues default: state .error( - "cannot compare %s with %s; values of that type are incomparable", showType(*v1), showType(*v2)) + "cannot compare %s with %s; values of that type are incomparable (values are %s and %s)", + showType(*v1), + showType(*v2), + ValuePrinter(state, *v1, errorPrintOptions), + ValuePrinter(state, *v2, errorPrintOptions)) .debugThrow(); #pragma GCC diagnostic pop } @@ -767,42 +827,79 @@ static void prim_genericClosure(EvalState & state, const PosIdx pos, Value ** ar `workSet', adding the result to `workSet', continuing until no new elements are found. */ ValueList res; - // `doneKeys' doesn't need to be a GC root, because its values are - // reachable from res. 
- auto cmp = CompareValues(state, noPos, "while comparing the `key` attributes of two genericClosure elements"); - std::set doneKeys(cmp); + // Track which element each key came from + auto cmp = CompareValues(state, noPos, ""); + std::map keyToElem(cmp); while (!workSet.empty()) { Value * e = *(workSet.begin()); workSet.pop_front(); - state.forceAttrs( - *e, - noPos, - "while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure"); - - auto key = state.getAttr( - state.s.key, - e->attrs(), - "in one of the attrsets generated by (or initially passed to) builtins.genericClosure"); + try { + state.forceAttrs(*e, noPos, ""); + } catch (Error & err) { + err.addTrace(nullptr, "in genericClosure element %s", ValuePrinter(state, *e, errorPrintOptions)); + throw; + } + + const Attr * key; + try { + key = state.getAttr(state.s.key, e->attrs(), ""); + } catch (Error & err) { + err.addTrace(nullptr, "in genericClosure element %s", ValuePrinter(state, *e, errorPrintOptions)); + throw; + } state.forceValue(*key->value, noPos); - if (!doneKeys.insert(key->value).second) - continue; + try { + auto [it, inserted] = keyToElem.insert({key->value, e}); + if (!inserted) + continue; + } catch (Error & err) { + // Try to find which element we're comparing against + Value * otherElem = nullptr; + for (auto & [otherKey, elem] : keyToElem) { + try { + cmp(key->value, otherKey); + } catch (Error &) { + // Found the element we're comparing against + otherElem = elem; + break; + } + } + if (otherElem) { + // Traces are printed in reverse order; pre-swap them. 
+ err.addTrace(nullptr, "with element %s", ValuePrinter(state, *otherElem, errorPrintOptions)); + err.addTrace(nullptr, "while comparing element %s", ValuePrinter(state, *e, errorPrintOptions)); + } else { + // Couldn't find the specific element, just show current + err.addTrace(nullptr, "while checking key of element %s", ValuePrinter(state, *e, errorPrintOptions)); + } + throw; + } res.push_back(e); /* Call the `operator' function with `e' as argument. */ Value newElements; - state.callFunction(*op->value, {&e, 1}, newElements, noPos); - state.forceList( - newElements, - noPos, - "while evaluating the return value of the `operator` passed to builtins.genericClosure"); - - /* Add the values returned by the operator to the work set. */ - for (auto elem : newElements.listView()) { - state.forceValue(*elem, noPos); // "while evaluating one one of the elements returned by the `operator` - // passed to builtins.genericClosure"); - workSet.push_back(elem); + try { + state.callFunction(*op->value, {&e, 1}, newElements, noPos); + state.forceList( + newElements, + noPos, + "while evaluating the return value of the `operator` passed to builtins.genericClosure"); + + /* Add the values returned by the operator to the work set. 
*/ + for (auto elem : newElements.listView()) { + state.forceValue(*elem, noPos); // "while evaluating one one of the elements returned by the `operator` + // passed to builtins.genericClosure"); + workSet.push_back(elem); + } + } catch (Error & err) { + err.addTrace( + nullptr, + "while calling %s on genericClosure element %s", + state.symbols[state.s.operator_], + ValuePrinter(state, *e, errorPrintOptions)); + throw; } } @@ -835,10 +932,10 @@ static RegisterPrimOp primop_genericClosure( - [Int](@docroot@/language/types.md#type-int) - [Float](@docroot@/language/types.md#type-float) - - [Boolean](@docroot@/language/types.md#type-boolean) + - [Boolean](@docroot@/language/types.md#type-bool) - [String](@docroot@/language/types.md#type-string) - [Path](@docroot@/language/types.md#type-path) - - [List](@docroot@/language/types.md#list) + - [List](@docroot@/language/types.md#type-list) The result is produced by calling the `operator` on each `item` that has not been called yet, including newly added items, until no new items are added. Items are compared by their `key` attribute. @@ -1130,7 +1227,7 @@ static void prim_getEnv(EvalState & state, const PosIdx pos, Value ** args, Valu { std::string name( state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.getEnv")); - v.mkString(state.settings.restrictEval || state.settings.pureEval ? "" : getEnv(name).value_or("")); + v.mkString(state.settings.restrictEval || state.settings.pureEval ? "" : getEnv(name).value_or(""), state.mem); } static RegisterPrimOp primop_getEnv({ @@ -1384,7 +1481,7 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName pos, "while evaluating the `__structuredAttrs` " "attribute passed to builtins.derivationStrict")) - jsonObject = StructuredAttrs{.structuredAttrs = json::object()}; + jsonObject = StructuredAttrs{}; /* Check whether null attributes should be ignored. 
*/ bool ignoreNulls = false; @@ -1430,7 +1527,8 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName .debugThrow(); } if (ingestionMethod == ContentAddressMethod::Raw::Text) - experimentalFeatureSettings.require(Xp::DynamicDerivations); + experimentalFeatureSettings.require( + Xp::DynamicDerivations, fmt("text-hashed derivation '%s', outputHashMode = \"text\"", drvName)); if (ingestionMethod == ContentAddressMethod::Raw::Git) experimentalFeatureSettings.require(Xp::GitHashing); }; @@ -1749,28 +1847,7 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName drv.outputs.insert_or_assign(i, DerivationOutput::Deferred{}); } - auto hashModulo = hashDerivationModulo(*state.store, Derivation(drv), true); - switch (hashModulo.kind) { - case DrvHash::Kind::Regular: - for (auto & i : outputs) { - auto h = get(hashModulo.hashes, i); - if (!h) - state.error("derivation produced no hash for output '%s'", i).atPos(v).debugThrow(); - auto outPath = state.store->makeOutputPath(i, *h, drvName); - drv.env[i] = state.store->printStorePath(outPath); - drv.outputs.insert_or_assign( - i, - DerivationOutput::InputAddressed{ - .path = std::move(outPath), - }); - } - break; - ; - case DrvHash::Kind::Deferred: - for (auto & i : outputs) { - drv.outputs.insert_or_assign(i, DerivationOutput::Deferred{}); - } - } + drv.fillInOutputPaths(*state.store); } /* Write the resulting term into the Nix store directory. */ @@ -1793,7 +1870,8 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName drvPathS, { NixStringContextElem::DrvDeep{.drvPath = drvPath}, - }); + }, + state.mem); for (auto & i : drv.outputs) mkOutputString(state, result, drvPath, i); @@ -1816,8 +1894,10 @@ static RegisterPrimOp primop_derivationStrict( ‘out’. 
*/ static void prim_placeholder(EvalState & state, const PosIdx pos, Value ** args, Value & v) { - v.mkString(hashPlaceholder( - state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.placeholder"))); + v.mkString( + hashPlaceholder(state.forceStringNoCtx( + *args[0], pos, "while evaluating the first argument passed to builtins.placeholder")), + state.mem); } static RegisterPrimOp primop_placeholder({ @@ -1846,7 +1926,7 @@ static void prim_toPath(EvalState & state, const PosIdx pos, Value ** args, Valu NixStringContext context; auto path = state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to builtins.toPath"); - v.mkString(path.path.abs(), context); + v.mkString(path.path.abs(), context, state.mem); } static RegisterPrimOp primop_toPath({ @@ -1889,7 +1969,7 @@ static void prim_storePath(EvalState & state, const PosIdx pos, Value ** args, V if (!settings.readOnlyMode) state.store->ensurePath(path2); context.insert(NixStringContextElem::Opaque{.path = path2}); - v.mkString(path.abs(), context); + v.mkString(path.abs(), context, state.mem); } static RegisterPrimOp primop_storePath({ @@ -1971,7 +2051,8 @@ static void prim_baseNameOf(EvalState & state, const PosIdx pos, Value ** args, v.mkString( legacyBaseNameOf(*state.coerceToString( pos, *args[0], context, "while evaluating the first argument passed to builtins.baseNameOf", false, false)), - context); + context, + state.mem); } static RegisterPrimOp primop_baseNameOf({ @@ -2000,18 +2081,18 @@ static void prim_dirOf(EvalState & state, const PosIdx pos, Value ** args, Value state.forceValue(*args[0], pos); if (args[0]->type() == nPath) { auto path = args[0]->path(); - v.mkPath(path.path.isRoot() ? path : path.parent()); + v.mkPath(path.path.isRoot() ? 
path : path.parent(), state.mem); } else { NixStringContext context; auto path = state.coerceToString( pos, *args[0], context, "while evaluating the first argument passed to 'builtins.dirOf'", false, false); auto pos = path->rfind('/'); if (pos == path->npos) - v.mkStringMove(".", context); + v.mkStringMove("."_sds, context, state.mem); else if (pos == 0) - v.mkStringMove("/", context); + v.mkStringMove("/"_sds, context, state.mem); else - v.mkString(path->substr(0, pos), context); + v.mkString(path->substr(0, pos), context, state.mem); } } @@ -2056,7 +2137,7 @@ static void prim_readFile(EvalState & state, const PosIdx pos, Value ** args, Va .path = std::move((StorePath &&) p), }); } - v.mkString(s, context); + v.mkString(s, context, state.mem); } static RegisterPrimOp primop_readFile({ @@ -2120,7 +2201,7 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value ** args, Va auto path = state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument passed to builtins.findFile"); - v.mkPath(state.findFile(lookupPath, path, pos)); + v.mkPath(state.findFile(lookupPath, path, pos), state.mem); } static RegisterPrimOp primop_findFile( @@ -2142,7 +2223,7 @@ static RegisterPrimOp primop_findFile( builtins.findFile builtins.nixPath "nixpkgs" ``` - A search path is represented as a list of [attribute sets](./types.md#attribute-set) with two attributes: + A search path is represented as a list of [attribute sets](./types.md#type-attrs) with two attributes: - `prefix` is a relative path. 
- `path` denotes a file system location @@ -2225,7 +2306,7 @@ static RegisterPrimOp primop_findFile( > - ``` > { > prefix = "nixpkgs"; - > path = "https://nixos.org/channels/nixos-unstable/nixexprs.tar.xz"; + > path = "https://channels.nixos.org/nixos-unstable/nixexprs.tar.xz"; > } > ``` @@ -2269,7 +2350,7 @@ static void prim_hashFile(EvalState & state, const PosIdx pos, Value ** args, Va auto path = realisePath(state, pos, *args[1]); - v.mkString(hashString(*ha, path.readFile()).to_string(HashFormat::Base16, false)); + v.mkString(hashString(*ha, path.readFile()).to_string(HashFormat::Base16, false), state.mem); } static RegisterPrimOp primop_hashFile({ @@ -2295,10 +2376,10 @@ static const Value & fileTypeToString(EvalState & state, SourceAccessor::Type ty static const Constants stringValues = []() { Constants res; - res.regular.mkStringNoCopy("regular"); - res.directory.mkStringNoCopy("directory"); - res.symlink.mkStringNoCopy("symlink"); - res.unknown.mkStringNoCopy("unknown"); + res.regular.mkStringNoCopy("regular"_sds); + res.directory.mkStringNoCopy("directory"_sds); + res.symlink.mkStringNoCopy("symlink"_sds); + res.unknown.mkStringNoCopy("unknown"_sds); return res; }(); @@ -2358,7 +2439,7 @@ static void prim_readDir(EvalState & state, const PosIdx pos, Value ** args, Val // detailed node info quickly in this case we produce a thunk to // query the file type lazily. auto epath = state.allocValue(); - epath->mkPath(path / name); + epath->mkPath(path / name, state.mem); if (!readFileType) readFileType = &state.getBuiltin("readFileType"); attr.mkApp(readFileType, epath); @@ -2434,7 +2515,7 @@ static RegisterPrimOp primop_outputOf({ returns an input placeholder for the output of the output of `myDrv`. - This primop corresponds to the `^` sigil for [deriving paths](@docroot@/glossary.md#gloss-deriving-paths), e.g. as part of installable syntax on the command line. 
+ This primop corresponds to the `^` sigil for [deriving paths](@docroot@/glossary.md#gloss-deriving-path), e.g. as part of installable syntax on the command line. )", .fun = prim_outputOf, .experimentalFeature = Xp::DynamicDerivations, @@ -2452,7 +2533,7 @@ static void prim_toXML(EvalState & state, const PosIdx pos, Value ** args, Value std::ostringstream out; NixStringContext context; printValueAsXML(state, true, false, *args[0], out, context, pos); - v.mkString(out.view(), context); + v.mkString(out.view(), context, state.mem); } static RegisterPrimOp primop_toXML({ @@ -2560,7 +2641,7 @@ static void prim_toJSON(EvalState & state, const PosIdx pos, Value ** args, Valu std::ostringstream out; NixStringContext context; printValueAsJSON(state, true, *args[0], pos, out, context); - v.mkString(out.view(), context); + v.mkString(out.view(), context, state.mem); } static RegisterPrimOp primop_toJSON({ @@ -2751,7 +2832,7 @@ bool EvalState::callPathFilter(Value * filterFun, const SourcePath & path, PosId /* Call the filter function. The first argument is the path, the second is a string indicating the type of the file. 
*/ Value arg1; - arg1.mkString(path.path.abs()); + arg1.mkString(path.path.abs(), mem); // assert that type is not "unknown" Value * args[]{&arg1, const_cast(&fileTypeToString(*this, st.type))}; @@ -3006,7 +3087,7 @@ static void prim_attrNames(EvalState & state, const PosIdx pos, Value ** args, V for (const auto & [n, i] : enumerate(*args[0]->attrs())) list[n] = Value::toPtr(state.symbols[i.name]); - std::sort(list.begin(), list.end(), [](Value * v1, Value * v2) { return strcmp(v1->c_str(), v2->c_str()) < 0; }); + std::sort(list.begin(), list.end(), [](Value * v1, Value * v2) { return v1->string_view() < v2->string_view(); }); v.mkList(list); } @@ -3439,21 +3520,20 @@ static void prim_functionArgs(EvalState & state, const PosIdx pos, Value ** args if (!args[0]->isLambda()) state.error("'functionArgs' requires a function").atPos(pos).debugThrow(); - if (!args[0]->lambda().fun->hasFormals()) { + if (const auto & formals = args[0]->lambda().fun->getFormals()) { + auto attrs = state.buildBindings(formals->formals.size()); + for (auto & i : formals->formals) + attrs.insert(i.name, state.getBool(i.def), i.pos); + /* Optimization: avoid sorting bindings. `formals` must already be sorted according to + (std::tie(a.name, a.pos) < std::tie(b.name, b.pos)) predicate, so the following assertion + always holds: + assert(std::is_sorted(attrs.alreadySorted()->begin(), attrs.alreadySorted()->end())); + .*/ + v.mkAttrs(attrs.alreadySorted()); + } else { v.mkAttrs(&Bindings::emptyBindings); return; } - - const auto & formals = args[0]->lambda().fun->formals->formals; - auto attrs = state.buildBindings(formals.size()); - for (auto & i : formals) - attrs.insert(i.name, state.getBool(i.def), i.pos); - /* Optimization: avoid sorting bindings. 
`formals` must already be sorted according to - (std::tie(a.name, a.pos) < std::tie(b.name, b.pos)) predicate, so the following assertion - always holds: - assert(std::is_sorted(attrs.alreadySorted()->begin(), attrs.alreadySorted()->end())); - .*/ - v.mkAttrs(attrs.alreadySorted()); } static RegisterPrimOp primop_functionArgs({ @@ -4427,7 +4507,7 @@ static void prim_toString(EvalState & state, const PosIdx pos, Value ** args, Va NixStringContext context; auto s = state.coerceToString( pos, *args[0], context, "while evaluating the first argument passed to builtins.toString", true, false); - v.mkString(*s, context); + v.mkString(*s, context, state.mem); } static RegisterPrimOp primop_toString({ @@ -4487,7 +4567,7 @@ static void prim_substring(EvalState & state, const PosIdx pos, Value ** args, V if (len == 0) { state.forceValue(*args[2], pos); if (args[2]->type() == nString) { - v.mkStringNoCopy("", args[2]->context()); + v.mkStringNoCopy(""_sds, args[2]->context()); return; } } @@ -4500,7 +4580,7 @@ static void prim_substring(EvalState & state, const PosIdx pos, Value ** args, V auto s = state.coerceToString( pos, *args[2], context, "while evaluating the third argument (the string) passed to builtins.substring"); - v.mkString(NixUInt(start) >= s->size() ? "" : s->substr(start, _len), context); + v.mkString(NixUInt(start) >= s->size() ? 
"" : s->substr(start, _len), context, state.mem); } static RegisterPrimOp primop_substring({ @@ -4555,7 +4635,7 @@ static void prim_hashString(EvalState & state, const PosIdx pos, Value ** args, auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString"); - v.mkString(hashString(*ha, s).to_string(HashFormat::Base16, false)); + v.mkString(hashString(*ha, s).to_string(HashFormat::Base16, false), state.mem); } static RegisterPrimOp primop_hashString({ @@ -4588,7 +4668,7 @@ static void prim_convertHash(EvalState & state, const PosIdx pos, Value ** args, HashFormat hf = parseHashFormat( state.forceStringNoCtx(*iteratorToHashFormat->value, pos, "while evaluating the attribute 'toHashFormat'")); - v.mkString(Hash::parseAny(hash, ha).to_string(hf, hf == HashFormat::SRI)); + v.mkString(Hash::parseAny(hash, ha).to_string(hf, hf == HashFormat::SRI), state.mem); } static RegisterPrimOp primop_convertHash({ @@ -4898,7 +4978,7 @@ static void prim_concatStringsSep(EvalState & state, const PosIdx pos, Value ** "while evaluating one element of the list of strings to concat passed to builtins.concatStringsSep"); } - v.mkString(res, context); + v.mkString(res, context, state.mem); } static RegisterPrimOp primop_concatStringsSep({ @@ -4973,7 +5053,7 @@ static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value ** ar } } - v.mkString(res, context); + v.mkString(res, context, state.mem); } static RegisterPrimOp primop_replaceStrings({ @@ -5006,8 +5086,8 @@ static void prim_parseDrvName(EvalState & state, const PosIdx pos, Value ** args state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.parseDrvName"); DrvName parsed(name); auto attrs = state.buildBindings(2); - attrs.alloc(state.s.name).mkString(parsed.name); - attrs.alloc("version").mkString(parsed.version); + attrs.alloc(state.s.name).mkString(parsed.name, state.mem); + attrs.alloc("version").mkString(parsed.version, 
state.mem); v.mkAttrs(attrs); } @@ -5043,7 +5123,7 @@ static RegisterPrimOp primop_compareVersions({ version *s1* is older than version *s2*, `0` if they are the same, and `1` if *s1* is newer than *s2*. The version comparison algorithm is the same as the one used by [`nix-env - -u`](../command-ref/nix-env.md#operation---upgrade). + -u`](../command-ref/nix-env/upgrade.md). )", .fun = prim_compareVersions, }); @@ -5062,7 +5142,7 @@ static void prim_splitVersion(EvalState & state, const PosIdx pos, Value ** args } auto list = state.buildList(components.size()); for (const auto & [n, component] : enumerate(components)) - (list[n] = state.allocValue())->mkString(std::move(component)); + (list[n] = state.allocValue())->mkString(std::move(component), state.mem); v.mkList(list); } @@ -5072,7 +5152,7 @@ static RegisterPrimOp primop_splitVersion({ .doc = R"( Split a string representing a version into its components, by the same version splitting logic underlying the version comparison in - [`nix-env -u`](../command-ref/nix-env.md#operation---upgrade). + [`nix-env -u`](../command-ref/nix-env/upgrade.md). )", .fun = prim_splitVersion, }); @@ -5122,9 +5202,9 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) Primitive value. It can be returned by - [comparison operators](@docroot@/language/operators.md#Comparison) + [comparison operators](@docroot@/language/operators.md#comparison) and used in - [conditional expressions](@docroot@/language/syntax.md#Conditionals). + [conditional expressions](@docroot@/language/syntax.md#conditionals). The name `true` is not special, and can be shadowed: @@ -5145,9 +5225,9 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) Primitive value. It can be returned by - [comparison operators](@docroot@/language/operators.md#Comparison) + [comparison operators](@docroot@/language/operators.md#comparison) and used in - [conditional expressions](@docroot@/language/syntax.md#Conditionals). 
+ [conditional expressions](@docroot@/language/syntax.md#conditionals). The name `false` is not special, and can be shadowed: @@ -5203,7 +5283,7 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) .impureOnly = true, }); - v.mkString(settings.getCurrentSystem()); + v.mkString(settings.getCurrentSystem(), mem); addConstant( "__currentSystem", v, @@ -5235,7 +5315,7 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) .impureOnly = true, }); - v.mkString(nixVersion); + v.mkString(nixVersion, mem); addConstant( "__nixVersion", v, @@ -5260,7 +5340,7 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) )", }); - v.mkString(store->storeDir); + v.mkString(store->storeDir, mem); addConstant( "__storeDir", v, @@ -5325,8 +5405,8 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) auto list = buildList(lookupPath.elements.size()); for (const auto & [n, i] : enumerate(lookupPath.elements)) { auto attrs = buildBindings(2); - attrs.alloc("path").mkString(i.path.s); - attrs.alloc("prefix").mkString(i.prefix.s); + attrs.alloc("path").mkString(i.path.s, mem); + attrs.alloc("prefix").mkString(i.prefix.s, mem); (list[n] = allocValue())->mkAttrs(attrs); } v.mkList(list); diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc index 6990bb87cbd..d4824d9b9e5 100644 --- a/src/libexpr/primops/context.cc +++ b/src/libexpr/primops/context.cc @@ -17,7 +17,7 @@ static void prim_unsafeDiscardStringContext(EvalState & state, const PosIdx pos, if (auto * p = std::get_if(&c.raw)) filtered.insert(*p); - v.mkString(*s, filtered); + v.mkString(*s, filtered, state.mem); } static RegisterPrimOp primop_unsafeDiscardStringContext({ @@ -84,7 +84,7 @@ static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx p } } - v.mkString(*s, context2); + v.mkString(*s, context2, state.mem); } static RegisterPrimOp primop_unsafeDiscardOutputDependency( @@ -94,7 +94,7 @@ static RegisterPrimOp 
primop_unsafeDiscardOutputDependency( Create a copy of the given string where every [derivation deep](@docroot@/language/string-context.md#string-context-element-derivation-deep) string context element is turned into a - [constant](@docroot@/language/string-context.md#string-context-element-constant) + [constant](@docroot@/language/string-context.md#string-context-constant) string context element. This is the opposite of [`builtins.addDrvOutputDependencies`](#builtins-addDrvOutputDependencies). @@ -157,7 +157,7 @@ static void prim_addDrvOutputDependencies(EvalState & state, const PosIdx pos, V context.begin()->raw)}), }; - v.mkString(*s, context2); + v.mkString(*s, context2, state.mem); } static RegisterPrimOp primop_addDrvOutputDependencies( @@ -165,7 +165,7 @@ static RegisterPrimOp primop_addDrvOutputDependencies( .args = {"s"}, .doc = R"( Create a copy of the given string where a single - [constant](@docroot@/language/string-context.md#string-context-element-constant) + [constant](@docroot@/language/string-context.md#string-context-constant) string context element is turned into a [derivation deep](@docroot@/language/string-context.md#string-context-element-derivation-deep) string context element. 
@@ -239,7 +239,7 @@ static void prim_getContext(EvalState & state, const PosIdx pos, Value ** args, if (!info.second.outputs.empty()) { auto list = state.buildList(info.second.outputs.size()); for (const auto & [i, output] : enumerate(info.second.outputs)) - (list[i] = state.allocValue())->mkString(output); + (list[i] = state.allocValue())->mkString(output, state.mem); infoAttrs.alloc(state.s.outputs).mkList(list); } attrs.alloc(state.store->printStorePath(info.first)).mkAttrs(infoAttrs); @@ -342,7 +342,7 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value ** arg } } - v.mkString(orig, context); + v.mkString(orig, context, state.mem); } static RegisterPrimOp primop_appendContext({.name = "__appendContext", .arity = 2, .fun = prim_appendContext}); diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc index 942a07f89ab..4ab060f7807 100644 --- a/src/libexpr/primops/fetchMercurial.cc +++ b/src/libexpr/primops/fetchMercurial.cc @@ -81,17 +81,17 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value ** ar attrs.insert_or_assign("rev", rev->gitRev()); auto input = fetchers::Input::fromAttrs(state.fetchSettings, std::move(attrs)); - auto [storePath, accessor, input2] = input.fetchToStore(state.fetchSettings, state.store); + auto [storePath, accessor, input2] = input.fetchToStore(state.fetchSettings, *state.store); auto attrs2 = state.buildBindings(8); state.mkStorePathString(storePath, attrs2.alloc(state.s.outPath)); if (input2.getRef()) - attrs2.alloc("branch").mkString(*input2.getRef()); + attrs2.alloc("branch").mkString(*input2.getRef(), state.mem); // Backward compatibility: set 'rev' to // 0000000000000000000000000000000000000000 for a dirty tree. 
auto rev2 = input2.getRev().value_or(Hash(HashAlgorithm::SHA1)); - attrs2.alloc("rev").mkString(rev2.gitRev()); - attrs2.alloc("shortRev").mkString(rev2.gitRev().substr(0, 12)); + attrs2.alloc("rev").mkString(rev2.gitRev(), state.mem); + attrs2.alloc("shortRev").mkString(rev2.gitRev().substr(0, 12), state.mem); if (auto revCount = input2.getRevCount()) attrs2.alloc("revCount").mkInt(*revCount); v.mkAttrs(attrs2); diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index 1b1a0d1c72b..c1616463c53 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -35,7 +35,7 @@ void emitTreeAttrs( // FIXME: support arbitrary input attributes. if (auto narHash = input.getNarHash()) - attrs.alloc("narHash").mkString(narHash->to_string(HashFormat::SRI, true)); + attrs.alloc("narHash").mkString(narHash->to_string(HashFormat::SRI, true), state.mem); if (input.getType() == "git") attrs.alloc("submodules").mkBool(fetchers::maybeGetBoolAttr(input.attrs, "submodules").value_or(false)); @@ -43,13 +43,13 @@ void emitTreeAttrs( if (!forceDirty) { if (auto rev = input.getRev()) { - attrs.alloc("rev").mkString(rev->gitRev()); - attrs.alloc("shortRev").mkString(rev->gitShortRev()); + attrs.alloc("rev").mkString(rev->gitRev(), state.mem); + attrs.alloc("shortRev").mkString(rev->gitShortRev(), state.mem); } else if (emptyRevFallback) { // Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev auto emptyHash = Hash(HashAlgorithm::SHA1); - attrs.alloc("rev").mkString(emptyHash.gitRev()); - attrs.alloc("shortRev").mkString(emptyHash.gitShortRev()); + attrs.alloc("rev").mkString(emptyHash.gitRev(), state.mem); + attrs.alloc("shortRev").mkString(emptyHash.gitShortRev(), state.mem); } if (auto revCount = input.getRevCount()) @@ -59,13 +59,14 @@ void emitTreeAttrs( } if (auto dirtyRev = fetchers::maybeGetStrAttr(input.attrs, "dirtyRev")) { - attrs.alloc("dirtyRev").mkString(*dirtyRev); - 
attrs.alloc("dirtyShortRev").mkString(*fetchers::maybeGetStrAttr(input.attrs, "dirtyShortRev")); + attrs.alloc("dirtyRev").mkString(*dirtyRev, state.mem); + attrs.alloc("dirtyShortRev").mkString(*fetchers::maybeGetStrAttr(input.attrs, "dirtyShortRev"), state.mem); } if (auto lastModified = input.getLastModified()) { attrs.alloc("lastModified").mkInt(*lastModified); - attrs.alloc("lastModifiedDate").mkString(fmt("%s", std::put_time(std::gmtime(&*lastModified), "%Y%m%d%H%M%S"))); + attrs.alloc("lastModifiedDate") + .mkString(fmt("%s", std::put_time(std::gmtime(&*lastModified), "%Y%m%d%H%M%S")), state.mem); } v.mkAttrs(attrs); @@ -183,13 +184,13 @@ static void fetchTree( } if (!state.settings.pureEval && !input.isDirect()) - input = lookupInRegistries(state.fetchSettings, state.store, input, fetchers::UseRegistries::Limited).first; + input = lookupInRegistries(state.fetchSettings, *state.store, input, fetchers::UseRegistries::Limited).first; if (state.settings.pureEval && !input.isLocked(state.fetchSettings)) { if (input.getNarHash()) warn( - "Input '%s' is unlocked (e.g. lacks a Git revision) but does have a NAR hash. " - "This is deprecated since such inputs are verifiable but may not be reproducible.", + "Input '%s' is unlocked (e.g. lacks a Git revision) but is checked by NAR hash. 
" + "This is not reproducible and will break after garbage collection or when shared.", input.to_string()); else state @@ -209,7 +210,7 @@ static void fetchTree( } auto cachedInput = - state.inputCache->getAccessor(state.fetchSettings, state.store, input, fetchers::UseRegistries::No); + state.inputCache->getAccessor(state.fetchSettings, *state.store, input, fetchers::UseRegistries::No); auto storePath = state.mountInput(cachedInput.lockedInput, input, cachedInput.accessor, true); @@ -224,228 +225,126 @@ static void prim_fetchTree(EvalState & state, const PosIdx pos, Value ** args, V static RegisterPrimOp primop_fetchTree({ .name = "fetchTree", .args = {"input"}, - .doc = R"( - Fetch a file system tree or a plain file using one of the supported backends and return an attribute set with: - - - the resulting fixed-output [store path](@docroot@/store/store-path.md) - - the corresponding [NAR](@docroot@/store/file-system-object/content-address.md#serial-nix-archive) hash - - backend-specific metadata (currently not documented). - - *input* must be an attribute set with the following attributes: - - - `type` (String, required) - - One of the [supported source types](#source-types). - This determines other required and allowed input attributes. - - - `narHash` (String, optional) - - The `narHash` parameter can be used to substitute the source of the tree. - It also allows for verification of tree contents that may not be provided by the underlying transfer mechanism. - If `narHash` is set, the source is first looked up is the Nix store and [substituters](@docroot@/command-ref/conf-file.md#conf-substituters), and only fetched if not available. - - A subset of the output attributes of `fetchTree` can be re-used for subsequent calls to `fetchTree` to produce the same result again. - That is, `fetchTree` is idempotent. 
+ .doc = []() -> std::string { + std::string doc = stripIndentation(R"( + Fetch a file system tree or a plain file using one of the supported backends and return an attribute set with: - Downloads are cached in `$XDG_CACHE_HOME/nix`. - The remote source is fetched from the network if both are true: - - A NAR hash is supplied and the corresponding store path is not [valid](@docroot@/glossary.md#gloss-validity), that is, not available in the store + - the resulting fixed-output [store path](@docroot@/store/store-path.md) + - the corresponding [NAR](@docroot@/store/file-system-object/content-address.md#serial-nix-archive) hash + - backend-specific metadata (currently not documented). - > **Note** - > - > [Substituters](@docroot@/command-ref/conf-file.md#conf-substituters) are not used in fetching. + *input* must be an attribute set with the following attributes: - - There is no cache entry or the cache entry is older than [`tarball-ttl`](@docroot@/command-ref/conf-file.md#conf-tarball-ttl) + - `type` (String, required) - ## Source types + One of the [supported source types](#source-types). + This determines other required and allowed input attributes. - The following source types and associated input attributes are supported. + - `narHash` (String, optional) - + The `narHash` parameter can be used to substitute the source of the tree. + It also allows for verification of tree contents that may not be provided by the underlying transfer mechanism. + If `narHash` is set, the source is first looked up in the Nix store and [substituters](@docroot@/command-ref/conf-file.md#conf-substituters), and only fetched if not available. - - `"file"` + A subset of the output attributes of `fetchTree` can be re-used for subsequent calls to `fetchTree` to produce the same result again. + That is, `fetchTree` is idempotent. - Place a plain file into the Nix store. 
- This is similar to [`builtins.fetchurl`](@docroot@/language/builtins.md#builtins-fetchurl) + Downloads are cached in `$XDG_CACHE_HOME/nix`. + The remote source is fetched from the network if both are true: + - A NAR hash is supplied and the corresponding store path is not [valid](@docroot@/glossary.md#gloss-validity), that is, not available in the store - - `url` (String, required) - - Supported protocols: - - - `https` - - > **Example** - > - > ```nix - > fetchTree { - > type = "file"; - > url = "https://example.com/index.html"; - > } - > ``` - - - `http` - - Insecure HTTP transfer for legacy sources. - - > **Warning** + > **Note** > - > HTTP performs no encryption or authentication. - > Use a `narHash` known in advance to ensure the output has expected contents. + > [Substituters](@docroot@/command-ref/conf-file.md#conf-substituters) are not used in fetching. - - `file` + - There is no cache entry or the cache entry is older than [`tarball-ttl`](@docroot@/command-ref/conf-file.md#conf-tarball-ttl) - A file on the local file system. + ## Source types - > **Example** - > - > ```nix - > fetchTree { - > type = "file"; - > url = "file:///home/eelco/nix/README.md"; - > } - > ``` + The following source types and associated input attributes are supported. - - `"tarball"` + + )"); - Download a tar archive and extract it into the Nix store. 
- This has the same underlying implementation as [`builtins.fetchTarball`](@docroot@/language/builtins.md#builtins-fetchTarball) - - - `url` (String, required) - - > **Example** - > - > ```nix - > fetchTree { - > type = "tarball"; - > url = "https://github.com/NixOS/nixpkgs/tarball/nixpkgs-23.11"; - > } - > ``` - - - `"git"` + auto indentString = [](std::string const & str, std::string const & indent) { + std::string result; + std::istringstream stream(str); + std::string line; + bool first = true; + while (std::getline(stream, line)) { + if (!first) + result += "\n"; + result += indent + line; + first = false; + } + return result; + }; + + for (const auto & [schemeName, scheme] : fetchers::getAllInputSchemes()) { + doc += "\n- `" + quoteString(schemeName, '"') + "`\n\n"; + doc += indentString(scheme->schemeDescription(), " "); + if (!doc.empty() && doc.back() != '\n') + doc += "\n"; + + for (const auto & [attrName, attribute] : scheme->allowedAttrs()) { + doc += "\n - `" + attrName + "` (" + attribute.type + ", " + + (attribute.required ? "required" : "optional") + ")\n\n"; + doc += indentString(stripIndentation(attribute.doc), " "); + if (!doc.empty() && doc.back() != '\n') + doc += "\n"; + } + } - Fetch a Git tree and copy it to the Nix store. - This is similar to [`builtins.fetchGit`](@docroot@/language/builtins.md#builtins-fetchGit). + doc += "\n" + stripIndentation(R"( + The following input types are still subject to change: - - `url` (String, required) + - `"path"` + - `"github"` + - `"gitlab"` + - `"sourcehut"` + - `"mercurial"` - The URL formats supported are the same as for Git itself. + *input* can also be a [URL-like reference](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references). 
> **Example** > + > Fetch a GitHub repository using the attribute set representation: + > > ```nix - > fetchTree { - > type = "git"; - > url = "git@github.com:NixOS/nixpkgs.git"; + > builtins.fetchTree { + > type = "github"; + > owner = "NixOS"; + > repo = "nixpkgs"; + > rev = "ae2e6b3958682513d28f7d633734571fb18285dd"; > } > ``` - - > **Note** > - > If the URL points to a local directory, and no `ref` or `rev` is given, Nix only considers files added to the Git index, as listed by `git ls-files` but use the *current file contents* of the Git working directory. - - - `ref` (String, optional) - - By default, this has no effect. This becomes relevant only once `shallow` cloning is disabled. - - A [Git reference](https://git-scm.com/book/en/v2/Git-Internals-Git-References), such as a branch or tag name. - - Default: `"HEAD"` - - - `rev` (String, optional) - - A Git revision; a commit hash. - - Default: the tip of `ref` - - - `shallow` (Bool, optional) - - Make a shallow clone when fetching the Git tree. - When this is enabled, the options `ref` and `allRefs` have no effect anymore. - - Default: `true` - - - `submodules` (Bool, optional) - - Also fetch submodules if available. - - Default: `false` - - - `lfs` (Bool, optional) - - Fetch any [Git LFS](https://git-lfs.com/) files. - - Default: `false` - - - `allRefs` (Bool, optional) - - By default, this has no effect. This becomes relevant only once `shallow` cloning is disabled. - - Whether to fetch all references (eg. branches and tags) of the repository. - With this argument being true, it's possible to load a `rev` from *any* `ref`. - (Without setting this option, only `rev`s from the specified `ref` are supported). - - Default: `false` - - - `lastModified` (Integer, optional) - - Unix timestamp of the fetched commit. - - If set, pass through the value to the output attribute set. - Otherwise, generated from the fetched Git tree. 
- - - `revCount` (Integer, optional) - - Number of revisions in the history of the Git repository before the fetched commit. - - If set, pass through the value to the output attribute set. - Otherwise, generated from the fetched Git tree. - - The following input types are still subject to change: - - - `"path"` - - `"github"` - - `"gitlab"` - - `"sourcehut"` - - `"mercurial"` - - *input* can also be a [URL-like reference](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references). + > This evaluates to the following attribute set: + > + > ```nix + > { + > lastModified = 1686503798; + > lastModifiedDate = "20230611171638"; + > narHash = "sha256-rA9RqKP9OlBrgGCPvfd5HVAXDOy8k2SmPtB/ijShNXc="; + > outPath = "/nix/store/l5m6qlvfs9sdw14ja3qbzpglcjlb6j1x-source"; + > rev = "ae2e6b3958682513d28f7d633734571fb18285dd"; + > shortRev = "ae2e6b3"; + > } + > ``` - > **Example** - > - > Fetch a GitHub repository using the attribute set representation: - > - > ```nix - > builtins.fetchTree { - > type = "github"; - > owner = "NixOS"; - > repo = "nixpkgs"; - > rev = "ae2e6b3958682513d28f7d633734571fb18285dd"; - > } - > ``` - > - > This evaluates to the following attribute set: - > - > ```nix - > { - > lastModified = 1686503798; - > lastModifiedDate = "20230611171638"; - > narHash = "sha256-rA9RqKP9OlBrgGCPvfd5HVAXDOy8k2SmPtB/ijShNXc="; - > outPath = "/nix/store/l5m6qlvfs9sdw14ja3qbzpglcjlb6j1x-source"; - > rev = "ae2e6b3958682513d28f7d633734571fb18285dd"; - > shortRev = "ae2e6b3"; - > } - > ``` + > **Example** + > + > Fetch the same GitHub repository using the URL-like syntax: + > + > ```nix + > builtins.fetchTree "github:NixOS/nixpkgs/ae2e6b3958682513d28f7d633734571fb18285dd" + > ``` + )"); - > **Example** - > - > Fetch the same GitHub repository using the URL-like syntax: - > - > ```nix - > builtins.fetchTree "github:NixOS/nixpkgs/ae2e6b3958682513d28f7d633734571fb18285dd" - > ``` - )", + return doc; + }(), .fun = prim_fetchTree, }); @@ -569,14 +468,18 @@ static void fetch( auto 
storePath = unpack ? fetchToStore( state.fetchSettings, *state.store, - fetchers::downloadTarball(state.store, state.fetchSettings, *url), + fetchers::downloadTarball(*state.store, state.fetchSettings, *url), FetchMode::Copy, name) - : fetchers::downloadFile(state.store, state.fetchSettings, *url, name).storePath; + : fetchers::downloadFile(*state.store, state.fetchSettings, *url, name).storePath; if (expectedHash) { auto hash = unpack ? state.store->queryPathInfo(storePath)->narHash - : hashFile(HashAlgorithm::SHA256, state.store->toRealPath(storePath)); + : hashPath( + {state.store->requireStoreObjectAccessor(storePath)}, + FileSerialisationMethod::Flat, + HashAlgorithm::SHA256) + .hash; if (hash != *expectedHash) { state .error( diff --git a/src/libexpr/primops/fromTOML.cc b/src/libexpr/primops/fromTOML.cc index d2f91a75b63..562ff3d1497 100644 --- a/src/libexpr/primops/fromTOML.cc +++ b/src/libexpr/primops/fromTOML.cc @@ -1,5 +1,6 @@ #include "nix/expr/primops.hh" #include "nix/expr/eval-inline.hh" +#include "nix/expr/static-string-data.hh" #include "expr-config-private.hh" @@ -92,7 +93,7 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va std::istringstream tomlStream(std::string{toml}); - auto visit = [&](auto & self, Value & v, toml::value t) -> void { + auto visit = [&](this auto & self, Value & v, toml::value t) -> void { switch (t.type()) { case toml::value_t::table: { auto table = toml::get(t); @@ -100,7 +101,7 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va for (auto & elem : table) { forceNoNullByte(elem.first); - self(self, attrs.alloc(elem.first), elem.second); + self(attrs.alloc(elem.first), elem.second); } v.mkAttrs(attrs); @@ -110,7 +111,7 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va auto list = state.buildList(array.size()); for (const auto & [n, v] : enumerate(list)) - self(self, *(v = state.allocValue()), array[n]); + self(*(v = 
state.allocValue()), array[n]); v.mkList(list); } break; case toml::value_t::boolean: @@ -125,7 +126,7 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va case toml::value_t::string: { auto s = toml::get(t); forceNoNullByte(s); - v.mkString(s); + v.mkString(s, state.mem); } break; case toml::value_t::local_datetime: case toml::value_t::offset_datetime: @@ -136,12 +137,12 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va normalizeDatetimeFormat(t); #endif auto attrs = state.buildBindings(2); - attrs.alloc("_type").mkStringNoCopy("timestamp"); + attrs.alloc("_type").mkStringNoCopy("timestamp"_sds); std::ostringstream s; s << t; auto str = s.view(); forceNoNullByte(str); - attrs.alloc("value").mkString(str); + attrs.alloc("value").mkString(str, state.mem); v.mkAttrs(attrs); } else { throw std::runtime_error("Dates and times are not supported"); @@ -155,7 +156,6 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va try { visit( - visit, val, toml::parse( tomlStream, diff --git a/src/libexpr/symbol-table.cc b/src/libexpr/symbol-table.cc index 1e0d31997f2..052c7257037 100644 --- a/src/libexpr/symbol-table.cc +++ b/src/libexpr/symbol-table.cc @@ -49,12 +49,13 @@ SymbolStr::SymbolStr(const SymbolStr::Key & key) auto id = key.arena.allocate(size); auto v = (SymbolValue *) (const_cast(key.arena.data) + id); - auto s = (char *) (v + 1); - memcpy(s, key.s.data(), key.s.size()); - s[key.s.size()] = 0; + auto s = (StringData *) (v + 1); + s->size_ = key.s.size(); + std::memcpy(s->data_, key.s.data(), key.s.size()); + s->data_[key.s.size()] = '\0'; - v->mkStringNoCopy(s, nullptr); + v->mkStringNoCopy(*s); this->s = v; } diff --git a/src/libexpr/value-to-json.cc b/src/libexpr/value-to-json.cc index d1cdcccf12c..45ae57d7977 100644 --- a/src/libexpr/value-to-json.cc +++ b/src/libexpr/value-to-json.cc @@ -48,11 +48,11 @@ json printValueAsJSON( if (strict && state.executor->enabled && 
!Executor::amWorkerThread) parallelForceDeep(state, v, pos); - std::function recurse; - - recurse = [&](json & res, Value & v, PosIdx pos) { + auto recurse = [&](this const auto & recurse, json & res, Value & v, PosIdx pos) -> void { checkInterrupt(); + auto _level = state.addCallDepth(pos); + if (strict) state.forceValue(v, pos); @@ -68,7 +68,7 @@ json printValueAsJSON( case nString: { copyContext(v, context); - res = v.c_str(); + res = v.string_view(); break; } diff --git a/src/libexpr/value-to-xml.cc b/src/libexpr/value-to-xml.cc index 74529d5c0c6..21de85a1717 100644 --- a/src/libexpr/value-to-xml.cc +++ b/src/libexpr/value-to-xml.cc @@ -82,7 +82,7 @@ static void printValueAsXML( case nString: /* !!! show the context? */ copyContext(v, context); - doc.writeEmptyElement("string", singletonAttrs("value", v.c_str())); + doc.writeEmptyElement("string", singletonAttrs("value", v.string_view())); break; case nPath: @@ -102,14 +102,14 @@ static void printValueAsXML( if (strict) state.forceValue(*a->value, a->pos); if (a->value->type() == nString) - xmlAttrs["drvPath"] = drvPath = a->value->c_str(); + xmlAttrs["drvPath"] = drvPath = a->value->string_view(); } if (auto a = v.attrs()->get(state.s.outPath)) { if (strict) state.forceValue(*a->value, a->pos); if (a->value->type() == nString) - xmlAttrs["outPath"] = a->value->c_str(); + xmlAttrs["outPath"] = a->value->string_view(); } XMLOpenElement _(doc, "derivation", xmlAttrs); @@ -145,14 +145,14 @@ static void printValueAsXML( posToXML(state, xmlAttrs, state.positions[v.lambda().fun->pos]); XMLOpenElement _(doc, "function", xmlAttrs); - if (v.lambda().fun->hasFormals()) { + if (auto formals = v.lambda().fun->getFormals()) { XMLAttrs attrs; if (v.lambda().fun->arg) attrs["name"] = state.symbols[v.lambda().fun->arg]; - if (v.lambda().fun->formals->ellipsis) + if (formals->ellipsis) attrs["ellipsis"] = "1"; XMLOpenElement _(doc, "attrspat", attrs); - for (auto & i : v.lambda().fun->formals->lexicographicOrder(state.symbols)) 
+ for (auto & i : formals->lexicographicOrder(state.symbols)) doc.writeEmptyElement("attr", singletonAttrs("name", state.symbols[i.name])); } else doc.writeEmptyElement("varpat", singletonAttrs("name", state.symbols[v.lambda().fun->arg])); diff --git a/src/libexpr/value/context.cc b/src/libexpr/value/context.cc index d0c140ef795..a06d79ddebf 100644 --- a/src/libexpr/value/context.cc +++ b/src/libexpr/value/context.cc @@ -9,8 +9,7 @@ NixStringContextElem NixStringContextElem::parse(std::string_view s0, const Expe { std::string_view s = s0; - std::function parseRest; - parseRest = [&]() -> SingleDerivedPath { + auto parseRest = [&](this auto & parseRest) -> SingleDerivedPath { // Case on whether there is a '!' size_t index = s.find("!"); if (index == std::string_view::npos) { diff --git a/src/libfetchers-c/meson.build b/src/libfetchers-c/meson.build index 3761b0df23b..db415d9173e 100644 --- a/src/libfetchers-c/meson.build +++ b/src/libfetchers-c/meson.build @@ -32,7 +32,6 @@ add_project_arguments( ) subdir('nix-meson-build-support/common') -subdir('nix-meson-build-support/asan-options') sources = files( 'nix_api_fetchers.cc', diff --git a/src/libfetchers-tests/git-utils.cc b/src/libfetchers-tests/git-utils.cc index c20c494142e..0b21fd0c67d 100644 --- a/src/libfetchers-tests/git-utils.cc +++ b/src/libfetchers-tests/git-utils.cc @@ -48,7 +48,7 @@ class GitUtilsTest : public ::testing::Test ref openRepo() { - return GitRepo::openRepo(tmpDir, true, false); + return GitRepo::openRepo(tmpDir, {.create = true}); } std::string getRepoName() const diff --git a/src/libfetchers-tests/git.cc b/src/libfetchers-tests/git.cc index e8092b86cad..abc3dd74c5c 100644 --- a/src/libfetchers-tests/git.cc +++ b/src/libfetchers-tests/git.cc @@ -196,7 +196,7 @@ TEST_F(GitTest, submodulePeriodSupport) {"ref", "main"}, }); - auto [accessor, i] = input.getAccessor(settings, store); + auto [accessor, i] = input.getAccessor(settings, *store); 
ASSERT_EQ(accessor->readFile(CanonPath("deps/sub/lib.txt")), "hello from submodule\n"); } diff --git a/src/libfetchers-tests/input.cc b/src/libfetchers-tests/input.cc new file mode 100644 index 00000000000..faff55f2c2d --- /dev/null +++ b/src/libfetchers-tests/input.cc @@ -0,0 +1,61 @@ +#include "nix/fetchers/fetch-settings.hh" +#include "nix/fetchers/attrs.hh" +#include "nix/fetchers/fetchers.hh" + +#include + +#include + +namespace nix { + +using fetchers::Attr; + +struct InputFromAttrsTestCase +{ + fetchers::Attrs attrs; + std::string expectedUrl; + std::string description; + fetchers::Attrs expectedAttrs = attrs; +}; + +class InputFromAttrsTest : public ::testing::WithParamInterface, public ::testing::Test +{}; + +TEST_P(InputFromAttrsTest, attrsAreCorrectAndRoundTrips) +{ + fetchers::Settings fetchSettings; + + const auto & testCase = GetParam(); + + auto input = fetchers::Input::fromAttrs(fetchSettings, fetchers::Attrs(testCase.attrs)); + + EXPECT_EQ(input.toAttrs(), testCase.expectedAttrs); + EXPECT_EQ(input.toURLString(), testCase.expectedUrl); + + auto input2 = fetchers::Input::fromAttrs(fetchSettings, input.toAttrs()); + EXPECT_EQ(input, input2); + EXPECT_EQ(input.toAttrs(), input2.toAttrs()); +} + +INSTANTIATE_TEST_SUITE_P( + InputFromAttrs, + InputFromAttrsTest, + ::testing::Values( + // Test for issue #14429. 
+ InputFromAttrsTestCase{ + .attrs = + { + {"url", Attr("git+ssh://git@github.com/NixOS/nixpkgs")}, + {"type", Attr("git")}, + }, + .expectedUrl = "git+ssh://git@github.com/NixOS/nixpkgs", + .description = "strips_git_plus_prefix", + .expectedAttrs = + { + {"url", Attr("ssh://git@github.com/NixOS/nixpkgs")}, + {"type", Attr("git")}, + }, + }), + [](const ::testing::TestParamInfo & info) { return info.param.description; }); + +} // namespace nix diff --git a/src/libfetchers-tests/meson.build b/src/libfetchers-tests/meson.build index 858d7f3af9a..6bccdb05c9a 100644 --- a/src/libfetchers-tests/meson.build +++ b/src/libfetchers-tests/meson.build @@ -37,12 +37,12 @@ libgit2 = dependency('libgit2') deps_private += libgit2 subdir('nix-meson-build-support/common') -subdir('nix-meson-build-support/asan-options') sources = files( 'access-tokens.cc', 'git-utils.cc', 'git.cc', + 'input.cc', 'nix_api_fetchers.cc', 'public-key.cc', ) @@ -64,7 +64,7 @@ this_exe = executable( test( meson.project_name(), this_exe, - env : asan_test_options_env + { + env : { '_NIX_TEST_UNIT_DATA' : meson.current_source_dir() / 'data', }, protocol : 'gtest', diff --git a/src/libfetchers-tests/package.nix b/src/libfetchers-tests/package.nix index 8e82430d7d9..78061872582 100644 --- a/src/libfetchers-tests/package.nix +++ b/src/libfetchers-tests/package.nix @@ -61,7 +61,6 @@ mkMesonExecutable (finalAttrs: { buildInputs = [ writableTmpDirAsHomeHook ]; } '' - export ASAN_OPTIONS=abort_on_error=1:print_summary=1:detect_leaks=0 export _NIX_TEST_UNIT_DATA=${resolvePath ./data} ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} touch $out diff --git a/src/libfetchers/builtin.cc b/src/libfetchers/builtin.cc index a2df93939a6..44b3baf0b1a 100644 --- a/src/libfetchers/builtin.cc +++ b/src/libfetchers/builtin.cc @@ -39,7 +39,7 @@ static void builtinFetchTree(const BuiltinBuilderContext & ctx) // FIXME: disable use of the git/tarball cache - auto input = 
Input::fromAttrs(myFetchSettings, jsonToAttrs(ctx.drv.structuredAttrs->structuredAttrs["input"])); + auto input = Input::fromAttrs(myFetchSettings, jsonToAttrs(ctx.drv.structuredAttrs->structuredAttrs.at("input"))); std::cerr << fmt("fetching '%s'...\n", input.to_string()); @@ -48,7 +48,7 @@ static void builtinFetchTree(const BuiltinBuilderContext & ctx) Nix's daemon so we can use the real store? */ auto tmpStore = openStore(ctx.tmpDirInSandbox + "/nix"); - auto [accessor, lockedInput] = input.getAccessor(myFetchSettings, tmpStore); + auto [accessor, lockedInput] = input.getAccessor(myFetchSettings, *tmpStore); auto source = sinkToSource([&](Sink & sink) { accessor->dumpPath(CanonPath::root, sink); }); diff --git a/src/libfetchers/cache.cc b/src/libfetchers/cache.cc index 87fe3391c2c..1db3ed8dc89 100644 --- a/src/libfetchers/cache.cc +++ b/src/libfetchers/cache.cc @@ -38,7 +38,7 @@ struct CacheImpl : Cache { auto state(_state.lock()); - auto dbPath = getCacheDir() + "/fetcher-cache-v4.sqlite"; + auto dbPath = (getCacheDir() / "fetcher-cache-v4.sqlite").string(); createDirs(dirOf(dbPath)); state->db = SQLite(dbPath); diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index abd0e294094..48c75df4f64 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -5,6 +5,7 @@ #include "nix/util/json-utils.hh" #include "nix/fetchers/fetch-settings.hh" #include "nix/fetchers/fetch-to-store.hh" +#include "nix/util/url.hh" #include "nix/util/forwarding-source-accessor.hh" #include "nix/util/archive.hh" @@ -27,18 +28,9 @@ void registerInputScheme(std::shared_ptr && inputScheme) throw Error("Input scheme with name %s already registered", schemeName); } -nlohmann::json dumpRegisterInputSchemeInfo() +const InputSchemeMap & getAllInputSchemes() { - using nlohmann::json; - - auto res = json::object(); - - for (auto & [name, scheme] : inputSchemes()) { - auto & r = res[name] = json::object(); - r["allowedAttrs"] = scheme->allowedAttrs(); - } - - 
return res; + return inputSchemes(); } Input Input::fromURL(const Settings & settings, const std::string & url, bool requireTree) @@ -67,6 +59,12 @@ Input Input::fromURL(const Settings & settings, const ParsedURL & url, bool requ } } + // Provide a helpful hint when user tries file+git instead of git+file + auto parsedScheme = parseUrlScheme(url.scheme); + if (parsedScheme.application == "file" && parsedScheme.transport == "git") { + throw Error("input '%s' is unsupported; did you mean 'git+file' instead of 'file+git'?", url); + } + throw Error("input '%s' is unsupported", url); } @@ -114,7 +112,7 @@ Input Input::fromAttrs(const Settings & settings, Attrs && attrs) return std::move(*res); } -std::optional Input::getFingerprint(ref store) const +std::optional Input::getFingerprint(Store & store) const { if (!scheme) return std::nullopt; @@ -199,7 +197,7 @@ bool Input::contains(const Input & other) const } // FIXME: remove -std::tuple, Input> Input::fetchToStore(const Settings & settings, ref store) const +std::tuple, Input> Input::fetchToStore(const Settings & settings, Store & store) const { if (!scheme) throw Error("cannot fetch unsupported input '%s'", attrsToJSON(toAttrs())); @@ -207,9 +205,9 @@ std::tuple, Input> Input::fetchToStore(const Sett try { auto [accessor, result] = getAccessorUnchecked(settings, store); - auto storePath = nix::fetchToStore(settings, *store, SourcePath(accessor), FetchMode::Copy, result.getName()); + auto storePath = nix::fetchToStore(settings, store, SourcePath(accessor), FetchMode::Copy, result.getName()); - auto narHash = store->queryPathInfo(storePath)->narHash; + auto narHash = store.queryPathInfo(storePath)->narHash; result.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true)); result.attrs.insert_or_assign("__final", Explicit(true)); @@ -282,7 +280,7 @@ void Input::checkLocks(Input specified, Input & result) } } -std::pair, Input> Input::getAccessor(const Settings & settings, ref store) const +std::pair, 
Input> Input::getAccessor(const Settings & settings, Store & store) const { try { auto [accessor, result] = getAccessorUnchecked(settings, store); @@ -313,7 +311,7 @@ struct SubstitutedSourceAccessor : ForwardingSourceAccessor } }; -std::pair, Input> Input::getAccessorUnchecked(const Settings & settings, ref store) const +std::pair, Input> Input::getAccessorUnchecked(const Settings & settings, Store & store) const { // FIXME: cache the accessor @@ -322,10 +320,10 @@ std::pair, Input> Input::getAccessorUnchecked(const Settings std::optional storePath; if (isFinal() && getNarHash()) - storePath = computeStorePath(*store); + storePath = computeStorePath(store); auto makeStoreAccessor = [&]() -> std::pair, Input> { - auto accessor = make_ref(ref{store->getFSAccessor(*storePath)}); + auto accessor = make_ref(store.requireStoreObjectAccessor(*storePath)); // FIXME: use the NAR hash for fingerprinting Git trees that have a .gitattributes file, since we don't know if // we used `git archive` or libgit2 to fetch it. @@ -340,7 +338,7 @@ std::pair, Input> Input::getAccessorUnchecked(const Settings if (accessor->fingerprint) { settings.getCache()->upsert( makeSourcePathToHashCacheKey(*accessor->fingerprint, ContentAddressMethod::Raw::NixArchive, "/"), - {{"hash", store->queryPathInfo(*storePath)->narHash.to_string(HashFormat::SRI, true)}}); + {{"hash", store.queryPathInfo(*storePath)->narHash.to_string(HashFormat::SRI, true)}}); } // FIXME: ideally we would use the `showPath()` of the @@ -354,8 +352,8 @@ std::pair, Input> Input::getAccessorUnchecked(const Settings reuse it. We only do this for final inputs, since otherwise there is a risk that we don't return the same attributes (like `lastModified`) that the "real" fetcher would return. 
*/ - if (storePath && store->isValidPath(*storePath)) { - debug("using input '%s' in '%s'", to_string(), store->printStorePath(*storePath)); + if (storePath && store.isValidPath(*storePath)) { + debug("using input '%s' in '%s'", to_string(), store.printStorePath(*storePath)); return makeStoreAccessor(); } @@ -372,7 +370,7 @@ std::pair, Input> Input::getAccessorUnchecked(const Settings if (storePath) { // Fall back to substitution. try { - store->ensurePath(*storePath); + store.ensurePath(*storePath); warn( "Successfully substituted input '%s' after failing to fetch it from its original location: %s", to_string(), @@ -396,7 +394,7 @@ Input Input::applyOverrides(std::optional ref, std::optional return scheme->applyOverrides(*this, ref, rev); } -void Input::clone(const Settings & settings, ref store, const std::filesystem::path & destDir) const +void Input::clone(const Settings & settings, Store & store, const std::filesystem::path & destDir) const { assert(scheme); scheme->clone(settings, store, *this, destDir); @@ -513,7 +511,7 @@ void InputScheme::putFile( } void InputScheme::clone( - const Settings & settings, ref store, const Input & input, const std::filesystem::path & destDir) const + const Settings & settings, Store & store, const Input & input, const std::filesystem::path & destDir) const { if (std::filesystem::exists(destDir)) throw Error("cannot clone into existing path %s", destDir); @@ -522,9 +520,9 @@ void InputScheme::clone( Activity act(*logger, lvlTalkative, actUnknown, fmt("copying '%s' to %s...", input2.to_string(), destDir)); - auto source = sinkToSource([&](Sink & sink) { accessor->dumpPath(CanonPath::root, sink); }); - - restorePath(destDir, *source); + RestoreSink sink(/*startFsync=*/false); + sink.dstPath = destDir; + copyRecursive(*accessor, CanonPath::root, sink, CanonPath::root); } std::optional InputScheme::experimentalFeature() const @@ -548,10 +546,11 @@ using namespace nix; fetchers::PublicKey adl_serializer::from_json(const json & json) 
{ fetchers::PublicKey res = {}; - if (auto type = optionalValueAt(json, "type")) + auto & obj = getObject(json); + if (auto * type = optionalValueAt(obj, "type")) res.type = getString(*type); - res.key = getString(valueAt(json, "key")); + res.key = getString(valueAt(obj, "key")); return res; } diff --git a/src/libfetchers/git-lfs-fetch.cc b/src/libfetchers/git-lfs-fetch.cc index 9688daa4a71..e2b2c2e7dda 100644 --- a/src/libfetchers/git-lfs-fetch.cc +++ b/src/libfetchers/git-lfs-fetch.cc @@ -209,7 +209,7 @@ std::vector Fetch::fetchUrls(const std::vector & pointe auto url = api.endpoint + "/objects/batch"; const auto & authHeader = api.authHeader; FileTransferRequest request(parseURL(url)); - request.post = true; + request.method = HttpMethod::Post; Headers headers; if (authHeader.has_value()) headers.push_back({"Authorization", *authHeader}); @@ -219,7 +219,9 @@ std::vector Fetch::fetchUrls(const std::vector & pointe nlohmann::json oidList = pointerToPayload(pointers); nlohmann::json data = {{"operation", "download"}}; data["objects"] = oidList; - request.data = data.dump(); + auto payload = data.dump(); + StringSource source{payload}; + request.data = {source}; FileTransferResult result = getFileTransfer()->upload(request); auto responseString = result.data; @@ -266,10 +268,10 @@ void Fetch::fetch( return; } - Path cacheDir = getCacheDir() + "/git-lfs"; + std::filesystem::path cacheDir = getCacheDir() / "git-lfs"; std::string key = hashString(HashAlgorithm::SHA256, pointerFilePath.rel()).to_string(HashFormat::Base16, false) + "/" + pointer->oid; - Path cachePath = cacheDir + "/" + key; + std::filesystem::path cachePath = cacheDir / key; if (pathExists(cachePath)) { debug("using cache entry %s -> %s", key, cachePath); sink(readFile(cachePath)); @@ -300,8 +302,8 @@ void Fetch::fetch( downloadToSink(ourl, authHeader, sink, sha256, size); debug("creating cache entry %s -> %s", key, cachePath); - if (!pathExists(dirOf(cachePath))) - createDirs(dirOf(cachePath)); + if 
(!pathExists(cachePath.parent_path())) + createDirs(cachePath.parent_path()); writeFile(cachePath, sink.s); debug("%s fetched with git-lfs", pointerFilePath); diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 02b9f68928e..e0bcfe92a9a 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -9,6 +9,7 @@ #include "nix/util/users.hh" #include "nix/util/fs-sink.hh" #include "nix/util/sync.hh" +#include "nix/util/util.hh" #include "nix/util/thread-pool.hh" #include "nix/util/pool.hh" #include "nix/util/executable-path.hh" @@ -24,6 +25,7 @@ #include #include #include +#include #include #include #include @@ -31,6 +33,7 @@ #include #include #include +#include #include #include #include @@ -89,7 +92,7 @@ typedef std::unique_ptr> ObjectDb; typedef std::unique_ptr> PackBuilder; typedef std::unique_ptr> Indexer; -Hash toHash(const git_oid & oid) +static Hash toHash(const git_oid & oid) { #ifdef GIT_EXPERIMENTAL_SHA256 assert(oid.type == GIT_OID_SHA1); @@ -108,7 +111,7 @@ static void initLibGit2() }); } -git_oid hashToOID(const Hash & hash) +static git_oid hashToOID(const Hash & hash) { git_oid oid; if (git_oid_fromstr(&oid, hash.gitRev().c_str())) @@ -116,7 +119,7 @@ git_oid hashToOID(const Hash & hash) return oid; } -Object lookupObject(git_repository * repo, const git_oid & oid, git_object_t type = GIT_OBJECT_ANY) +static Object lookupObject(git_repository * repo, const git_oid & oid, git_object_t type = GIT_OBJECT_ANY) { Object obj; if (git_object_lookup(Setter(obj), repo, &oid, type)) { @@ -127,7 +130,7 @@ Object lookupObject(git_repository * repo, const git_oid & oid, git_object_t typ } template -T peelObject(git_object * obj, git_object_t type) +static T peelObject(git_object * obj, git_object_t type) { T obj2; if (git_object_peel((git_object **) (typename T::pointer *) Setter(obj2), obj, type)) { @@ -138,7 +141,7 @@ T peelObject(git_object * obj, git_object_t type) } template -T dupObject(typename T::pointer obj) 
+static T dupObject(typename T::pointer obj) { T obj2; if (git_object_dup((git_object **) (typename T::pointer *) Setter(obj2), (git_object *) obj)) @@ -201,16 +204,19 @@ static git_packbuilder_progress PACKBUILDER_PROGRESS_CHECK_INTERRUPT = &packBuil } // extern "C" -static void initRepoAtomically(std::filesystem::path & path, bool bare) +static void initRepoAtomically(std::filesystem::path & path, GitRepo::Options options) { if (pathExists(path.string())) return; - Path tmpDir = createTempDir(os_string_to_string(PathViewNG{std::filesystem::path(path).parent_path()})); + if (!options.create) + throw Error("Git repository %s does not exist.", path); + + std::filesystem::path tmpDir = createTempDir(path.parent_path()); AutoDelete delTmpDir(tmpDir, true); Repository tmpRepo; - if (git_repository_init(Setter(tmpRepo), tmpDir.c_str(), bare)) + if (git_repository_init(Setter(tmpRepo), tmpDir.string().c_str(), options.bare)) throw Error("creating Git repository %s: %s", path, git_error_last()->message); try { std::filesystem::rename(tmpDir, path); @@ -232,7 +238,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this /** Location of the repository on disk. */ std::filesystem::path path; - bool bare; + Options options; /** * libgit2 repository. Note that new objects are not written to disk, @@ -245,32 +251,57 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this * In-memory object store for efficient batched writing to packfiles. * Owned by `repo`. */ - git_odb_backend * mempack_backend; + git_odb_backend * mempackBackend = nullptr; - bool useMempack; + /** + * On-disk packfile object store. + * Owned by `repo`. 
+ */ + git_odb_backend * packBackend = nullptr; - GitRepoImpl(std::filesystem::path _path, bool create, bool bare, bool useMempack = false) + GitRepoImpl(std::filesystem::path _path, Options _options) : path(std::move(_path)) - , bare(bare) - , useMempack(useMempack) + , options(_options) { initLibGit2(); - initRepoAtomically(path, bare); + initRepoAtomically(path, options); if (git_repository_open(Setter(repo), path.string().c_str())) throw Error("opening Git repository %s: %s", path, git_error_last()->message); - if (useMempack) { - ObjectDb odb; + ObjectDb odb; + if (options.packfilesOnly) { + /* Create a fresh object database because by default the repo also + loose object backends. We are not using any of those for the + tarball cache, but libgit2 still does a bunch of unnecessary + syscalls that always fail with ENOENT. NOTE: We are only creating + a libgit2 object here and not modifying the repo. Think of this as + enabling the specific backend. + */ + + if (git_odb_new(Setter(odb))) + throw Error("creating Git object database: %s", git_error_last()->message); + + if (git_odb_backend_pack(&packBackend, (path / "objects").string().c_str())) + throw Error("creating pack backend: %s", git_error_last()->message); + + if (git_odb_add_backend(odb.get(), packBackend, 1)) + throw Error("adding pack backend to Git object database: %s", git_error_last()->message); + } else { if (git_repository_odb(Setter(odb), repo.get())) throw Error("getting Git object database: %s", git_error_last()->message); + } - // mempack_backend will be owned by the repository, so we are not expected to free it ourselves. - if (git_mempack_new(&mempack_backend)) - throw Error("creating mempack backend: %s", git_error_last()->message); + // mempack_backend will be owned by the repository, so we are not expected to free it ourselves. 
+ if (git_mempack_new(&mempackBackend)) + throw Error("creating mempack backend: %s", git_error_last()->message); - if (git_odb_add_backend(odb.get(), mempack_backend, 999)) - throw Error("adding mempack backend to Git object database: %s", git_error_last()->message); + if (git_odb_add_backend(odb.get(), mempackBackend, 999)) + throw Error("adding mempack backend to Git object database: %s", git_error_last()->message); + + if (options.packfilesOnly) { + if (git_repository_set_odb(repo.get(), odb.get())) + throw Error("setting Git object database: %s", git_error_last()->message); } } @@ -281,9 +312,6 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this void flush() override { - if (!useMempack) - return; - checkInterrupt(); git_buf buf = GIT_BUF_INIT; @@ -295,7 +323,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this git_packbuilder_set_threads(packBuilder.get(), 0 /* autodetect */); packBuilderContext.handleException( - "preparing packfile", git_mempack_write_thin_pack(mempack_backend, packBuilder.get())); + "preparing packfile", git_mempack_write_thin_pack(mempackBackend, packBuilder.get())); checkInterrupt(); packBuilderContext.handleException("writing packfile", git_packbuilder_write_buf(&buf, packBuilder.get())); checkInterrupt(); @@ -328,7 +356,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this if (git_indexer_commit(indexer.get(), &stats)) throw Error("committing git packfile index: %s", git_error_last()->message); - if (git_mempack_reset(mempack_backend)) + if (git_mempack_reset(mempackBackend)) throw Error("resetting git mempack backend: %s", git_error_last()->message); checkInterrupt(); @@ -341,10 +369,9 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this Pool getPool() { // TODO: as an optimization, it would be nice to include `this` in the pool. 
- return Pool( - std::numeric_limits::max(), [this, useMempack(useMempack)]() -> ref { - return make_ref(path, false, bare, useMempack); - }); + return Pool(std::numeric_limits::max(), [this]() -> ref { + return make_ref(path, options); + }); } uint64_t getRevCount(const Hash & rev) override @@ -562,27 +589,6 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this ref getFileSystemObjectSink() override; - static int sidebandProgressCallback(const char * str, int len, void * payload) - { - auto act = (Activity *) payload; - act->result(resFetchStatus, trim(std::string_view(str, len))); - return getInterrupted() ? -1 : 0; - } - - static int transferProgressCallback(const git_indexer_progress * stats, void * payload) - { - auto act = (Activity *) payload; - act->result( - resFetchStatus, - fmt("%d/%d objects received, %d/%d deltas indexed, %.1f MiB", - stats->received_objects, - stats->total_objects, - stats->indexed_deltas, - stats->total_deltas, - stats->received_bytes / (1024.0 * 1024.0))); - return getInterrupted() ? -1 : 0; - } - void fetch(const std::string & url, const std::string & refspec, bool shallow) override { Activity act(*logger, lvlTalkative, actFetchTree, fmt("fetching Git repository '%s'", url)); @@ -618,8 +624,6 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this // with "could not read from remote repository". opts.depth = shallow && parseURL(url).scheme != "ssh" ? 
1 : GIT_FETCH_DEPTH_FULL; opts.callbacks.payload = &act; - opts.callbacks.sideband_progress = sidebandProgressCallback; - opts.callbacks.transfer_progress = transferProgressCallback; if (git_remote_fetch(remote.get(), &refspecs2, &opts, nullptr)) throw Error("fetching '%s' from '%s': %s", refspec, url, git_error_last()->message); @@ -733,9 +737,9 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this } }; -ref GitRepo::openRepo(const std::filesystem::path & path, bool create, bool bare) +ref GitRepo::openRepo(const std::filesystem::path & path, GitRepo::Options options) { - return make_ref(path, create, bare); + return make_ref(path, options); } std::string GitAccessorOptions::makeFingerprint(const Hash & rev) const @@ -1081,18 +1085,9 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink { ref repo; - bool useMempack = -// On macOS, mempack is beneficial. -#ifdef __linux__ - false -#else - true -#endif - ; - Pool repoPool; - unsigned int concurrency = std::min(std::thread::hardware_concurrency(), 4U); + unsigned int concurrency = std::min(std::thread::hardware_concurrency(), 8U); ThreadPool workers{concurrency}; @@ -1194,16 +1189,27 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink workers.enqueue([this, path, data{std::move(crf.data)}, executable(crf.executable), id(nextId++)]() { auto repo(repoPool.get()); - git_writestream * stream = nullptr; - if (git_blob_create_from_stream(&stream, *repo, nullptr)) - throw Error("creating a blob stream object: %s", git_error_last()->message); - - if (stream->write(stream, data.data(), data.size())) - throw Error("writing a blob for tarball member '%s': %s", path, git_error_last()->message); + /* Monkey-patching the pack backend to only read the pack directory + once. Otherwise it will do a readdir for each added oid when it's + not found and that translates to ~6 syscalls. Since we are never + writing pack files until flushing we can force the odb backend to + read the directory just once. 
It's very convenient that the vtable is + semi-public interface and is up for grabs. + + This is purely an optimization for our use-case with a tarball cache. + libgit2 calls refresh() if the backend provides it when an oid isn't found. + We are only writing objects to a mempack (it has higher priority) and there isn't + a realistic use-case where a previously missing object would appear from thin air + on the disk (unless another process happens to be unpacking a similar tarball to + the cache at the same time, but that's a very unrealistic scenario). + */ + if (auto * backend = repo->packBackend) + backend->refresh = nullptr; git_oid oid; - if (git_blob_create_from_stream_commit(&oid, stream)) - throw Error("creating a blob object for tarball member '%s': %s", path, git_error_last()->message); + if (git_blob_create_from_buffer(&oid, *repo, data.data(), data.size())) + throw Error( + "creating a blob object for '%s' from in-memory buffer: %s", path, git_error_last()->message); auto state(_state.lock()); addNode(*state, path, Child{executable ? GIT_FILEMODE_BLOB_EXECUTABLE : GIT_FILEMODE_BLOB, oid, id}); @@ -1273,8 +1279,7 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink workers.process(); }; - if (useMempack) - doFlush(); + doFlush(); processGraph( {&root}, @@ -1306,10 +1311,11 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink node->oid = oid; }, true, - useMempack ? 1 : concurrency); + // FIXME: make this multi-threaded again? Doesn't work + // with packfiles. 
+ 1); - if (useMempack) - doFlush(); + doFlush(); return toHash(root.oid.value()); } @@ -1394,7 +1400,8 @@ ref Settings::getTarballCache() const { auto tarballCache(_tarballCache.lock()); if (!*tarballCache) - *tarballCache = GitRepo::openRepo(std::filesystem::path(getCacheDir()) / "tarball-cache", true, true); + *tarballCache = + GitRepo::openRepo(getCacheDir() / "tarball-cache", {.create = true, .bare = true, .packfilesOnly = true}); return ref(*tarballCache); } @@ -1409,7 +1416,7 @@ GitRepo::WorkdirInfo GitRepo::getCachedWorkdirInfo(const std::filesystem::path & if (i != cache->end()) return i->second; } - auto workdirInfo = GitRepo::openRepo(path)->getWorkdirInfo(); + auto workdirInfo = GitRepo::openRepo(path, {})->getWorkdirInfo(); _cache.lock()->emplace(path, workdirInfo); return workdirInfo; } diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index ef5b19af8fe..7f33d9d8c60 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -43,10 +43,10 @@ bool isCacheFileWithinTtl(time_t now, const struct stat & st) return st.st_mtime + static_cast(settings.tarballTtl) > now; } -Path getCachePath(std::string_view key, bool shallow) +std::filesystem::path getCachePath(std::string_view key, bool shallow) { - return getCacheDir() + "/gitv3/" + hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false) - + (shallow ? "-shallow" : ""); + return getCacheDir() / "gitv3" + / (hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false) + (shallow ? "-shallow" : "")); } // Returns the name of the HEAD branch. @@ -56,13 +56,13 @@ Path getCachePath(std::string_view key, bool shallow) // // ref: refs/heads/main HEAD // ... 
-std::optional readHead(const Path & path) +std::optional readHead(const std::filesystem::path & path) { auto [status, output] = runProgram( RunOptions{ .program = "git", // FIXME: use 'HEAD' to avoid returning all refs - .args = {"ls-remote", "--symref", path}, + .args = {"ls-remote", "--symref", path.string()}, .isInteractive = true, }); if (status != 0) @@ -87,9 +87,9 @@ std::optional readHead(const Path & path) // Persist the HEAD ref from the remote repo in the local cached repo. bool storeCachedHead(const std::string & actualUrl, bool shallow, const std::string & headRef) { - Path cacheDir = getCachePath(actualUrl, shallow); + std::filesystem::path cacheDir = getCachePath(actualUrl, shallow); try { - runProgram("git", true, {"-C", cacheDir, "--git-dir", ".", "symbolic-ref", "--", "HEAD", headRef}); + runProgram("git", true, {"-C", cacheDir.string(), "--git-dir", ".", "symbolic-ref", "--", "HEAD", headRef}); } catch (ExecError & e) { if ( #ifndef WIN32 // TODO abstract over exit status handling on Windows @@ -110,13 +110,13 @@ std::optional readHeadCached(const std::string & actualUrl, bool sh { // Create a cache path to store the branch of the HEAD ref. Append something // in front of the URL to prevent collision with the repository itself. 
- Path cacheDir = getCachePath(actualUrl, shallow); - Path headRefFile = cacheDir + "/HEAD"; + std::filesystem::path cacheDir = getCachePath(actualUrl, shallow); + std::filesystem::path headRefFile = cacheDir / "HEAD"; time_t now = time(0); struct stat st; std::optional cachedRef; - if (stat(headRefFile.c_str(), &st) == 0) { + if (stat(headRefFile.string().c_str(), &st) == 0) { cachedRef = readHead(cacheDir); if (cachedRef != std::nullopt && *cachedRef != gitInitialBranch && isCacheFileWithinTtl(now, st)) { debug("using cached HEAD ref '%s' for repo '%s'", *cachedRef, actualUrl); @@ -169,8 +169,6 @@ struct GitInputScheme : InputScheme return {}; auto url2(url); - if (hasPrefix(url2.scheme, "git+")) - url2.scheme = std::string(url2.scheme, 4); url2.query.clear(); Attrs attrs; @@ -197,28 +195,183 @@ struct GitInputScheme : InputScheme return "git"; } - StringSet allowedAttrs() const override + std::string schemeDescription() const override { - return { - "url", - "ref", - "rev", - "shallow", - "submodules", - "lfs", - "exportIgnore", - "lastModified", - "revCount", - "narHash", - "allRefs", - "name", - "dirtyRev", - "dirtyShortRev", - "verifyCommit", - "keytype", - "publicKey", - "publicKeys", + return stripIndentation(R"( + Fetch a Git tree and copy it to the Nix store. + This is similar to [`builtins.fetchGit`](@docroot@/language/builtins.md#builtins-fetchGit). + )"); + } + + const std::map & allowedAttrs() const override + { + static const std::map attrs = { + { + "url", + { + .type = "String", + .required = true, + .doc = R"( + The URL formats supported are the same as for Git itself. + + > **Example** + > + > ```nix + > fetchTree { + > type = "git"; + > url = "git@github.com:NixOS/nixpkgs.git"; + > } + > ``` + + > **Note** + > + > If the URL points to a local directory, and no `ref` or `rev` is given, Nix only considers files added to the Git index, as listed by `git ls-files` but uses the *current file contents* of the Git working directory. 
+ )", + }, + }, + { + "ref", + { + .type = "String", + .required = false, + .doc = R"( + By default, this has no effect. This becomes relevant only once `shallow` cloning is disabled. + + A [Git reference](https://git-scm.com/book/en/v2/Git-Internals-Git-References), such as a branch or tag name. + + Default: `"HEAD"` + )", + }, + }, + { + "rev", + { + .type = "String", + .required = false, + .doc = R"( + A Git revision; a commit hash. + + Default: the tip of `ref` + )", + }, + }, + { + "shallow", + { + .type = "Bool", + .required = false, + .doc = R"( + Make a shallow clone when fetching the Git tree. + When this is enabled, the options `ref` and `allRefs` have no effect anymore. + + Default: `true` + )", + }, + }, + { + "submodules", + { + .type = "Bool", + .required = false, + .doc = R"( + Also fetch submodules if available. + + Default: `false` + )", + }, + }, + { + "lfs", + { + .type = "Bool", + .required = false, + .doc = R"( + Fetch any [Git LFS](https://git-lfs.com/) files. + + Default: `false` + )", + }, + }, + { + "exportIgnore", + {}, + }, + { + "lastModified", + { + .type = "Integer", + .required = false, + .doc = R"( + Unix timestamp of the fetched commit. + + If set, pass through the value to the output attribute set. + Otherwise, generated from the fetched Git tree. + )", + }, + }, + { + "revCount", + { + .type = "Integer", + .required = false, + .doc = R"( + Number of revisions in the history of the Git repository before the fetched commit. + + If set, pass through the value to the output attribute set. + Otherwise, generated from the fetched Git tree. + )", + }, + }, + { + "narHash", + {}, + }, + { + "allRefs", + { + .type = "Bool", + .required = false, + .doc = R"( + By default, this has no effect. This becomes relevant only once `shallow` cloning is disabled. + + Whether to fetch all references (eg. branches and tags) of the repository. + With this argument being true, it's possible to load a `rev` from *any* `ref`. 
+ (Without setting this option, only `rev`s from the specified `ref` are supported). + + Default: `false` + )", + }, + }, + { + "name", + {}, + }, + { + "dirtyRev", + {}, + }, + { + "dirtyShortRev", + {}, + }, + { + "verifyCommit", + {}, + }, + { + "keytype", + {}, + }, + { + "publicKey", + {}, + }, + { + "publicKeys", + {}, + }, }; + return attrs; } std::optional inputFromAttrs(const Settings & settings, const Attrs & attrs) const override @@ -283,7 +436,7 @@ struct GitInputScheme : InputScheme return res; } - void clone(const Settings & settings, ref store, const Input & input, const std::filesystem::path & destDir) + void clone(const Settings & settings, Store & store, const Input & input, const std::filesystem::path & destDir) const override { auto repoInfo = getRepoInfo(input); @@ -300,7 +453,7 @@ struct GitInputScheme : InputScheme if (input.getRev()) throw UnimplementedError("cloning a specific revision is not implemented"); - args.push_back(destDir); + args.push_back(destDir.string()); runProgram("git", true, args, {}, true); } @@ -487,11 +640,6 @@ struct GitInputScheme : InputScheme url); } - // If we don't check here for the path existence, then we can give libgit2 any directory - // and it will initialize them as git directories. 
- if (!pathExists(path)) { - throw Error("The path '%s' does not exist.", path); - } repoInfo.location = std::filesystem::absolute(path); } else { if (url.scheme == "file") @@ -553,7 +701,7 @@ struct GitInputScheme : InputScheme if (auto res = cache->lookup(key)) return getIntAttr(*res, "lastModified"); - auto lastModified = GitRepo::openRepo(repoDir)->getLastModified(rev); + auto lastModified = GitRepo::openRepo(repoDir, {})->getLastModified(rev); cache->upsert(key, {{"lastModified", lastModified}}); @@ -576,7 +724,7 @@ struct GitInputScheme : InputScheme Activity act( *logger, lvlChatty, actUnknown, fmt("getting Git revision count of '%s'", repoInfo.locationToArg())); - auto revCount = GitRepo::openRepo(repoDir)->getRevCount(rev); + auto revCount = GitRepo::openRepo(repoDir, {})->getRevCount(rev); cache->upsert(key, Attrs{{"revCount", revCount}}); @@ -587,7 +735,7 @@ struct GitInputScheme : InputScheme { auto head = std::visit( overloaded{ - [&](const std::filesystem::path & path) { return GitRepo::openRepo(path)->getWorkdirRef(); }, + [&](const std::filesystem::path & path) { return GitRepo::openRepo(path, {})->getWorkdirRef(); }, [&](const ParsedURL & url) { return readHeadCached(url.to_string(), shallow); }}, repoInfo.location); if (!head) { @@ -672,7 +820,7 @@ struct GitInputScheme : InputScheme PathFilter filter = [&](const Path & path) { return baseNameOf(path) != ".git"; }; return store.addToStore( "source", - {getFSSourceAccessor(), CanonPath(tmpDir)}, + {getFSSourceAccessor(), CanonPath(tmpDir.string())}, ContentAddressMethod::Raw::NixArchive, HashAlgorithm::SHA256, {}, @@ -691,7 +839,7 @@ struct GitInputScheme : InputScheme unpackTarfile(*source, tmpDir); - return store.addToStore("source", {getFSSourceAccessor(), CanonPath(tmpDir)}); + return store.addToStore("source", {getFSSourceAccessor(), CanonPath(tmpDir.string())}); }(); auto accessor = store.getFSAccessor(storePath); @@ -702,8 +850,7 @@ struct GitInputScheme : InputScheme } std::pair, Input> - 
getAccessorFromCommit(const Settings & settings, ref store, RepoInfo & repoInfo, Input && input) const - + getAccessorFromCommit(const Settings & settings, Store & store, RepoInfo & repoInfo, Input && input) const { assert(!repoInfo.workdirInfo.isDirty); @@ -719,7 +866,7 @@ struct GitInputScheme : InputScheme if (auto repoPath = repoInfo.getPath()) { repoDir = *repoPath; if (!input.getRev()) - input.attrs.insert_or_assign("rev", GitRepo::openRepo(repoDir)->resolveRef(ref).gitRev()); + input.attrs.insert_or_assign("rev", GitRepo::openRepo(repoDir, {})->resolveRef(ref).gitRev()); } else { auto rev = input.getRev(); auto repoUrl = std::get(repoInfo.location); @@ -731,7 +878,7 @@ struct GitInputScheme : InputScheme * repo instead. */ std::filesystem::path cacheDirNonShallow = getCachePath(repoUrl.to_string(), false); if (rev && shallow && pathExists(cacheDirNonShallow)) { - auto nonShallowRepo = GitRepo::openRepo(cacheDirNonShallow, true, true); + auto nonShallowRepo = GitRepo::openRepo(cacheDirNonShallow, {.create = true, .bare = true}); if (nonShallowRepo->hasObject(*rev)) { debug( "using non-shallow cached repo for '%s' since it contains rev '%s'", @@ -745,7 +892,7 @@ struct GitInputScheme : InputScheme std::filesystem::create_directories(cacheDir.parent_path()); PathLocks cacheDirLock({cacheDir.string()}); - auto repo = GitRepo::openRepo(cacheDir, true, true); + auto repo = GitRepo::openRepo(cacheDir, {.create = true, .bare = true}); // We need to set the origin so resolving submodule URLs works repo->setRemote("origin", repoUrl.to_string()); @@ -775,7 +922,7 @@ struct GitInputScheme : InputScheme auto fetchRef = getAllRefsAttr(input) ? "refs/*:refs/*" : input.getRev() ? input.getRev()->gitRev() : ref.compare(0, 5, "refs/") == 0 ? fmt("%1%:%1%", ref) - : ref == "HEAD" ? ref + : ref == "HEAD" ? 
"HEAD:HEAD" : fmt("%1%:%1%", "refs/heads/" + ref); repo->fetch(repoUrl.to_string(), fetchRef, shallow); @@ -816,7 +963,7 @@ struct GitInputScheme : InputScheme } have_rev: - auto repo = GitRepo::openRepo(repoDir); + auto repo = GitRepo::openRepo(repoDir, {}); // FIXME: check whether rev is an ancestor of ref? @@ -855,13 +1002,13 @@ struct GitInputScheme : InputScheme * as well. */ warn("Using Nix 2.19 semantics to export Git repository '%s'.", input.to_string()); auto accessorModern = accessor; - accessor = getLegacyGitAccessor(*store, repoInfo, repoDir, rev, options); + accessor = getLegacyGitAccessor(store, repoInfo, repoDir, rev, options); if (expectedNarHash) { auto narHashLegacy = - fetchToStore2(settings, *store, {accessor}, FetchMode::DryRun, input.getName()).second; + fetchToStore2(settings, store, {accessor}, FetchMode::DryRun, input.getName()).second; if (expectedNarHash != narHashLegacy) { auto narHashModern = - fetchToStore2(settings, *store, {accessorModern}, FetchMode::DryRun, input.getName()).second; + fetchToStore2(settings, store, {accessorModern}, FetchMode::DryRun, input.getName()).second; if (expectedNarHash == narHashModern) accessor = accessorModern; } @@ -870,12 +1017,12 @@ struct GitInputScheme : InputScheme /* Backward compatibility hack for locks produced by Nix < 2.20 that depend on Nix applying Git filters, * `export-ignore` or `export-subst`. Nix >= 2.20 doesn't do those, so we may get a NAR hash mismatch. If * that happens, try again using `git archive`. 
*/ - auto narHashNew = fetchToStore2(settings, *store, {accessor}, FetchMode::DryRun, input.getName()).second; + auto narHashNew = fetchToStore2(settings, store, {accessor}, FetchMode::DryRun, input.getName()).second; if (expectedNarHash && accessor->pathExists(CanonPath(".gitattributes"))) { if (expectedNarHash != narHashNew) { - auto accessorLegacy = getLegacyGitAccessor(*store, repoInfo, repoDir, rev, options); + auto accessorLegacy = getLegacyGitAccessor(store, repoInfo, repoDir, rev, options); auto narHashLegacy = - fetchToStore2(settings, *store, {accessorLegacy}, FetchMode::DryRun, input.getName()).second; + fetchToStore2(settings, store, {accessorLegacy}, FetchMode::DryRun, input.getName()).second; if (expectedNarHash == narHashLegacy) { warn( "Git input '%s' specifies a NAR hash '%s' that was created by Nix < 2.20.\n" @@ -943,7 +1090,7 @@ struct GitInputScheme : InputScheme } std::pair, Input> - getAccessorFromWorkdir(const Settings & settings, ref store, RepoInfo & repoInfo, Input && input) const + getAccessorFromWorkdir(const Settings & settings, Store & store, RepoInfo & repoInfo, Input && input) const { auto repoPath = repoInfo.getPath().value(); @@ -952,7 +1099,7 @@ struct GitInputScheme : InputScheme for (auto & submodule : repoInfo.workdirInfo.submodules) repoInfo.workdirInfo.files.insert(submodule.path); - auto repo = GitRepo::openRepo(repoPath, false, false); + auto repo = GitRepo::openRepo(repoPath, {}); auto exportIgnore = getExportIgnoreAttr(input); @@ -992,7 +1139,7 @@ struct GitInputScheme : InputScheme } if (!repoInfo.workdirInfo.isDirty) { - auto repo = GitRepo::openRepo(repoPath); + auto repo = GitRepo::openRepo(repoPath, {}); if (auto ref = repo->getWorkdirRef()) input.attrs.insert_or_assign("ref", *ref); @@ -1027,7 +1174,7 @@ struct GitInputScheme : InputScheme } std::pair, Input> - getAccessor(const Settings & settings, ref store, const Input & _input) const override + getAccessor(const Settings & settings, Store & store, const Input & 
_input) const override { Input input(_input); @@ -1049,7 +1196,7 @@ struct GitInputScheme : InputScheme return {accessor, std::move(final)}; } - std::optional getFingerprint(ref store, const Input & input) const override + std::optional getFingerprint(Store & store, const Input & input) const override { auto options = getGitAccessorOptions(input); diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 4e418e1ebd9..b3c892c6133 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -41,18 +41,16 @@ struct GitArchiveInputScheme : InputScheme /* This ignores empty path segments for back-compat. Older versions used a tokenizeString here. */ auto path = url.pathSegments(/*skipEmpty=*/true) | std::ranges::to>(); - std::optional rev; + std::optional rev; std::optional ref; std::optional host_url; auto size = path.size(); if (size == 3) { if (std::regex_match(path[2], revRegex)) - rev = Hash::parseAny(path[2], HashAlgorithm::SHA1); - else if (isLegalRefName(path[2])) - ref = path[2]; + rev = path[2]; else - throw BadURL("in URL '%s', '%s' is not a commit hash or branch/tag name", url, path[2]); + ref = path[2]; } else if (size > 3) { std::string rs; for (auto i = std::next(path.begin(), 2); i != path.end(); i++) { @@ -61,12 +59,7 @@ struct GitArchiveInputScheme : InputScheme rs += "/"; } } - - if (isLegalRefName(rs)) { - ref = rs; - } else { - throw BadURL("in URL '%s', '%s' is not a branch/tag name", url, rs); - } + ref = rs; } else if (size < 2) throw BadURL("URL '%s' is invalid", url); @@ -74,54 +67,71 @@ struct GitArchiveInputScheme : InputScheme if (name == "rev") { if (rev) throw BadURL("URL '%s' contains multiple commit hashes", url); - rev = Hash::parseAny(value, HashAlgorithm::SHA1); + rev = value; } else if (name == "ref") { - if (!isLegalRefName(value)) - throw BadURL("URL '%s' contains an invalid branch/tag name", url); if (ref) throw BadURL("URL '%s' contains multiple branch/tag names", url); ref = value; - } else if (name 
== "host") { - if (!std::regex_match(value, hostRegex)) - throw BadURL("URL '%s' contains an invalid instance host", url); + } else if (name == "host") host_url = value; - } // FIXME: barf on unsupported attributes } - if (ref && rev) - throw BadURL("URL '%s' contains both a commit hash and a branch/tag name %s %s", url, *ref, rev->gitRev()); - - Input input{}; - input.attrs.insert_or_assign("type", std::string{schemeName()}); - input.attrs.insert_or_assign("owner", path[0]); - input.attrs.insert_or_assign("repo", path[1]); + Attrs attrs; + attrs.insert_or_assign("type", std::string{schemeName()}); + attrs.insert_or_assign("owner", path[0]); + attrs.insert_or_assign("repo", path[1]); if (rev) - input.attrs.insert_or_assign("rev", rev->gitRev()); + attrs.insert_or_assign("rev", *rev); if (ref) - input.attrs.insert_or_assign("ref", *ref); + attrs.insert_or_assign("ref", *ref); if (host_url) - input.attrs.insert_or_assign("host", *host_url); + attrs.insert_or_assign("host", *host_url); auto narHash = url.query.find("narHash"); if (narHash != url.query.end()) - input.attrs.insert_or_assign("narHash", narHash->second); + attrs.insert_or_assign("narHash", narHash->second); - return input; + return inputFromAttrs(settings, attrs); } - StringSet allowedAttrs() const override - { - return { - "owner", - "repo", - "ref", - "rev", - "narHash", - "lastModified", - "host", - "treeHash", + const std::map & allowedAttrs() const override + { + static const std::map attrs = { + { + "owner", + {}, + }, + { + "repo", + {}, + }, + { + "ref", + {}, + }, + { + "rev", + {}, + }, + { + "narHash", + {}, + }, + { + "lastModified", + {}, + }, + { + "host", + {}, + }, + { + "treeHash", + {}, + }, }; + return attrs; } std::optional inputFromAttrs(const fetchers::Settings & settings, const Attrs & attrs) const override @@ -129,6 +139,24 @@ struct GitArchiveInputScheme : InputScheme getStrAttr(attrs, "owner"); getStrAttr(attrs, "repo"); + auto ref = maybeGetStrAttr(attrs, "ref"); + auto rev = 
maybeGetStrAttr(attrs, "rev"); + if (ref && rev) + throw BadURL( + "input %s contains both a commit hash ('%s') and a branch/tag name ('%s')", + attrsToJSON(attrs), + *rev, + *ref); + + if (rev) + Hash::parseAny(*rev, HashAlgorithm::SHA1); + + if (ref && !isLegalRefName(*ref)) + throw BadURL("input %s contains an invalid branch/tag name", attrsToJSON(attrs)); + + if (auto host = maybeGetStrAttr(attrs, "host"); host && !std::regex_match(*host, hostRegex)) + throw BadURL("input %s contains an invalid instance host", attrsToJSON(attrs)); + Input input{}; input.attrs = attrs; return input; @@ -233,7 +261,7 @@ struct GitArchiveInputScheme : InputScheme std::optional treeHash; }; - virtual RefInfo getRevFromRef(const Settings & settings, nix::ref store, const Input & input) const = 0; + virtual RefInfo getRevFromRef(const Settings & settings, nix::Store & store, const Input & input) const = 0; virtual DownloadUrl getDownloadUrl(const Settings & settings, const Input & input) const = 0; @@ -243,7 +271,7 @@ struct GitArchiveInputScheme : InputScheme time_t lastModified; }; - std::pair downloadArchive(const Settings & settings, ref store, Input input) const + std::pair downloadArchive(const Settings & settings, Store & store, Input input) const { if (!maybeGetStrAttr(input.attrs, "ref")) input.attrs.insert_or_assign("ref", "HEAD"); @@ -316,7 +344,7 @@ struct GitArchiveInputScheme : InputScheme } std::pair, Input> - getAccessor(const Settings & settings, ref store, const Input & _input) const override + getAccessor(const Settings & settings, Store & store, const Input & _input) const override { auto [input, tarballInfo] = downloadArchive(settings, store, _input); @@ -347,7 +375,7 @@ struct GitArchiveInputScheme : InputScheme return input.getRev().has_value() && (settings.trustTarballsFromGitForges || input.getNarHash().has_value()); } - std::optional getFingerprint(ref store, const Input & input) const override + std::optional getFingerprint(Store & store, const Input & 
input) const override { if (auto rev = input.getRev()) return "github:" + rev->gitRev(); @@ -363,6 +391,12 @@ struct GitHubInputScheme : GitArchiveInputScheme return "github"; } + std::string schemeDescription() const override + { + // TODO + return ""; + } + std::optional> accessHeaderFromToken(const std::string & token) const override { // Github supports PAT/OAuth2 tokens and HTTP Basic @@ -389,7 +423,7 @@ struct GitHubInputScheme : GitArchiveInputScheme return getStrAttr(input.attrs, "repo"); } - RefInfo getRevFromRef(const Settings & settings, nix::ref store, const Input & input) const override + RefInfo getRevFromRef(const Settings & settings, nix::Store & store, const Input & input) const override { auto host = getHost(input); auto url = fmt( @@ -402,7 +436,8 @@ struct GitHubInputScheme : GitArchiveInputScheme Headers headers = makeHeadersWithAuthTokens(settings, host, input); auto downloadResult = downloadFile(store, settings, url, "source", headers); - auto json = nlohmann::json::parse(store->getFSAccessor(downloadResult.storePath)->readFile(CanonPath::root)); + auto json = nlohmann::json::parse( + store.requireStoreObjectAccessor(downloadResult.storePath)->readFile(CanonPath::root)); return RefInfo{ .rev = Hash::parseAny(std::string{json["sha"]}, HashAlgorithm::SHA1), @@ -426,7 +461,7 @@ struct GitHubInputScheme : GitArchiveInputScheme return DownloadUrl{parseURL(url), headers}; } - void clone(const Settings & settings, ref store, const Input & input, const std::filesystem::path & destDir) + void clone(const Settings & settings, Store & store, const Input & input, const std::filesystem::path & destDir) const override { auto host = getHost(input); @@ -443,6 +478,12 @@ struct GitLabInputScheme : GitArchiveInputScheme return "gitlab"; } + std::string schemeDescription() const override + { + // TODO + return ""; + } + std::optional> accessHeaderFromToken(const std::string & token) const override { // Gitlab supports 4 kinds of authorization, two of which are 
@@ -462,7 +503,7 @@ struct GitLabInputScheme : GitArchiveInputScheme return std::make_pair(token.substr(0, fldsplit), token.substr(fldsplit + 1)); } - RefInfo getRevFromRef(const Settings & settings, nix::ref store, const Input & input) const override + RefInfo getRevFromRef(const Settings & settings, nix::Store & store, const Input & input) const override { auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com"); // See rate limiting note below @@ -476,7 +517,8 @@ struct GitLabInputScheme : GitArchiveInputScheme Headers headers = makeHeadersWithAuthTokens(settings, host, input); auto downloadResult = downloadFile(store, settings, url, "source", headers); - auto json = nlohmann::json::parse(store->getFSAccessor(downloadResult.storePath)->readFile(CanonPath::root)); + auto json = nlohmann::json::parse( + store.requireStoreObjectAccessor(downloadResult.storePath)->readFile(CanonPath::root)); if (json.is_array() && json.size() >= 1 && json[0]["id"] != nullptr) { return RefInfo{.rev = Hash::parseAny(std::string(json[0]["id"]), HashAlgorithm::SHA1)}; @@ -507,7 +549,7 @@ struct GitLabInputScheme : GitArchiveInputScheme return DownloadUrl{parseURL(url), headers}; } - void clone(const Settings & settings, ref store, const Input & input, const std::filesystem::path & destDir) + void clone(const Settings & settings, Store & store, const Input & input, const std::filesystem::path & destDir) const override { auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com"); @@ -527,6 +569,12 @@ struct SourceHutInputScheme : GitArchiveInputScheme return "sourcehut"; } + std::string schemeDescription() const override + { + // TODO + return ""; + } + std::optional> accessHeaderFromToken(const std::string & token) const override { // SourceHut supports both PAT and OAuth2. See @@ -537,7 +585,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme // Once it is implemented, however, should work as expected. 
} - RefInfo getRevFromRef(const Settings & settings, nix::ref store, const Input & input) const override + RefInfo getRevFromRef(const Settings & settings, nix::Store & store, const Input & input) const override { // TODO: In the future, when the sourcehut graphql API is implemented for mercurial // and with anonymous access, this method should use it instead. @@ -552,13 +600,10 @@ struct SourceHutInputScheme : GitArchiveInputScheme std::string refUri; if (ref == "HEAD") { - auto file = - store->toRealPath(downloadFile(store, settings, fmt("%s/HEAD", base_url), "source", headers).storePath); - std::ifstream is(file); - std::string line; - getline(is, line); + auto downloadFileResult = downloadFile(store, settings, fmt("%s/HEAD", base_url), "source", headers); + auto contents = store.requireStoreObjectAccessor(downloadFileResult.storePath)->readFile(CanonPath::root); - auto remoteLine = git::parseLsRemoteLine(line); + auto remoteLine = git::parseLsRemoteLine(getLine(contents).first); if (!remoteLine) { throw BadURL("in '%d', couldn't resolve HEAD ref '%d'", input.to_string(), ref); } @@ -568,9 +613,9 @@ struct SourceHutInputScheme : GitArchiveInputScheme } std::regex refRegex(refUri); - auto file = store->toRealPath( - downloadFile(store, settings, fmt("%s/info/refs", base_url), "source", headers).storePath); - std::ifstream is(file); + auto downloadFileResult = downloadFile(store, settings, fmt("%s/info/refs", base_url), "source", headers); + auto contents = store.requireStoreObjectAccessor(downloadFileResult.storePath)->readFile(CanonPath::root); + std::istringstream is(contents); std::string line; std::optional id; @@ -600,7 +645,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme return DownloadUrl{parseURL(url), headers}; } - void clone(const Settings & settings, ref store, const Input & input, const std::filesystem::path & destDir) + void clone(const Settings & settings, Store & store, const Input & input, const std::filesystem::path & destDir) const 
override { auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht"); diff --git a/src/libfetchers/include/nix/fetchers/fetchers.hh b/src/libfetchers/include/nix/fetchers/fetchers.hh index 16cddb29e7e..c4b7c589d6c 100644 --- a/src/libfetchers/include/nix/fetchers/fetchers.hh +++ b/src/libfetchers/include/nix/fetchers/fetchers.hh @@ -113,7 +113,7 @@ public: * Fetch the entire input into the Nix store, returning the * location in the Nix store and the locked input. */ - std::tuple, Input> fetchToStore(const Settings & settings, ref store) const; + std::tuple, Input> fetchToStore(const Settings & settings, Store & store) const; /** * Check the locking attributes in `result` against @@ -133,17 +133,17 @@ public: * input without copying it to the store. Also return a possibly * unlocked input. */ - std::pair, Input> getAccessor(const Settings & settings, ref store) const; + std::pair, Input> getAccessor(const Settings & settings, Store & store) const; private: - std::pair, Input> getAccessorUnchecked(const Settings & settings, ref store) const; + std::pair, Input> getAccessorUnchecked(const Settings & settings, Store & store) const; public: Input applyOverrides(std::optional ref, std::optional rev) const; - void clone(const Settings & settings, ref store, const std::filesystem::path & destDir) const; + void clone(const Settings & settings, Store & store, const std::filesystem::path & destDir) const; std::optional getSourcePath() const; @@ -173,7 +173,7 @@ public: * * This is not a stable identifier between Nix versions, but not guaranteed to change either. */ - std::optional getFingerprint(ref store) const; + std::optional getFingerprint(Store & store) const; }; /** @@ -203,21 +203,34 @@ struct InputScheme */ virtual std::string_view schemeName() const = 0; + /** + * Longform description of this scheme, for documentation purposes. 
+ */ + virtual std::string schemeDescription() const = 0; + + // TODO remove these defaults + struct AttributeInfo + { + const char * type = "String"; + bool required = true; + const char * doc = ""; + }; + /** * Allowed attributes in an attribute set that is converted to an - * input. + * input, and documentation for each attribute. * - * `type` is not included from this set, because the `type` field is + * `type` is not included from this map, because the `type` field is parsed first to choose which scheme; `type` is always required. */ - virtual StringSet allowedAttrs() const = 0; + virtual const std::map & allowedAttrs() const = 0; virtual ParsedURL toURL(const Input & input, bool abbreviate = false) const; virtual Input applyOverrides(const Input & input, std::optional ref, std::optional rev) const; - virtual void clone( - const Settings & settings, ref store, const Input & input, const std::filesystem::path & destDir) const; + virtual void + clone(const Settings & settings, Store & store, const Input & input, const std::filesystem::path & destDir) const; virtual std::optional getSourcePath(const Input & input) const; @@ -228,7 +241,7 @@ struct InputScheme std::optional commitMsg) const; virtual std::pair, Input> - getAccessor(const Settings & settings, ref store, const Input & input) const = 0; + getAccessor(const Settings & settings, Store & store, const Input & input) const = 0; /** * Is this `InputScheme` part of an experimental feature? 
@@ -240,7 +253,7 @@ struct InputScheme return true; } - virtual std::optional getFingerprint(ref store, const Input & input) const + virtual std::optional getFingerprint(Store & store, const Input & input) const { return std::nullopt; } @@ -264,7 +277,12 @@ struct InputScheme void registerInputScheme(std::shared_ptr && fetcher); -nlohmann::json dumpRegisterInputSchemeInfo(); +using InputSchemeMap = std::map>; + +/** + * Use this for docs, not for finding a specific scheme + */ +const InputSchemeMap & getAllInputSchemes(); struct PublicKey { diff --git a/src/libfetchers/include/nix/fetchers/git-utils.hh b/src/libfetchers/include/nix/fetchers/git-utils.hh index 6db6e09e8a0..eada8745c3e 100644 --- a/src/libfetchers/include/nix/fetchers/git-utils.hh +++ b/src/libfetchers/include/nix/fetchers/git-utils.hh @@ -35,7 +35,14 @@ struct GitRepo { virtual ~GitRepo() {} - static ref openRepo(const std::filesystem::path & path, bool create = false, bool bare = false); + struct Options + { + bool create = false; + bool bare = false; + bool packfilesOnly = false; + }; + + static ref openRepo(const std::filesystem::path & path, Options options); virtual uint64_t getRevCount(const Hash & rev) = 0; diff --git a/src/libfetchers/include/nix/fetchers/input-cache.hh b/src/libfetchers/include/nix/fetchers/input-cache.hh index ad702dfdaa4..463927ceaee 100644 --- a/src/libfetchers/include/nix/fetchers/input-cache.hh +++ b/src/libfetchers/include/nix/fetchers/input-cache.hh @@ -16,7 +16,7 @@ struct InputCache }; CachedResult - getAccessor(const Settings & settings, ref store, const Input & originalInput, UseRegistries useRegistries); + getAccessor(const Settings & settings, Store & store, const Input & originalInput, UseRegistries useRegistries); struct CachedInput { diff --git a/src/libfetchers/include/nix/fetchers/registry.hh b/src/libfetchers/include/nix/fetchers/registry.hh index 8978231a059..ca38dd805d6 100644 --- a/src/libfetchers/include/nix/fetchers/registry.hh +++ 
b/src/libfetchers/include/nix/fetchers/registry.hh @@ -2,6 +2,7 @@ ///@file #include "nix/util/types.hh" +#include "nix/util/source-path.hh" #include "nix/fetchers/fetchers.hh" namespace nix { @@ -36,12 +37,12 @@ struct Registry { } - static std::shared_ptr read(const Settings & settings, const Path & path, RegistryType type); + static std::shared_ptr read(const Settings & settings, const SourcePath & path, RegistryType type); static std::shared_ptr read(const Settings & settings, std::string_view whence, std::string_view jsonStr, RegistryType type); - void write(const Path & path); + void write(const std::filesystem::path & path); void add(const Input & from, const Input & to, const Attrs & extraAttrs); @@ -52,11 +53,11 @@ typedef std::vector> Registries; std::shared_ptr getUserRegistry(const Settings & settings); -std::shared_ptr getCustomRegistry(const Settings & settings, const Path & p); +std::shared_ptr getCustomRegistry(const Settings & settings, const std::filesystem::path & p); -Path getUserRegistryPath(); +std::filesystem::path getUserRegistryPath(); -Registries getRegistries(const Settings & settings, ref store); +Registries getRegistries(const Settings & settings, Store & store); void overrideRegistry(const Input & from, const Input & to, const Attrs & extraAttrs); @@ -71,6 +72,6 @@ enum class UseRegistries : int { * use the registries for which the filter function returns true. 
*/ std::pair -lookupInRegistries(const Settings & settings, ref store, const Input & input, UseRegistries useRegistries); +lookupInRegistries(const Settings & settings, Store & store, const Input & input, UseRegistries useRegistries); } // namespace nix::fetchers diff --git a/src/libfetchers/include/nix/fetchers/tarball.hh b/src/libfetchers/include/nix/fetchers/tarball.hh index be816a24c9c..e9e569d3c7f 100644 --- a/src/libfetchers/include/nix/fetchers/tarball.hh +++ b/src/libfetchers/include/nix/fetchers/tarball.hh @@ -25,7 +25,7 @@ struct DownloadFileResult }; DownloadFileResult downloadFile( - ref store, + Store & store, const Settings & settings, const std::string & url, const std::string & name, @@ -43,6 +43,6 @@ struct DownloadTarballResult * Download and import a tarball into the Git cache. The result is the * Git tree hash of the root directory. */ -ref downloadTarball(ref store, const Settings & settings, const std::string & url); +ref downloadTarball(Store & store, const Settings & settings, const std::string & url); } // namespace nix::fetchers diff --git a/src/libfetchers/indirect.cc b/src/libfetchers/indirect.cc index d36c6a183f1..e629dcbac6b 100644 --- a/src/libfetchers/indirect.cc +++ b/src/libfetchers/indirect.cc @@ -60,14 +60,33 @@ struct IndirectInputScheme : InputScheme return "indirect"; } - StringSet allowedAttrs() const override + std::string schemeDescription() const override { - return { - "id", - "ref", - "rev", - "narHash", + // TODO + return ""; + } + + const std::map & allowedAttrs() const override + { + static const std::map attrs = { + { + "id", + {}, + }, + { + "ref", + {}, + }, + { + "rev", + {}, + }, + { + "narHash", + {}, + }, }; + return attrs; } std::optional inputFromAttrs(const Settings & settings, const Attrs & attrs) const override @@ -107,7 +126,7 @@ struct IndirectInputScheme : InputScheme } std::pair, Input> - getAccessor(const Settings & settings, ref store, const Input & input) const override + getAccessor(const Settings 
& settings, Store & store, const Input & input) const override { throw Error("indirect input '%s' cannot be fetched directly", input.to_string()); } diff --git a/src/libfetchers/input-cache.cc b/src/libfetchers/input-cache.cc index 6da15bd3d62..652d5ce7976 100644 --- a/src/libfetchers/input-cache.cc +++ b/src/libfetchers/input-cache.cc @@ -6,7 +6,7 @@ namespace nix::fetchers { InputCache::CachedResult InputCache::getAccessor( - const Settings & settings, ref store, const Input & originalInput, UseRegistries useRegistries) + const Settings & settings, Store & store, const Input & originalInput, UseRegistries useRegistries) { auto fetched = lookup(originalInput); Input resolvedInput = originalInput; diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc index 9f984bec6bb..f9297ce8c2f 100644 --- a/src/libfetchers/mercurial.cc +++ b/src/libfetchers/mercurial.cc @@ -68,16 +68,41 @@ struct MercurialInputScheme : InputScheme return "hg"; } - StringSet allowedAttrs() const override + std::string schemeDescription() const override { - return { - "url", - "ref", - "rev", - "revCount", - "narHash", - "name", + // TODO + return ""; + } + + const std::map & allowedAttrs() const override + { + static const std::map attrs = { + { + "url", + {}, + }, + { + "ref", + {}, + }, + { + "rev", + {}, + }, + { + "revCount", + {}, + }, + { + "narHash", + {}, + }, + { + "name", + {}, + }, }; + return attrs; } std::optional inputFromAttrs(const Settings & settings, const Attrs & attrs) const override @@ -154,7 +179,7 @@ struct MercurialInputScheme : InputScheme return {isLocal, isLocal ? 
renderUrlPathEnsureLegal(url.path) : url.to_string()}; } - StorePath fetchToStore(const Settings & settings, ref store, Input & input) const + StorePath fetchToStore(const Settings & settings, Store & store, Input & input) const { auto origRev = input.getRev(); @@ -188,11 +213,11 @@ struct MercurialInputScheme : InputScheme runHg({"status", "-R", actualUrl, "--clean", "--modified", "--added", "--no-status", "--print0"}), "\0"s); - Path actualPath(absPath(actualUrl)); + std::filesystem::path actualPath(absPath(actualUrl)); PathFilter filter = [&](const Path & p) -> bool { - assert(hasPrefix(p, actualPath)); - std::string file(p, actualPath.size() + 1); + assert(hasPrefix(p, actualPath.string())); + std::string file(p, actualPath.string().size() + 1); auto st = lstat(p); @@ -205,9 +230,9 @@ struct MercurialInputScheme : InputScheme return files.count(file); }; - auto storePath = store->addToStore( + auto storePath = store.addToStore( input.getName(), - {getFSSourceAccessor(), CanonPath(actualPath)}, + {getFSSourceAccessor(), CanonPath(actualPath.string())}, ContentAddressMethod::Raw::NixArchive, HashAlgorithm::SHA256, {}, @@ -224,7 +249,7 @@ struct MercurialInputScheme : InputScheme if (rev.algo != HashAlgorithm::SHA1) throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", rev.gitRev()); - return Cache::Key{"hgRev", {{"store", store->storeDir}, {"name", name}, {"rev", input.getRev()->gitRev()}}}; + return Cache::Key{"hgRev", {{"store", store.storeDir}, {"name", name}, {"rev", input.getRev()->gitRev()}}}; }; auto makeResult = [&](const Attrs & infoAttrs, const StorePath & storePath) -> StorePath { @@ -244,39 +269,38 @@ struct MercurialInputScheme : InputScheme /* If we have a rev, check if we have a cached store path. 
*/ if (auto rev = input.getRev()) { - if (auto res = settings.getCache()->lookupStorePath(revInfoKey(*rev), *store)) + if (auto res = settings.getCache()->lookupStorePath(revInfoKey(*rev), store)) return makeResult(res->value, res->storePath); } - Path cacheDir = - fmt("%s/hg/%s", - getCacheDir(), - hashString(HashAlgorithm::SHA256, actualUrl).to_string(HashFormat::Nix32, false)); + std::filesystem::path cacheDir = + getCacheDir() / "hg" / hashString(HashAlgorithm::SHA256, actualUrl).to_string(HashFormat::Nix32, false); /* If this is a commit hash that we already have, we don't have to pull again. */ if (!(input.getRev() && pathExists(cacheDir) - && runProgram(hgOptions({"log", "-R", cacheDir, "-r", input.getRev()->gitRev(), "--template", "1"})) + && runProgram( + hgOptions({"log", "-R", cacheDir.string(), "-r", input.getRev()->gitRev(), "--template", "1"})) .second == "1")) { Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", actualUrl)); if (pathExists(cacheDir)) { try { - runHg({"pull", "-R", cacheDir, "--", actualUrl}); + runHg({"pull", "-R", cacheDir.string(), "--", actualUrl}); } catch (ExecError & e) { - auto transJournal = cacheDir + "/.hg/store/journal"; + auto transJournal = cacheDir / ".hg" / "store" / "journal"; /* hg throws "abandoned transaction" error only if this file exists */ if (pathExists(transJournal)) { - runHg({"recover", "-R", cacheDir}); - runHg({"pull", "-R", cacheDir, "--", actualUrl}); + runHg({"recover", "-R", cacheDir.string()}); + runHg({"pull", "-R", cacheDir.string(), "--", actualUrl}); } else { throw ExecError(e.status, "'hg pull' %s", statusToString(e.status)); } } } else { - createDirs(dirOf(cacheDir)); - runHg({"clone", "--noupdate", "--", actualUrl, cacheDir}); + createDirs(dirOf(cacheDir.string())); + runHg({"clone", "--noupdate", "--", actualUrl, cacheDir.string()}); } } @@ -284,7 +308,7 @@ struct MercurialInputScheme : InputScheme auto tokens = tokenizeString>(runHg( {"log", "-R", - 
cacheDir, + cacheDir.string(), "-r", input.getRev() ? input.getRev()->gitRev() : *input.getRef(), "--template", @@ -298,17 +322,17 @@ struct MercurialInputScheme : InputScheme /* Now that we have the rev, check the cache again for a cached store path. */ - if (auto res = settings.getCache()->lookupStorePath(revInfoKey(rev), *store)) + if (auto res = settings.getCache()->lookupStorePath(revInfoKey(rev), store)) return makeResult(res->value, res->storePath); - Path tmpDir = createTempDir(); + std::filesystem::path tmpDir = createTempDir(); AutoDelete delTmpDir(tmpDir, true); - runHg({"archive", "-R", cacheDir, "-r", rev.gitRev(), tmpDir}); + runHg({"archive", "-R", cacheDir.string(), "-r", rev.gitRev(), tmpDir.string()}); - deletePath(tmpDir + "/.hg_archival.txt"); + deletePath(tmpDir / ".hg_archival.txt"); - auto storePath = store->addToStore(name, {getFSSourceAccessor(), CanonPath(tmpDir)}); + auto storePath = store.addToStore(name, {getFSSourceAccessor(), CanonPath(tmpDir.string())}); Attrs infoAttrs({ {"revCount", (uint64_t) revCount}, @@ -317,20 +341,18 @@ struct MercurialInputScheme : InputScheme if (!origRev) settings.getCache()->upsert(refToRevKey, {{"rev", rev.gitRev()}}); - settings.getCache()->upsert(revInfoKey(rev), *store, infoAttrs, storePath); + settings.getCache()->upsert(revInfoKey(rev), store, infoAttrs, storePath); return makeResult(infoAttrs, std::move(storePath)); } std::pair, Input> - getAccessor(const Settings & settings, ref store, const Input & _input) const override + getAccessor(const Settings & settings, Store & store, const Input & _input) const override { Input input(_input); auto storePath = fetchToStore(settings, store, input); - - // We just added it, it should be there. 
- auto accessor = ref{store->getFSAccessor(storePath)}; + auto accessor = store.requireStoreObjectAccessor(storePath); accessor->setPathDisplay("«" + input.to_string(true) + "»"); @@ -342,7 +364,7 @@ struct MercurialInputScheme : InputScheme return (bool) input.getRev(); } - std::optional getFingerprint(ref store, const Input & input) const override + std::optional getFingerprint(Store & store, const Input & input) const override { if (auto rev = input.getRev()) return "hg:" + rev->gitRev(); diff --git a/src/libfetchers/meson.build b/src/libfetchers/meson.build index 8b6bd55df90..cd04615e52c 100644 --- a/src/libfetchers/meson.build +++ b/src/libfetchers/meson.build @@ -32,7 +32,6 @@ libgit2 = dependency('libgit2', version : '>= 1.9') deps_private += libgit2 subdir('nix-meson-build-support/common') -subdir('nix-meson-build-support/asan-options') sources = files( 'attrs.cc', diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc index a1c3c1537e3..2f2d6976a14 100644 --- a/src/libfetchers/path.cc +++ b/src/libfetchers/path.cc @@ -40,20 +40,42 @@ struct PathInputScheme : InputScheme return "path"; } - StringSet allowedAttrs() const override + std::string schemeDescription() const override { - return { - "path", + // TODO + return ""; + } + + const std::map & allowedAttrs() const override + { + static const std::map attrs = { + { + "path", + {}, + }, /* Allow the user to pass in "fake" tree info attributes. This is useful for making a pinned tree work the same as the repository from which is exported (e.g. path:/nix/store/...-source?lastModified=1585388205&rev=b0c285...). 
*/ - "rev", - "revCount", - "lastModified", - "narHash", + { + "rev", + {}, + }, + { + "revCount", + {}, + }, + { + "lastModified", + {}, + }, + { + "narHash", + {}, + }, }; + return attrs; } std::optional inputFromAttrs(const Settings & settings, const Attrs & attrs) const override @@ -117,7 +139,7 @@ struct PathInputScheme : InputScheme } std::pair, Input> - getAccessor(const Settings & settings, ref store, const Input & _input) const override + getAccessor(const Settings & settings, Store & store, const Input & _input) const override { Input input(_input); auto path = getStrAttr(input.attrs, "path"); @@ -125,27 +147,27 @@ struct PathInputScheme : InputScheme auto absPath = getAbsPath(input); // FIXME: check whether access to 'path' is allowed. - auto storePath = store->maybeParseStorePath(absPath.string()); + auto storePath = store.maybeParseStorePath(absPath.string()); if (storePath) - store->addTempRoot(*storePath); + store.addTempRoot(*storePath); time_t mtime = 0; - if (!storePath || storePath->name() != "source" || !store->isValidPath(*storePath)) { + if (!storePath || storePath->name() != "source" || !store.isValidPath(*storePath)) { Activity act(*logger, lvlTalkative, actUnknown, fmt("copying %s to the store", absPath)); // FIXME: try to substitute storePath. auto src = sinkToSource( [&](Sink & sink) { mtime = dumpPathAndGetMtime(absPath.string(), sink, defaultPathFilter); }); - storePath = store->addToStoreFromDump(*src, "source"); + storePath = store.addToStoreFromDump(*src, "source"); } - auto accessor = ref{store->getFSAccessor(*storePath)}; + auto accessor = store.requireStoreObjectAccessor(*storePath); // To prevent `fetchToStore()` copying the path again to Nix // store, pre-create an entry in the fetcher cache. 
- auto info = store->queryPathInfo(*storePath); + auto info = store.queryPathInfo(*storePath); accessor->fingerprint = - fmt("path:%s", store->queryPathInfo(*storePath)->narHash.to_string(HashFormat::SRI, true)); + fmt("path:%s", store.queryPathInfo(*storePath)->narHash.to_string(HashFormat::SRI, true)); settings.getCache()->upsert( makeSourcePathToHashCacheKey(*accessor->fingerprint, ContentAddressMethod::Raw::NixArchive, "/"), {{"hash", info->narHash.to_string(HashFormat::SRI, true)}}); diff --git a/src/libfetchers/registry.cc b/src/libfetchers/registry.cc index 33553710824..83de80bbccf 100644 --- a/src/libfetchers/registry.cc +++ b/src/libfetchers/registry.cc @@ -10,15 +10,15 @@ namespace nix::fetchers { -std::shared_ptr Registry::read(const Settings & settings, const Path & path, RegistryType type) +std::shared_ptr Registry::read(const Settings & settings, const SourcePath & path, RegistryType type) { debug("reading registry '%s'", path); - if (!pathExists(path)) + if (!path.pathExists()) return std::make_shared(type); try { - return read(settings, path, readFile(path), type); + return read(settings, path.to_string(), path.readFile(), type); } catch (Error & e) { warn("cannot read flake registry '%s': %s", path, e.what()); return std::make_shared(type); @@ -31,7 +31,6 @@ Registry::read(const Settings & settings, std::string_view whence, std::string_v auto registry = std::make_shared(type); try { - auto json = nlohmann::json::parse(jsonStr); auto version = json.value("version", 0); @@ -65,7 +64,7 @@ Registry::read(const Settings & settings, std::string_view whence, std::string_v return registry; } -void Registry::write(const Path & path) +void Registry::write(const std::filesystem::path & path) { nlohmann::json arr; for (auto & entry : entries) { @@ -83,7 +82,7 @@ void Registry::write(const Path & path) json["version"] = 2; json["flakes"] = std::move(arr); - createDirs(dirOf(path)); + createDirs(path.parent_path()); writeFile(path, json.dump(2)); } @@ -99,31 
+98,38 @@ void Registry::remove(const Input & input) entries.end()); } -static Path getSystemRegistryPath() +static std::filesystem::path getSystemRegistryPath() { - return settings.nixConfDir + "/registry.json"; + return settings.nixConfDir / "registry.json"; } static std::shared_ptr getSystemRegistry(const Settings & settings) { - static auto systemRegistry = Registry::read(settings, getSystemRegistryPath(), Registry::System); + static auto systemRegistry = Registry::read( + settings, + SourcePath{getFSSourceAccessor(), CanonPath{getSystemRegistryPath().string()}}.resolveSymlinks(), + Registry::System); return systemRegistry; } -Path getUserRegistryPath() +std::filesystem::path getUserRegistryPath() { - return getConfigDir() + "/registry.json"; + return getConfigDir() / "registry.json"; } std::shared_ptr getUserRegistry(const Settings & settings) { - static auto userRegistry = Registry::read(settings, getUserRegistryPath(), Registry::User); + static auto userRegistry = Registry::read( + settings, + SourcePath{getFSSourceAccessor(), CanonPath{getUserRegistryPath().string()}}.resolveSymlinks(), + Registry::User); return userRegistry; } -std::shared_ptr getCustomRegistry(const Settings & settings, const Path & p) +std::shared_ptr getCustomRegistry(const Settings & settings, const std::filesystem::path & p) { - static auto customRegistry = Registry::read(settings, p, Registry::Custom); + static auto customRegistry = Registry::read( + settings, SourcePath{getFSSourceAccessor(), CanonPath{p.string()}}.resolveSymlinks(), Registry::Custom); return customRegistry; } @@ -138,7 +144,7 @@ void overrideRegistry(const Input & from, const Input & to, const Attrs & extraA getFlagRegistry()->add(from, to, extraAttrs); } -static std::shared_ptr getGlobalRegistry(const Settings & settings, ref store) +static std::shared_ptr getGlobalRegistry(const Settings & settings, Store & store) { static auto reg = [&]() { try { @@ -147,14 +153,19 @@ static std::shared_ptr 
getGlobalRegistry(const Settings & settings, re return std::make_shared(Registry::Global); // empty registry } - if (!isAbsolute(path)) { - auto storePath = downloadFile(store, settings, path, "flake-registry.json").storePath; - if (auto store2 = store.dynamic_pointer_cast()) - store2->addPermRoot(storePath, getCacheDir() + "/flake-registry.json"); - path = store->toRealPath(storePath); - } - - return Registry::read(settings, path, Registry::Global); + return Registry::read( + settings, + [&] -> SourcePath { + if (!isAbsolute(path)) { + auto storePath = downloadFile(store, settings, path, "flake-registry.json").storePath; + if (auto store2 = dynamic_cast(&store)) + store2->addPermRoot(storePath, (getCacheDir() / "flake-registry.json").string()); + return {store.requireStoreObjectAccessor(storePath)}; + } else { + return SourcePath{getFSSourceAccessor(), CanonPath{path}}.resolveSymlinks(); + } + }(), + Registry::Global); } catch (Error & e) { warn( "cannot fetch global flake registry '%s', will use builtin fallback registry: %s", @@ -173,7 +184,7 @@ static std::shared_ptr getGlobalRegistry(const Settings & settings, re return reg; } -Registries getRegistries(const Settings & settings, ref store) +Registries getRegistries(const Settings & settings, Store & store) { Registries registries; registries.push_back(getFlagRegistry()); @@ -184,7 +195,7 @@ Registries getRegistries(const Settings & settings, ref store) } std::pair -lookupInRegistries(const Settings & settings, ref store, const Input & _input, UseRegistries useRegistries) +lookupInRegistries(const Settings & settings, Store & store, const Input & _input, UseRegistries useRegistries) { Attrs extraAttrs; int n = 0; diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index fbdd1185070..3b9e756fec8 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -13,7 +13,7 @@ namespace nix::fetchers { DownloadFileResult downloadFile( - ref store, + Store & store, const Settings & 
settings, const std::string & url, const std::string & name, @@ -28,7 +28,7 @@ DownloadFileResult downloadFile( {"name", name}, }}}; - auto cached = settings.getCache()->lookupStorePath(key, *store); + auto cached = settings.getCache()->lookupStorePath(key, store); auto useCached = [&]() -> DownloadFileResult { return { @@ -74,7 +74,7 @@ DownloadFileResult downloadFile( dumpString(res.data, sink); auto hash = hashString(HashAlgorithm::SHA256, res.data); auto info = ValidPathInfo::makeFromCA( - *store, + store, name, FixedOutputInfo{ .method = FileIngestionMethod::Flat, @@ -84,7 +84,7 @@ DownloadFileResult downloadFile( hashString(HashAlgorithm::SHA256, sink.s)); info.narSize = sink.s.size(); auto source = StringSource{sink.s}; - store->addToStore(info, source, NoRepair, NoCheckSigs); + store.addToStore(info, source, NoRepair, NoCheckSigs); storePath = std::move(info.path); } @@ -93,7 +93,7 @@ DownloadFileResult downloadFile( key.second.insert_or_assign("url", url); assert(!res.urls.empty()); infoAttrs.insert_or_assign("url", *res.urls.rbegin()); - settings.getCache()->upsert(key, *store, infoAttrs, *storePath); + settings.getCache()->upsert(key, store, infoAttrs, *storePath); } return { @@ -214,7 +214,7 @@ static DownloadTarballResult downloadTarball_( return attrsToResult(infoAttrs); } -ref downloadTarball(ref store, const Settings & settings, const std::string & url) +ref downloadTarball(Store & store, const Settings & settings, const std::string & url) { /* Go through Input::getAccessor() to ensure that the resulting accessor has a fingerprint. */ @@ -278,7 +278,7 @@ struct CurlInputScheme : InputScheme HTTP request. Now that we've processed the Nix-specific attributes above, remove them so we don't also send them as part of the HTTP request. 
*/ - for (auto & param : allowedAttrs()) + for (auto & [param, _] : allowedAttrs()) url.query.erase(param); input.attrs.insert_or_assign("type", std::string{schemeName()}); @@ -286,18 +286,83 @@ struct CurlInputScheme : InputScheme return input; } - StringSet allowedAttrs() const override + static const std::map & allowedAttrsImpl() { - return { - "type", - "url", - "narHash", - "name", - "unpack", - "rev", - "revCount", - "lastModified", + static const std::map attrs = { + { + "url", + { + .type = "String", + .required = true, + .doc = R"( + Supported protocols: + + - `https` + + > **Example** + > + > ```nix + > fetchTree { + > type = "file"; + > url = "https://example.com/index.html"; + > } + > ``` + + - `http` + + Insecure HTTP transfer for legacy sources. + + > **Warning** + > + > HTTP performs no encryption or authentication. + > Use a `narHash` known in advance to ensure the output has expected contents. + + - `file` + + A file on the local file system. + + > **Example** + > + > ```nix + > fetchTree { + > type = "file"; + > url = "file:///home/eelco/nix/README.md"; + > } + > ``` + )", + }, + }, + { + "narHash", + {}, + }, + { + "name", + {}, + }, + { + "unpack", + {}, + }, + { + "rev", + {}, + }, + { + "revCount", + {}, + }, + { + "lastModified", + {}, + }, }; + return attrs; + } + + const std::map & allowedAttrs() const override + { + return allowedAttrsImpl(); } std::optional inputFromAttrs(const Settings & settings, const Attrs & attrs) const override @@ -332,6 +397,14 @@ struct FileInputScheme : CurlInputScheme return "file"; } + std::string schemeDescription() const override + { + return stripIndentation(R"( + Place a plain file into the Nix store. 
+ This is similar to [`builtins.fetchurl`](@docroot@/language/builtins.md#builtins-fetchurl) + )"); + } + bool isValidURL(const ParsedURL & url, bool requireTree) const override { auto parsedUrlScheme = parseUrlScheme(url.scheme); @@ -341,7 +414,7 @@ struct FileInputScheme : CurlInputScheme } std::pair, Input> - getAccessor(const Settings & settings, ref store, const Input & _input) const override + getAccessor(const Settings & settings, Store & store, const Input & _input) const override { auto input(_input); @@ -351,10 +424,10 @@ struct FileInputScheme : CurlInputScheme tarballs. */ auto file = downloadFile(store, settings, getStrAttr(input.attrs, "url"), input.getName()); - auto narHash = store->queryPathInfo(file.storePath)->narHash; + auto narHash = store.queryPathInfo(file.storePath)->narHash; input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true)); - auto accessor = ref{store->getFSAccessor(file.storePath)}; + auto accessor = ref{store.getFSAccessor(file.storePath)}; accessor->setPathDisplay("«" + input.to_string(true) + "»"); @@ -369,6 +442,34 @@ struct TarballInputScheme : CurlInputScheme return "tarball"; } + std::string schemeDescription() const override + { + return stripIndentation(R"( + Download a tar archive and extract it into the Nix store. 
+ This has the same underlying implementation as [`builtins.fetchTarball`](@docroot@/language/builtins.md#builtins-fetchTarball) + )"); + } + + const std::map & allowedAttrs() const override + { + static const std::map attrs = [] { + auto attrs = CurlInputScheme::allowedAttrsImpl(); + // Override the "url" attribute to add tarball-specific example + attrs["url"].doc = R"( + > **Example** + > + > ```nix + > fetchTree { + > type = "tarball"; + > url = "https://github.com/NixOS/nixpkgs/tarball/nixpkgs-23.11"; + > } + > ``` + )"; + return attrs; + }(); + return attrs; + } + bool isValidURL(const ParsedURL & url, bool requireTree) const override { auto parsedUrlScheme = parseUrlScheme(url.scheme); @@ -379,7 +480,7 @@ struct TarballInputScheme : CurlInputScheme } std::pair, Input> - getAccessor(const Settings & settings, ref store, const Input & _input) const override + getAccessor(const Settings & settings, Store & store, const Input & _input) const override { auto input(_input); @@ -404,7 +505,7 @@ struct TarballInputScheme : CurlInputScheme return {result.accessor, input}; } - std::optional getFingerprint(ref store, const Input & input) const override + std::optional getFingerprint(Store & store, const Input & input) const override { if (auto narHash = input.getNarHash()) return "tarball:" + narHash->to_string(HashFormat::SRI, true); diff --git a/src/libflake-c/meson.build b/src/libflake-c/meson.build index d0d45cfa813..fddb39bdf96 100644 --- a/src/libflake-c/meson.build +++ b/src/libflake-c/meson.build @@ -32,7 +32,6 @@ deps_public_maybe_subproject = [ subdir('nix-meson-build-support/subprojects') subdir('nix-meson-build-support/common') -subdir('nix-meson-build-support/asan-options') sources = files( 'nix_api_flake.cc', diff --git a/src/libflake-c/nix_api_flake.cc b/src/libflake-c/nix_api_flake.cc index 5c28f973bd3..793db44b438 100644 --- a/src/libflake-c/nix_api_flake.cc +++ b/src/libflake-c/nix_api_flake.cc @@ -200,7 +200,7 @@ nix_value * 
nix_locked_flake_get_output_attrs( nix_clear_err(context); try { auto v = nix_alloc_value(context, evalState); - nix::flake::callFlake(evalState->state, *lockedFlake->lockedFlake, v->value); + nix::flake::callFlake(evalState->state, *lockedFlake->lockedFlake, *v->value); return v; } NIXC_CATCH_ERRS_NULL diff --git a/src/libflake-tests/flakeref.cc b/src/libflake-tests/flakeref.cc index c31a3cc49f0..3cc655907b3 100644 --- a/src/libflake-tests/flakeref.cc +++ b/src/libflake-tests/flakeref.cc @@ -1,8 +1,15 @@ #include +#include +#include +#include #include "nix/fetchers/fetch-settings.hh" #include "nix/flake/flakeref.hh" #include "nix/fetchers/attrs.hh" +#include "nix/fetchers/fetchers.hh" +#include "nix/util/configuration.hh" +#include "nix/util/error.hh" +#include "nix/util/experimental-features.hh" namespace nix { diff --git a/src/libflake-tests/meson.build b/src/libflake-tests/meson.build index 28f9257a30f..f1e7762af67 100644 --- a/src/libflake-tests/meson.build +++ b/src/libflake-tests/meson.build @@ -34,7 +34,6 @@ gtest = dependency('gtest', main : true) deps_private += gtest subdir('nix-meson-build-support/common') -subdir('nix-meson-build-support/asan-options') sources = files( 'flakeref.cc', @@ -59,7 +58,7 @@ this_exe = executable( test( meson.project_name(), this_exe, - env : asan_test_options_env + { + env : { '_NIX_TEST_UNIT_DATA' : meson.current_source_dir() / 'data', 'HOME' : meson.current_build_dir() / 'test-home', }, diff --git a/src/libflake-tests/nix_api_flake.cc b/src/libflake-tests/nix_api_flake.cc index f7e0cb71980..ec690b8124d 100644 --- a/src/libflake-tests/nix_api_flake.cc +++ b/src/libflake-tests/nix_api_flake.cc @@ -1,15 +1,17 @@ +#include +#include +#include + #include "nix/util/file-system.hh" #include "nix_api_store.h" #include "nix_api_util.h" #include "nix_api_expr.h" #include "nix_api_value.h" #include "nix_api_flake.h" - -#include "nix/expr/tests/nix_api_expr.hh" #include "nix/util/tests/string_callback.hh" - -#include -#include 
+#include "nix/store/tests/nix_api_store.hh" +#include "nix/util/tests/nix_api_util.hh" +#include "nix_api_fetchers.h" namespace nixC { @@ -84,7 +86,7 @@ TEST_F(nix_api_store_test, nix_api_load_flake) auto tmpDir = nix::createTempDir(); nix::AutoDelete delTmpDir(tmpDir, true); - nix::writeFile(tmpDir + "/flake.nix", R"( + nix::writeFile(tmpDir / "flake.nix", R"( { outputs = { ... }: { hello = "potato"; @@ -119,7 +121,8 @@ TEST_F(nix_api_store_test, nix_api_load_flake) assert_ctx_ok(); ASSERT_NE(nullptr, parseFlags); - auto r0 = nix_flake_reference_parse_flags_set_base_directory(ctx, parseFlags, tmpDir.c_str(), tmpDir.size()); + auto r0 = nix_flake_reference_parse_flags_set_base_directory( + ctx, parseFlags, tmpDir.string().c_str(), tmpDir.string().size()); assert_ctx_ok(); ASSERT_EQ(NIX_OK, r0); @@ -175,8 +178,8 @@ TEST_F(nix_api_store_test, nix_api_load_flake_with_flags) auto tmpDir = nix::createTempDir(); nix::AutoDelete delTmpDir(tmpDir, true); - nix::createDirs(tmpDir + "/b"); - nix::writeFile(tmpDir + "/b/flake.nix", R"( + nix::createDirs(tmpDir / "b"); + nix::writeFile(tmpDir / "b" / "flake.nix", R"( { outputs = { ... }: { hello = "BOB"; @@ -184,18 +187,18 @@ TEST_F(nix_api_store_test, nix_api_load_flake_with_flags) } )"); - nix::createDirs(tmpDir + "/a"); - nix::writeFile(tmpDir + "/a/flake.nix", R"( + nix::createDirs(tmpDir / "a"); + nix::writeFile(tmpDir / "a" / "flake.nix", R"( { - inputs.b.url = ")" + tmpDir + R"(/b"; + inputs.b.url = ")" + tmpDir.string() + R"(/b"; outputs = { b, ... }: { hello = b.hello; }; } )"); - nix::createDirs(tmpDir + "/c"); - nix::writeFile(tmpDir + "/c/flake.nix", R"( + nix::createDirs(tmpDir / "c"); + nix::writeFile(tmpDir / "c" / "flake.nix", R"( { outputs = { ... 
}: { hello = "Claire"; @@ -228,7 +231,8 @@ TEST_F(nix_api_store_test, nix_api_load_flake_with_flags) assert_ctx_ok(); ASSERT_NE(nullptr, parseFlags); - auto r0 = nix_flake_reference_parse_flags_set_base_directory(ctx, parseFlags, tmpDir.c_str(), tmpDir.size()); + auto r0 = nix_flake_reference_parse_flags_set_base_directory( + ctx, parseFlags, tmpDir.string().c_str(), tmpDir.string().size()); assert_ctx_ok(); ASSERT_EQ(NIX_OK, r0); diff --git a/src/libflake-tests/package.nix b/src/libflake-tests/package.nix index 49eaed151c3..2a359e1c2b4 100644 --- a/src/libflake-tests/package.nix +++ b/src/libflake-tests/package.nix @@ -59,7 +59,6 @@ mkMesonExecutable (finalAttrs: { buildInputs = [ writableTmpDirAsHomeHook ]; } ('' - export ASAN_OPTIONS=abort_on_error=1:print_summary=1:detect_leaks=0 export _NIX_TEST_UNIT_DATA=${resolvePath ./data} ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} touch $out diff --git a/src/libflake-tests/url-name.cc b/src/libflake-tests/url-name.cc index 81ba516c8e4..64cbe5c9db0 100644 --- a/src/libflake-tests/url-name.cc +++ b/src/libflake-tests/url-name.cc @@ -1,6 +1,8 @@ -#include "nix/flake/url-name.hh" #include +#include "nix/flake/url-name.hh" +#include "nix/util/url.hh" + namespace nix { /* ----------- tests for url-name.hh --------------------------------------------------*/ diff --git a/src/libflake/config.cc b/src/libflake/config.cc index c9071f601f9..fd0e9c75fdc 100644 --- a/src/libflake/config.cc +++ b/src/libflake/config.cc @@ -1,18 +1,39 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + #include "nix/util/users.hh" #include "nix/util/config-global.hh" #include "nix/flake/settings.hh" #include "nix/flake/flake.hh" - -#include +#include "nix/util/ansicolor.hh" +#include "nix/util/configuration.hh" +#include "nix/util/file-system.hh" +#include "nix/util/fmt.hh" +#include "nix/util/logging.hh" +#include 
"nix/util/strings.hh" +#include "nix/util/types.hh" +#include "nix/util/util.hh" namespace nix::flake { // setting name -> setting value -> allow or ignore. typedef std::map> TrustedList; -Path trustedListPath() +std::filesystem::path trustedListPath() { - return getDataDir() + "/trusted-settings.json"; + return getDataDir() / "trusted-settings.json"; } static TrustedList readTrustedList() @@ -27,7 +48,7 @@ static TrustedList readTrustedList() static void writeTrustedList(const TrustedList & trustedList) { auto path = trustedListPath(); - createDirs(dirOf(path)); + createDirs(path.parent_path()); writeFile(path, nlohmann::json(trustedList).dump()); } diff --git a/src/libflake/flake-primops.cc b/src/libflake/flake-primops.cc index 65de7ff18f1..3bbe232b0a1 100644 --- a/src/libflake/flake-primops.cc +++ b/src/libflake/flake-primops.cc @@ -1,8 +1,34 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include + #include "nix/flake/flake-primops.hh" #include "nix/expr/eval.hh" #include "nix/flake/flake.hh" #include "nix/flake/flakeref.hh" #include "nix/flake/settings.hh" +#include "nix/expr/attr-set.hh" +#include "nix/expr/eval-error.hh" +#include "nix/expr/eval-inline.hh" +#include "nix/expr/eval-settings.hh" +#include "nix/expr/symbol-table.hh" +#include "nix/expr/value.hh" +#include "nix/fetchers/attrs.hh" +#include "nix/fetchers/fetchers.hh" +#include "nix/util/configuration.hh" +#include "nix/util/error.hh" +#include "nix/util/experimental-features.hh" +#include "nix/util/pos-idx.hh" +#include "nix/util/pos-table.hh" +#include "nix/util/source-path.hh" +#include "nix/util/types.hh" +#include "nix/util/util.hh" namespace nix::flake::primops { @@ -66,7 +92,7 @@ static void prim_parseFlakeRef(EvalState & state, const PosIdx pos, Value ** arg auto & vv = binds.alloc(s); std::visit( overloaded{ - [&vv](const std::string & value) { vv.mkString(value); }, + [&vv, &state](const std::string & value) { vv.mkString(value, state.mem); }, 
[&vv](const uint64_t & value) { vv.mkInt(value); }, [&vv](const Explicit & value) { vv.mkBool(value.t); }}, value); @@ -128,7 +154,7 @@ static void prim_flakeRefToString(EvalState & state, const PosIdx pos, Value ** } } auto flakeRef = FlakeRef::fromAttrs(state.fetchSettings, attrs); - v.mkString(flakeRef.to_string()); + v.mkString(flakeRef.to_string(), state.mem); } nix::PrimOp flakeRefToString({ diff --git a/src/libflake/flake.cc b/src/libflake/flake.cc index c5c00a43724..2df431121f3 100644 --- a/src/libflake/flake.cc +++ b/src/libflake/flake.cc @@ -1,9 +1,32 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + #include "nix/util/terminal.hh" +#include "nix/util/ref.hh" +#include "nix/util/environment-variables.hh" #include "nix/flake/flake.hh" #include "nix/expr/eval.hh" +#include "nix/expr/eval-cache.hh" #include "nix/expr/eval-settings.hh" #include "nix/flake/lockfile.hh" -#include "nix/expr/primops.hh" #include "nix/expr/eval-inline.hh" #include "nix/store/store-api.hh" #include "nix/fetchers/fetchers.hh" @@ -11,15 +34,37 @@ #include "nix/fetchers/fetch-settings.hh" #include "nix/flake/settings.hh" #include "nix/expr/value-to-json.hh" -#include "nix/store/local-fs-store.hh" #include "nix/fetchers/fetch-to-store.hh" #include "nix/util/memory-source-accessor.hh" #include "nix/util/mounted-source-accessor.hh" #include "nix/fetchers/input-cache.hh" - -#include +#include "nix/expr/attr-set.hh" +#include "nix/expr/eval-error.hh" +#include "nix/expr/nixexpr.hh" +#include "nix/expr/symbol-table.hh" +#include "nix/expr/value.hh" +#include "nix/expr/value/context.hh" +#include "nix/fetchers/attrs.hh" +#include "nix/fetchers/registry.hh" +#include "nix/flake/flakeref.hh" +#include "nix/store/path.hh" +#include "nix/util/canon-path.hh" +#include "nix/util/configuration.hh" +#include "nix/util/error.hh" +#include 
"nix/util/experimental-features.hh" +#include "nix/util/file-system.hh" +#include "nix/util/fmt.hh" +#include "nix/util/hash.hh" +#include "nix/util/logging.hh" +#include "nix/util/pos-idx.hh" +#include "nix/util/pos-table.hh" +#include "nix/util/position.hh" +#include "nix/util/source-path.hh" +#include "nix/util/types.hh" +#include "nix/util/util.hh" namespace nix { +struct SourceAccessor; using namespace flake; using namespace fetchers; @@ -55,7 +100,7 @@ static void parseFlakeInputAttr(EvalState & state, const nix::Attr & attr, fetch #pragma GCC diagnostic ignored "-Wswitch-enum" switch (attr.value->type()) { case nString: - attrs.emplace(state.symbols[attr.name], attr.value->c_str()); + attrs.emplace(state.symbols[attr.name], std::string(attr.value->string_view())); break; case nBool: attrs.emplace(state.symbols[attr.name], Explicit{attr.value->boolean()}); @@ -141,7 +186,7 @@ static FlakeInput parseFlakeInput( parseFlakeInputs(state, attr.value, attr.pos, lockRootAttrPath, flakeDir, false).first; } else if (attr.name == sFollows) { expectType(state, nString, *attr.value, attr.pos); - auto follows(parseInputAttrPath(attr.value->c_str())); + auto follows(parseInputAttrPath(attr.value->string_view())); follows.insert(follows.begin(), lockRootAttrPath.begin(), lockRootAttrPath.end()); input.follows = follows; } else @@ -228,7 +273,7 @@ static Flake readFlake( if (auto description = vInfo.attrs()->get(state.s.description)) { expectType(state, nString, *description->value, description->pos); - flake.description = description->value->c_str(); + flake.description = description->value->string_view(); } auto sInputs = state.symbols.create("inputs"); @@ -245,12 +290,15 @@ static Flake readFlake( if (auto outputs = vInfo.attrs()->get(sOutputs)) { expectType(state, nFunction, *outputs->value, outputs->pos); - if (outputs->value->isLambda() && outputs->value->lambda().fun->hasFormals()) { - for (auto & formal : outputs->value->lambda().fun->formals->formals) { - if 
(formal.name != state.s.self) - flake.inputs.emplace( - state.symbols[formal.name], - FlakeInput{.ref = parseFlakeRef(state.fetchSettings, std::string(state.symbols[formal.name]))}); + if (outputs->value->isLambda()) { + if (auto formals = outputs->value->lambda().fun->getFormals()) { + for (auto & formal : formals->formals) { + if (formal.name != state.s.self) + flake.inputs.emplace( + state.symbols[formal.name], + FlakeInput{ + .ref = parseFlakeRef(state.fetchSettings, std::string(state.symbols[formal.name]))}); + } } } @@ -335,7 +383,7 @@ static Flake getFlake( { // Fetch a lazy tree first. auto cachedInput = - state.inputCache->getAccessor(state.fetchSettings, state.store, originalRef.input, useRegistries); + state.inputCache->getAccessor(state.fetchSettings, *state.store, originalRef.input, useRegistries); auto subdir = fetchers::maybeGetStrAttr(cachedInput.extraAttrs, "dir").value_or(originalRef.subdir); auto resolvedRef = FlakeRef(std::move(cachedInput.resolvedInput), subdir); @@ -352,7 +400,7 @@ static Flake getFlake( // FIXME: need to remove attrs that are invalidated by the changed input attrs, such as 'narHash'. newLockedRef.input.attrs.erase("narHash"); auto cachedInput2 = state.inputCache->getAccessor( - state.fetchSettings, state.store, newLockedRef.input, fetchers::UseRegistries::No); + state.fetchSettings, *state.store, newLockedRef.input, fetchers::UseRegistries::No); cachedInput.accessor = cachedInput2.accessor; lockedRef = FlakeRef(std::move(cachedInput2.lockedInput), newLockedRef.subdir); } @@ -468,8 +516,8 @@ lockFlake(const Settings & settings, EvalState & state, const FlakeRef & topRef, /* Get the overrides (i.e. attributes of the form 'inputs.nixops.inputs.nixpkgs.url = ...'). 
*/ - std::function addOverrides; - addOverrides = [&](const FlakeInput & input, const InputAttrPath & prefix) { + auto addOverrides = + [&](this const auto & addOverrides, const FlakeInput & input, const InputAttrPath & prefix) -> void { for (auto & [idOverride, inputOverride] : input.overrides) { auto inputAttrPath(prefix); inputAttrPath.push_back(idOverride); @@ -745,7 +793,7 @@ lockFlake(const Settings & settings, EvalState & state, const FlakeRef & topRef, return {*resolvedPath, *input.ref}; } else { auto cachedInput = state.inputCache->getAccessor( - state.fetchSettings, state.store, input.ref->input, useRegistriesTop); + state.fetchSettings, *state.store, input.ref->input, useRegistriesInputs); auto resolvedRef = FlakeRef(std::move(cachedInput.resolvedInput), input.ref->subdir); @@ -907,7 +955,7 @@ static ref makeInternalFS() internalFS->setPathDisplay("«flakes-internal»", ""); internalFS->addFile( CanonPath("call-flake.nix"), -#include "call-flake.nix.gen.hh" +#include "call-flake.nix.gen.hh" // IWYU pragma: keep ); return internalFS; } @@ -948,7 +996,7 @@ void callFlake(EvalState & state, const LockedFlake & lockedFlake, Value & vRes) auto key = keyMap.find(node); assert(key != keyMap.end()); - override.alloc(state.symbols.create("dir")).mkString(CanonPath(subdir).rel()); + override.alloc(state.symbols.create("dir")).mkString(CanonPath(subdir).rel(), state.mem); overrides.alloc(state.symbols.create(key->second)).mkAttrs(override); } @@ -958,7 +1006,7 @@ void callFlake(EvalState & state, const LockedFlake & lockedFlake, Value & vRes) Value * vCallFlake = requireInternalFile(state, CanonPath("call-flake.nix")); auto vLocks = state.allocValue(); - vLocks->mkString(lockFileStr); + vLocks->mkString(lockFileStr, state.mem); auto vFetchFinalTree = get(state.internalPrimOps, "fetchFinalTree"); assert(vFetchFinalTree); @@ -967,9 +1015,7 @@ void callFlake(EvalState & state, const LockedFlake & lockedFlake, Value & vRes) state.callFunction(*vCallFlake, args, vRes, 
noPos); } -} // namespace flake - -std::optional LockedFlake::getFingerprint(ref store, const fetchers::Settings & fetchSettings) const +std::optional LockedFlake::getFingerprint(Store & store, const fetchers::Settings & fetchSettings) const { if (lockFile.isUnlocked(fetchSettings)) return std::nullopt; @@ -996,4 +1042,41 @@ std::optional LockedFlake::getFingerprint(ref store, const f Flake::~Flake() {} +ref openEvalCache(EvalState & state, ref lockedFlake) +{ + auto fingerprint = state.settings.useEvalCache && state.settings.pureEval + ? lockedFlake->getFingerprint(*state.store, state.fetchSettings) + : std::nullopt; + auto rootLoader = [&state, lockedFlake]() { + /* For testing whether the evaluation cache is + complete. */ + if (getEnv("NIX_ALLOW_EVAL").value_or("1") == "0") + throw Error("not everything is cached, but evaluation is not allowed"); + + auto vFlake = state.allocValue(); + callFlake(state, *lockedFlake, *vFlake); + + state.forceAttrs(*vFlake, noPos, "while parsing cached flake data"); + + auto aOutputs = vFlake->attrs()->get(state.symbols.create("outputs")); + assert(aOutputs); + + return aOutputs->value; + }; + + if (fingerprint) { + auto search = state.evalCaches.find(fingerprint.value()); + if (search == state.evalCaches.end()) { + search = state.evalCaches + .emplace(fingerprint.value(), make_ref(fingerprint, state, rootLoader)) + .first; + } + return search->second; + } else { + return make_ref(std::nullopt, state, rootLoader); + } +} + +} // namespace flake + } // namespace nix diff --git a/src/libflake/flakeref.cc b/src/libflake/flakeref.cc index 3c0531f14dd..d186db8ac85 100644 --- a/src/libflake/flakeref.cc +++ b/src/libflake/flakeref.cc @@ -1,11 +1,40 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + #include "nix/flake/flakeref.hh" -#include "nix/store/store-api.hh" #include "nix/util/url.hh" #include "nix/util/url-parts.hh" #include "nix/fetchers/fetchers.hh" 
+#include "nix/util/error.hh" +#include "nix/util/file-system.hh" +#include "nix/util/fmt.hh" +#include "nix/util/logging.hh" +#include "nix/util/strings.hh" +#include "nix/util/util.hh" +#include "nix/fetchers/attrs.hh" +#include "nix/fetchers/registry.hh" +#include "nix/store/outputs-spec.hh" +#include "nix/util/ref.hh" +#include "nix/util/types.hh" #include "nix/fetchers/fetch-settings.hh" namespace nix { +class Store; +struct SourceAccessor; + +namespace fetchers { +struct Settings; +} // namespace fetchers #if 0 // 'dir' path elements cannot start with a '.'. We also reject @@ -36,8 +65,8 @@ std::ostream & operator<<(std::ostream & str, const FlakeRef & flakeRef) return str; } -FlakeRef FlakeRef::resolve( - const fetchers::Settings & fetchSettings, ref store, fetchers::UseRegistries useRegistries) const +FlakeRef +FlakeRef::resolve(const fetchers::Settings & fetchSettings, Store & store, fetchers::UseRegistries useRegistries) const { auto [input2, extraAttrs] = lookupInRegistries(fetchSettings, store, input, useRegistries); return FlakeRef(std::move(input2), fetchers::maybeGetStrAttr(extraAttrs, "dir").value_or(subdir)); @@ -46,7 +75,7 @@ FlakeRef FlakeRef::resolve( FlakeRef parseFlakeRef( const fetchers::Settings & fetchSettings, const std::string & url, - const std::optional & baseDir, + const std::optional & baseDir, bool allowMissing, bool isFlake, bool preserveRelativePaths) @@ -74,7 +103,7 @@ fromParsedURL(const fetchers::Settings & fetchSettings, ParsedURL && parsedURL, std::pair parsePathFlakeRefWithFragment( const fetchers::Settings & fetchSettings, const std::string & url, - const std::optional & baseDir, + const std::optional & baseDir, bool allowMissing, bool isFlake, bool preserveRelativePaths) @@ -94,7 +123,7 @@ std::pair parsePathFlakeRefWithFragment( to 'baseDir'). If so, search upward to the root of the repo (i.e. the directory containing .git). 
*/ - path = absPath(path, baseDir, true); + path = absPath(path, baseDir->string(), true); if (isFlake) { @@ -216,7 +245,7 @@ parseFlakeIdRef(const fetchers::Settings & fetchSettings, const std::string & ur std::optional> parseURLFlakeRef( const fetchers::Settings & fetchSettings, const std::string & url, - const std::optional & baseDir, + const std::optional & baseDir, bool isFlake) { try { @@ -225,7 +254,7 @@ std::optional> parseURLFlakeRef( /* Here we know that the path must not contain encoded '/' or NUL bytes. */ auto path = renderUrlPathEnsureLegal(parsed.path); if (!isAbsolute(path)) - parsed.path = splitString>(absPath(path, *baseDir), "/"); + parsed.path = splitString>(absPath(path, baseDir->string()), "/"); } return fromParsedURL(fetchSettings, std::move(parsed), isFlake); } catch (BadURL &) { @@ -236,7 +265,7 @@ std::optional> parseURLFlakeRef( std::pair parseFlakeRefWithFragment( const fetchers::Settings & fetchSettings, const std::string & url, - const std::optional & baseDir, + const std::optional & baseDir, bool allowMissing, bool isFlake, bool preserveRelativePaths) @@ -262,7 +291,7 @@ FlakeRef FlakeRef::fromAttrs(const fetchers::Settings & fetchSettings, const fet } std::pair, FlakeRef> -FlakeRef::lazyFetch(const fetchers::Settings & fetchSettings, ref store) const +FlakeRef::lazyFetch(const fetchers::Settings & fetchSettings, Store & store) const { auto [accessor, lockedInput] = input.getAccessor(fetchSettings, store); return {accessor, FlakeRef(std::move(lockedInput), subdir)}; @@ -320,7 +349,7 @@ FlakeRef FlakeRef::canonicalize() const std::tuple parseFlakeRefWithFragmentAndExtendedOutputsSpec( const fetchers::Settings & fetchSettings, const std::string & url, - const std::optional & baseDir, + const std::optional & baseDir, bool allowMissing, bool isFlake) { diff --git a/src/libflake/include/nix/flake/flake-primops.hh b/src/libflake/include/nix/flake/flake-primops.hh index 35a7128f4fd..b333e33d706 100644 --- 
a/src/libflake/include/nix/flake/flake-primops.hh +++ b/src/libflake/include/nix/flake/flake-primops.hh @@ -1,7 +1,12 @@ #pragma once #include "nix/expr/eval.hh" -#include "nix/flake/settings.hh" + +namespace nix { +namespace flake { +struct Settings; +} // namespace flake +} // namespace nix namespace nix::flake::primops { diff --git a/src/libflake/include/nix/flake/flake.hh b/src/libflake/include/nix/flake/flake.hh index 3c8acb2b72d..11c4cc688fb 100644 --- a/src/libflake/include/nix/flake/flake.hh +++ b/src/libflake/include/nix/flake/flake.hh @@ -5,6 +5,7 @@ #include "nix/flake/flakeref.hh" #include "nix/flake/lockfile.hh" #include "nix/expr/value.hh" +#include "nix/expr/eval-cache.hh" namespace nix { @@ -141,7 +142,7 @@ struct LockedFlake */ std::map, SourcePath> nodePaths; - std::optional getFingerprint(ref store, const fetchers::Settings & fetchSettings) const; + std::optional getFingerprint(Store & store, const fetchers::Settings & fetchSettings) const; }; struct LockFlags @@ -206,7 +207,7 @@ struct LockFlags /** * The path to a lock file to write to instead of the `flake.lock` file in the top-level flake */ - std::optional outputLockFilePath; + std::optional outputLockFilePath; /** * Flake inputs to be overridden. @@ -230,6 +231,11 @@ lockFlake(const Settings & settings, EvalState & state, const FlakeRef & flakeRe void callFlake(EvalState & state, const LockedFlake & lockedFlake, Value & v); +/** + * Open an evaluation cache for a flake. 
+ */ +ref openEvalCache(EvalState & state, ref lockedFlake); + } // namespace flake void emitTreeAttrs( diff --git a/src/libflake/include/nix/flake/flakeref.hh b/src/libflake/include/nix/flake/flakeref.hh index d57d19e3104..629afab03b5 100644 --- a/src/libflake/include/nix/flake/flakeref.hh +++ b/src/libflake/include/nix/flake/flakeref.hh @@ -2,9 +2,11 @@ ///@file #include +#include +#include +#include +#include -#include "nix/util/types.hh" -#include "nix/fetchers/fetchers.hh" #include "nix/store/outputs-spec.hh" #include "nix/fetchers/registry.hh" @@ -12,6 +14,10 @@ namespace nix { class Store; +namespace fetchers { +struct Settings; +} // namespace fetchers + typedef std::string FlakeId; /** @@ -66,13 +72,12 @@ struct FlakeRef FlakeRef resolve( const fetchers::Settings & fetchSettings, - ref store, + Store & store, fetchers::UseRegistries useRegistries = fetchers::UseRegistries::All) const; static FlakeRef fromAttrs(const fetchers::Settings & fetchSettings, const fetchers::Attrs & attrs); - std::pair, FlakeRef> - lazyFetch(const fetchers::Settings & fetchSettings, ref store) const; + std::pair, FlakeRef> lazyFetch(const fetchers::Settings & fetchSettings, Store & store) const; /** * Canonicalize a flakeref for the purpose of comparing "old" and @@ -89,7 +94,7 @@ std::ostream & operator<<(std::ostream & str, const FlakeRef & flakeRef); FlakeRef parseFlakeRef( const fetchers::Settings & fetchSettings, const std::string & url, - const std::optional & baseDir = {}, + const std::optional & baseDir = {}, bool allowMissing = false, bool isFlake = true, bool preserveRelativePaths = false); @@ -100,7 +105,7 @@ FlakeRef parseFlakeRef( std::pair parseFlakeRefWithFragment( const fetchers::Settings & fetchSettings, const std::string & url, - const std::optional & baseDir = {}, + const std::optional & baseDir = {}, bool allowMissing = false, bool isFlake = true, bool preserveRelativePaths = false); @@ -111,7 +116,7 @@ std::pair parseFlakeRefWithFragment( std::tuple 
parseFlakeRefWithFragmentAndExtendedOutputsSpec( const fetchers::Settings & fetchSettings, const std::string & url, - const std::optional & baseDir = {}, + const std::optional & baseDir = {}, bool allowMissing = false, bool isFlake = true); diff --git a/src/libflake/include/nix/flake/settings.hh b/src/libflake/include/nix/flake/settings.hh index d8ed4a91a75..05b36f5b779 100644 --- a/src/libflake/include/nix/flake/settings.hh +++ b/src/libflake/include/nix/flake/settings.hh @@ -1,9 +1,10 @@ #pragma once ///@file -#include "nix/util/configuration.hh" - #include +#include + +#include "nix/util/configuration.hh" namespace nix { // Forward declarations diff --git a/src/libflake/include/nix/flake/url-name.hh b/src/libflake/include/nix/flake/url-name.hh index b95d2dff616..d313db33bb9 100644 --- a/src/libflake/include/nix/flake/url-name.hh +++ b/src/libflake/include/nix/flake/url-name.hh @@ -1,9 +1,8 @@ -#include "nix/util/url.hh" -#include "nix/util/url-parts.hh" -#include "nix/util/util.hh" -#include "nix/util/split.hh" +#include +#include namespace nix { +struct ParsedURL; /** * Try to extract a reasonably unique and meaningful, human-readable diff --git a/src/libflake/lockfile.cc b/src/libflake/lockfile.cc index 1553a54430e..83a692b9871 100644 --- a/src/libflake/lockfile.cc +++ b/src/libflake/lockfile.cc @@ -1,15 +1,49 @@ -#include "nix/fetchers/fetch-settings.hh" -#include "nix/flake/settings.hh" -#include "nix/flake/lockfile.hh" -#include "nix/store/store-api.hh" -#include "nix/util/strings.hh" - +#include +#include +#include +#include +#include +#include +#include #include #include - -#include #include -#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "nix/fetchers/fetch-settings.hh" +#include "nix/flake/lockfile.hh" +#include "nix/util/strings.hh" +#include "nix/fetchers/attrs.hh" +#include "nix/fetchers/fetchers.hh" +#include 
"nix/flake/flakeref.hh" +#include "nix/store/path.hh" +#include "nix/util/ansicolor.hh" +#include "nix/util/configuration.hh" +#include "nix/util/error.hh" +#include "nix/util/fmt.hh" +#include "nix/util/hash.hh" +#include "nix/util/logging.hh" +#include "nix/util/ref.hh" +#include "nix/util/types.hh" +#include "nix/util/util.hh" + +namespace nix { +class Store; +} // namespace nix namespace nix::flake { @@ -44,8 +78,8 @@ LockedNode::LockedNode(const fetchers::Settings & fetchSettings, const nlohmann: if (!lockedRef.input.isLocked(fetchSettings) && !lockedRef.input.isRelative()) { if (lockedRef.input.getNarHash()) warn( - "Lock file entry '%s' is unlocked (e.g. lacks a Git revision) but does have a NAR hash. " - "This is deprecated since such inputs are verifiable but may not be reproducible.", + "Lock file entry '%s' is unlocked (e.g. lacks a Git revision) but is checked by NAR hash. " + "This is not reproducible and will break after garbage collection or when shared.", lockedRef.to_string()); else throw Error( @@ -114,11 +148,10 @@ LockFile::LockFile(const fetchers::Settings & fetchSettings, std::string_view co if (version < 5 || version > 7) throw Error("lock file '%s' has unsupported version %d", path, version); - std::map> nodeMap; - - std::function getInputs; + std::string rootKey = json["root"]; + std::map> nodeMap{{rootKey, root}}; - getInputs = [&](Node & node, const nlohmann::json & jsonNode) { + [&](this const auto & getInputs, Node & node, const nlohmann::json & jsonNode) { if (jsonNode.find("inputs") == jsonNode.end()) return; for (auto & i : jsonNode["inputs"].items()) { @@ -146,11 +179,7 @@ LockFile::LockFile(const fetchers::Settings & fetchSettings, std::string_view co throw Error("lock file contains cycle to root node"); } } - }; - - std::string rootKey = json["root"]; - nodeMap.insert_or_assign(rootKey, root); - getInputs(*root, json["nodes"][rootKey]); + }(*root, json["nodes"][rootKey]); // FIXME: check that there are no cycles in version >= 7. 
Cycles // between inputs are only possible using 'follows' indirections. @@ -164,9 +193,7 @@ std::pair LockFile::toJSON() const KeyMap nodeKeys; boost::unordered_flat_set keys; - std::function node)> dumpNode; - - dumpNode = [&](std::string key, ref node) -> std::string { + auto dumpNode = [&](this auto & dumpNode, std::string key, ref node) -> std::string { auto k = nodeKeys.find(node); if (k != nodeKeys.end()) return k->second; @@ -245,17 +272,13 @@ std::optional LockFile::isUnlocked(const fetchers::Settings & fetchSet { std::set> nodes; - std::function node)> visit; - - visit = [&](ref node) { + [&](this const auto & visit, ref node) { if (!nodes.insert(node).second) return; for (auto & i : node->inputs) if (auto child = std::get_if<0>(&i.second)) visit(*child); - }; - - visit(root); + }(root); /* Return whether the input is either locked, or, if `allow-dirty-locks` is enabled, it has a NAR hash. In the @@ -301,9 +324,7 @@ std::map LockFile::getAllInputs() const std::set> done; std::map res; - std::function node)> recurse; - - recurse = [&](const InputAttrPath & prefix, ref node) { + [&](this const auto & recurse, const InputAttrPath & prefix, ref node) { if (!done.insert(node).second) return; @@ -314,9 +335,7 @@ std::map LockFile::getAllInputs() const if (auto child = std::get_if<0>(&input)) recurse(inputAttrPath, *child); } - }; - - recurse({}, root); + }({}, root); return res; } diff --git a/src/libflake/meson.build b/src/libflake/meson.build index 3bd04fcf415..58916ecd9ab 100644 --- a/src/libflake/meson.build +++ b/src/libflake/meson.build @@ -29,7 +29,6 @@ nlohmann_json = dependency('nlohmann_json', version : '>= 3.9') deps_public += nlohmann_json subdir('nix-meson-build-support/common') -subdir('nix-meson-build-support/asan-options') subdir('nix-meson-build-support/generate-header') diff --git a/src/libflake/settings.cc b/src/libflake/settings.cc index e77bded306a..52fa1b49d51 100644 --- a/src/libflake/settings.cc +++ b/src/libflake/settings.cc @@ -1,5 
+1,9 @@ +#include + #include "nix/flake/settings.hh" #include "nix/flake/flake-primops.hh" +#include "nix/expr/eval-settings.hh" +#include "nix/expr/eval.hh" namespace nix::flake { diff --git a/src/libflake/url-name.cc b/src/libflake/url-name.cc index 3bba3692eb1..f4b5c6a7f2f 100644 --- a/src/libflake/url-name.cc +++ b/src/libflake/url-name.cc @@ -1,6 +1,10 @@ -#include "nix/flake/url-name.hh" #include -#include +#include +#include + +#include "nix/flake/url-name.hh" +#include "nix/util/strings.hh" +#include "nix/util/url.hh" namespace nix { diff --git a/src/libmain-c/meson.build b/src/libmain-c/meson.build index 2ac2b799bca..36332fdb70a 100644 --- a/src/libmain-c/meson.build +++ b/src/libmain-c/meson.build @@ -28,7 +28,6 @@ deps_public_maybe_subproject = [ subdir('nix-meson-build-support/subprojects') subdir('nix-meson-build-support/common') -subdir('nix-meson-build-support/asan-options') sources = files( 'nix_api_main.cc', diff --git a/src/libmain/include/nix/main/shared.hh b/src/libmain/include/nix/main/shared.hh index 8b84ae47bd6..800018290f6 100644 --- a/src/libmain/include/nix/main/shared.hh +++ b/src/libmain/include/nix/main/shared.hh @@ -91,8 +91,6 @@ extern volatile ::sig_atomic_t blockInt; /* GC helpers. 
*/ -std::string showBytes(uint64_t bytes); - struct GCResults; struct PrintFreed diff --git a/src/libmain/meson.build b/src/libmain/meson.build index 21bfbea3e24..2ac59924e59 100644 --- a/src/libmain/meson.build +++ b/src/libmain/meson.build @@ -53,7 +53,6 @@ config_priv_h = configure_file( ) subdir('nix-meson-build-support/common') -subdir('nix-meson-build-support/asan-options') sources = files( 'common-args.cc', diff --git a/src/libmain/plugin.cc b/src/libmain/plugin.cc index 321fd6a15de..4755ba24bfa 100644 --- a/src/libmain/plugin.cc +++ b/src/libmain/plugin.cc @@ -10,30 +10,30 @@ namespace nix { -struct PluginFilesSetting : public BaseSetting +struct PluginFilesSetting : public BaseSetting> { bool pluginsLoaded = false; PluginFilesSetting( Config * options, - const Paths & def, + const std::list & def, const std::string & name, const std::string & description, const StringSet & aliases = {}) - : BaseSetting(def, true, name, description, aliases) + : BaseSetting>(def, true, name, description, aliases) { options->addSetting(this); } - Paths parse(const std::string & str) const override; + std::list parse(const std::string & str) const override; }; -Paths PluginFilesSetting::parse(const std::string & str) const +std::list PluginFilesSetting::parse(const std::string & str) const { if (pluginsLoaded) throw UsageError( "plugin-files set after plugins were loaded, you may need to move the flag before the subcommand"); - return BaseSetting::parse(str); + return BaseSetting>::parse(str); } struct PluginSettings : Config diff --git a/src/libmain/progress-bar.cc b/src/libmain/progress-bar.cc index e81f5913536..05fd8982786 100644 --- a/src/libmain/progress-bar.cc +++ b/src/libmain/progress-bar.cc @@ -477,12 +477,11 @@ class ProgressBar : public Logger std::string getStatus(State & state) { - auto MiB = 1024.0 * 1024.0; - std::string res; auto renderActivity = - [&](ActivityType type, const std::string & itemFmt, const std::string & numberFmt = "%d", double unit = 1) { + 
[&] [[nodiscard]] ( + ActivityType type, const std::string & itemFmt, const std::string & numberFmt = "%d", double unit = 1) { auto & act = state.activitiesByType[type]; uint64_t done = act.done, expected = act.done, running = 0, failed = act.failed; for (auto & j : act.its) { @@ -526,20 +525,82 @@ class ProgressBar : public Logger return s; }; + auto renderSizeActivity = [&] [[nodiscard]] (ActivityType type, const std::string & itemFmt = "%s") { + auto & act = state.activitiesByType[type]; + uint64_t done = act.done, expected = act.done, running = 0, failed = act.failed; + for (auto & j : act.its) { + done += j.second->done; + expected += j.second->expected; + running += j.second->running; + failed += j.second->failed; + } + + expected = std::max(expected, act.expected); + + std::optional commonUnit; + std::string s; + + if (running || done || expected || failed) { + if (running) + if (expected != 0) { + commonUnit = getCommonSizeUnit({(int64_t) running, (int64_t) done, (int64_t) expected}); + s = + fmt(ANSI_BLUE "%s" ANSI_NORMAL "/" ANSI_GREEN "%s" ANSI_NORMAL "/%s", + commonUnit ? renderSizeWithoutUnit(running, *commonUnit) : renderSize(running), + commonUnit ? renderSizeWithoutUnit(done, *commonUnit) : renderSize(done), + commonUnit ? renderSizeWithoutUnit(expected, *commonUnit) : renderSize(expected)); + } else { + commonUnit = getCommonSizeUnit({(int64_t) running, (int64_t) done}); + s = + fmt(ANSI_BLUE "%s" ANSI_NORMAL "/" ANSI_GREEN "%s" ANSI_NORMAL, + commonUnit ? renderSizeWithoutUnit(running, *commonUnit) : renderSize(running), + commonUnit ? renderSizeWithoutUnit(done, *commonUnit) : renderSize(done)); + } + else if (expected != done) + if (expected != 0) { + commonUnit = getCommonSizeUnit({(int64_t) done, (int64_t) expected}); + s = + fmt(ANSI_GREEN "%s" ANSI_NORMAL "/%s", + commonUnit ? renderSizeWithoutUnit(done, *commonUnit) : renderSize(done), + commonUnit ? 
renderSizeWithoutUnit(expected, *commonUnit) : renderSize(expected)); + } else { + commonUnit = getSizeUnit(done); + s = fmt(ANSI_GREEN "%s" ANSI_NORMAL, renderSizeWithoutUnit(done, *commonUnit)); + } + else { + commonUnit = getSizeUnit(done); + s = fmt(done ? ANSI_GREEN "%s" ANSI_NORMAL : "%s", renderSizeWithoutUnit(done, *commonUnit)); + } + + if (commonUnit) + s = fmt("%s %siB", s, getSizeUnitSuffix(*commonUnit)); + + s = fmt(itemFmt, s); + + if (failed) + s += fmt(" (" ANSI_RED "%s failed" ANSI_NORMAL ")", renderSize(failed)); + } + + return s; + }; + + auto maybeAppendToResult = [&](std::string_view s) { + if (s.empty()) + return; + if (!res.empty()) + res += ", "; + res += s; + }; + auto showActivity = [&](ActivityType type, const std::string & itemFmt, const std::string & numberFmt = "%d", double unit = 1) { - auto s = renderActivity(type, itemFmt, numberFmt, unit); - if (s.empty()) - return; - if (!res.empty()) - res += ", "; - res += s; + maybeAppendToResult(renderActivity(type, itemFmt, numberFmt, unit)); }; showActivity(actBuilds, "%s built"); auto s1 = renderActivity(actCopyPaths, "%s copied"); - auto s2 = renderActivity(actCopyPath, "%s MiB", "%.1f", MiB); + auto s2 = renderSizeActivity(actCopyPath); if (!s1.empty() || !s2.empty()) { if (!res.empty()) @@ -555,12 +616,12 @@ class ProgressBar : public Logger } } - showActivity(actFileTransfer, "%s MiB DL", "%.1f", MiB); + maybeAppendToResult(renderSizeActivity(actFileTransfer, "%s DL")); { auto s = renderActivity(actOptimiseStore, "%s paths optimised"); if (s != "") { - s += fmt(", %.1f MiB / %d inodes freed", state.bytesLinked / MiB, state.filesLinked); + s += fmt(", %s / %d inodes freed", renderSize(state.bytesLinked), state.filesLinked); if (!res.empty()) res += ", "; res += s; diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 065c90c725e..cac9e38ad85 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -6,6 +6,7 @@ #include "nix/main/loggers.hh" #include 
"nix/main/progress-bar.hh" #include "nix/util/signals.hh" +#include "nix/util/util.hh" #include #include @@ -64,18 +65,19 @@ void printMissing(ref store, const MissingPaths & missing, Verbosity lvl) } if (!missing.willSubstitute.empty()) { - const float downloadSizeMiB = missing.downloadSize / (1024.f * 1024.f); - const float narSizeMiB = missing.narSize / (1024.f * 1024.f); if (missing.willSubstitute.size() == 1) { printMsg( - lvl, "this path will be fetched (%.2f MiB download, %.2f MiB unpacked):", downloadSizeMiB, narSizeMiB); + lvl, + "this path will be fetched (%s download, %s unpacked):", + renderSize(missing.downloadSize), + renderSize(missing.narSize)); } else { printMsg( lvl, - "these %d paths will be fetched (%.2f MiB download, %.2f MiB unpacked):", + "these %d paths will be fetched (%s download, %s unpacked):", missing.willSubstitute.size(), - downloadSizeMiB, - narSizeMiB); + renderSize(missing.downloadSize), + renderSize(missing.narSize)); } std::vector willSubstituteSorted = {}; std::for_each(missing.willSubstitute.begin(), missing.willSubstitute.end(), [&](const StorePath & p) { @@ -307,7 +309,7 @@ void printVersion(const std::string & programName) std::cout << "System type: " << settings.thisSystem << "\n"; std::cout << "Additional system types: " << concatStringsSep(", ", settings.extraPlatforms.get()) << "\n"; std::cout << "Features: " << concatStringsSep(", ", cfg) << "\n"; - std::cout << "System configuration file: " << settings.nixConfDir + "/nix.conf" << "\n"; + std::cout << "System configuration file: " << (settings.nixConfDir / "nix.conf") << "\n"; std::cout << "User configuration files: " << concatStringsSep(":", settings.nixUserConfFiles) << "\n"; std::cout << "Store directory: " << settings.nixStore << "\n"; std::cout << "State directory: " << settings.nixStateDir << "\n"; @@ -324,16 +326,7 @@ int handleExceptions(const std::string & programName, std::function fun) std::string error = ANSI_RED "error:" ANSI_NORMAL " "; try { - try { - 
fun(); - } catch (...) { - /* Subtle: we have to make sure that any `interrupted' - condition is discharged before we reach printMsg() - below, since otherwise it will throw an (uncaught) - exception. */ - setInterruptThrown(); - throw; - } + fun(); } catch (Exit & e) { return e.status; } catch (UsageError & e) { @@ -411,7 +404,7 @@ RunPager::~RunPager() PrintFreed::~PrintFreed() { if (show) - std::cout << fmt("%d store paths deleted, %s freed\n", results.paths.size(), showBytes(results.bytesFreed)); + std::cout << fmt("%d store paths deleted, %s freed\n", results.paths.size(), renderSize(results.bytesFreed)); } } // namespace nix diff --git a/src/libstore-c/meson.build b/src/libstore-c/meson.build index a92771efc1d..c81235bf16d 100644 --- a/src/libstore-c/meson.build +++ b/src/libstore-c/meson.build @@ -26,7 +26,6 @@ deps_public_maybe_subproject = [ subdir('nix-meson-build-support/subprojects') subdir('nix-meson-build-support/common') -subdir('nix-meson-build-support/asan-options') sources = files( 'nix_api_store.cc', @@ -36,6 +35,8 @@ include_dirs = [ include_directories('.') ] headers = files( 'nix_api_store.h', + 'nix_api_store/derivation.h', + 'nix_api_store/store_path.h', ) # TODO don't install this once tests don't use it and/or move the header into `libstore`, non-`c` diff --git a/src/libstore-c/nix_api_store.cc b/src/libstore-c/nix_api_store.cc index 518e0bb8a79..80fcf10cb0d 100644 --- a/src/libstore-c/nix_api_store.cc +++ b/src/libstore-c/nix_api_store.cc @@ -1,3 +1,6 @@ +#include +#include + #include "nix_api_store.h" #include "nix_api_store_internal.h" #include "nix_api_util.h" @@ -7,6 +10,8 @@ #include "nix/store/store-api.hh" #include "nix/store/store-open.hh" #include "nix/store/build-result.hh" +#include "nix/store/local-fs-store.hh" +#include "nix/util/base-nix-32.hh" #include "nix/store/globals.hh" @@ -109,7 +114,8 @@ nix_err nix_store_real_path( if (context) context->last_err_code = NIX_OK; try { - auto res = store->ptr->toRealPath(path->path); + 
auto store2 = store->ptr.dynamic_pointer_cast(); + auto res = store2 ? store2->toRealPath(path->path) : store->ptr->printStorePath(path->path); return call_nix_get_string_callback(res, callback, user_data); } NIXC_CATCH_ERRS @@ -126,6 +132,36 @@ StorePath * nix_store_parse_path(nix_c_context * context, Store * store, const c NIXC_CATCH_ERRS_NULL } +nix_err nix_store_get_fs_closure( + nix_c_context * context, + Store * store, + const StorePath * store_path, + bool flip_direction, + bool include_outputs, + bool include_derivers, + void * userdata, + void (*callback)(nix_c_context * context, void * userdata, const StorePath * store_path)) +{ + if (context) + context->last_err_code = NIX_OK; + try { + const auto nixStore = store->ptr; + + nix::StorePathSet set; + nixStore->computeFSClosure(store_path->path, set, flip_direction, include_outputs, include_derivers); + + if (callback) { + for (const auto & path : set) { + const StorePath tmp{path}; + callback(context, userdata, &tmp); + if (context && context->last_err_code != NIX_OK) + return context->last_err_code; + } + } + } + NIXC_CATCH_ERRS +} + nix_err nix_store_realise( nix_c_context * context, Store * store, @@ -143,6 +179,14 @@ nix_err nix_store_realise( const auto nixStore = store->ptr; auto results = nixStore->buildPathsWithResults(paths, nix::bmNormal, nixStore); + assert(results.size() == 1); + + // Check if any builds failed + for (auto & result : results) { + if (auto * failureP = result.tryGetFailure()) + failureP->rethrow(); + } + if (callback) { for (const auto & result : results) { if (auto * success = result.tryGetSuccess()) { @@ -175,26 +219,73 @@ void nix_derivation_free(nix_derivation * drv) StorePath * nix_store_path_clone(const StorePath * p) { - return new StorePath{p->path}; + try { + return new StorePath{p->path}; + } catch (...) 
{ + return nullptr; + } } -nix_derivation * nix_derivation_clone(const nix_derivation * d) +} // extern "C" + +template +static auto to_cpp_array(const uint8_t (&r)[S]) { - return new nix_derivation{d->drv, d->store}; + return reinterpret_cast &>(r); } -nix_derivation * nix_derivation_from_json(nix_c_context * context, Store * store, const char * json) +extern "C" { + +nix_err +nix_store_path_hash(nix_c_context * context, const StorePath * store_path, nix_store_path_hash_part * hash_part_out) +{ + try { + auto hashPart = store_path->path.hashPart(); + // Decode from Nix32 (base32) encoding to raw bytes + auto decoded = nix::BaseNix32::decode(hashPart); + + assert(decoded.size() == sizeof(hash_part_out->bytes)); + std::memcpy(hash_part_out->bytes, decoded.data(), sizeof(hash_part_out->bytes)); + return NIX_OK; + } + NIXC_CATCH_ERRS +} + +StorePath * nix_store_create_from_parts( + nix_c_context * context, const nix_store_path_hash_part * hash, const char * name, size_t name_len) { if (context) context->last_err_code = NIX_OK; try { - auto drv = static_cast(nlohmann::json::parse(json)); + // Encode the 20 raw bytes to Nix32 (base32) format + auto hashStr = nix::BaseNix32::encode(std::span{to_cpp_array(hash->bytes)}); - auto drvPath = nix::writeDerivation(*store->ptr, drv, nix::NoRepair, /* read only */ true); + // Construct the store path basename: - + std::string baseName; + baseName += hashStr; + baseName += "-"; + baseName += std::string_view{name, name_len}; - drv.checkInvariants(*store->ptr, drvPath); + return new StorePath{nix::StorePath(std::move(baseName))}; + } + NIXC_CATCH_ERRS_NULL +} - return new nix_derivation{drv}; +nix_derivation * nix_derivation_clone(const nix_derivation * d) +{ + try { + return new nix_derivation{d->drv}; + } catch (...) 
{ + return nullptr; + } +} + +nix_derivation * nix_derivation_from_json(nix_c_context * context, Store * store, const char * json) +{ + if (context) + context->last_err_code = NIX_OK; + try { + return new nix_derivation{nix::Derivation::parseJsonAndValidate(*store->ptr, nlohmann::json::parse(json))}; } NIXC_CATCH_ERRS_NULL } @@ -209,7 +300,7 @@ nix_err nix_derivation_make_outputs( if (context) context->last_err_code = NIX_OK; try { - auto drv = nix::Derivation::fromJSON(nlohmann::json::parse(json)); + auto drv = nix::Derivation::parseJsonAndValidate(*store->ptr, nlohmann::json::parse(json)); auto hashesModulo = hashDerivationModulo(*store->ptr, drv, true); for (auto & output : drv.outputs) { @@ -224,6 +315,20 @@ nix_err nix_derivation_make_outputs( NIXC_CATCH_ERRS } +nix_err nix_derivation_to_json( + nix_c_context * context, const nix_derivation * drv, nix_get_string_callback callback, void * userdata) +{ + if (context) + context->last_err_code = NIX_OK; + try { + auto result = static_cast(drv->drv).dump(); + if (callback) { + callback(result.data(), result.size(), userdata); + } + } + NIXC_CATCH_ERRS +} + StorePath * nix_add_derivation(nix_c_context * context, Store * store, nix_derivation * derivation) { if (context) @@ -248,34 +353,14 @@ nix_err nix_store_copy_closure(nix_c_context * context, Store * srcStore, Store NIXC_CATCH_ERRS } -nix_err nix_store_get_fs_closure( - nix_c_context * context, - Store * store, - const StorePath * store_path, - bool flip_direction, - bool include_outputs, - bool include_derivers, - void * userdata, - void (*callback)(nix_c_context * context, void * userdata, const StorePath * store_path)) +nix_derivation * nix_store_drv_from_store_path(nix_c_context * context, Store * store, const StorePath * path) { if (context) context->last_err_code = NIX_OK; try { - const auto nixStore = store->ptr; - - nix::StorePathSet set; - nixStore->computeFSClosure(store_path->path, set, flip_direction, include_outputs, include_derivers); - - if 
(callback) { - for (const auto & path : set) { - const StorePath tmp{path}; - callback(context, userdata, &tmp); - if (context && context->last_err_code != NIX_OK) - return context->last_err_code; - } - } + return new nix_derivation{store->ptr->derivationFromPath(path->path)}; } - NIXC_CATCH_ERRS + NIXC_CATCH_ERRS_NULL } nix_err nix_store_drv_from_path( @@ -309,7 +394,7 @@ nix_err nix_store_query_path_info( try { auto info = store->ptr->queryPathInfo(store_path->path); if (callback) { - auto result = info->toJSON(store->ptr->config, true, nix::HashFormat::Nix32).dump(); + auto result = info->toJSON(&store->ptr->config, true, nix::PathInfoJsonFormat::V1).dump(); callback(result.data(), result.size(), userdata); } } @@ -335,11 +420,9 @@ nix_err nix_store_build_paths( auto results = store->ptr->buildPathsWithResults(derived_paths); for (auto & result : results) { - if (callback) { - nlohmann::json json; - nix::to_json(json, result); - callback(userdata, result.path.to_string(store->ptr->config).c_str(), json.dump().c_str()); - } + if (callback) + callback( + userdata, result.path.to_string(store->ptr->config).c_str(), nlohmann::json(result).dump().c_str()); } } NIXC_CATCH_ERRS @@ -370,18 +453,4 @@ nix_err nix_derivation_get_outputs_and_optpaths( NIXC_CATCH_ERRS } -nix_err nix_derivation_to_json( - nix_c_context * context, const nix_derivation * drv, nix_get_string_callback callback, void * userdata) -{ - if (context) - context->last_err_code = NIX_OK; - try { - auto result = drv->drv.toJSON().dump(); - if (callback) { - callback(result.data(), result.size(), userdata); - } - } - NIXC_CATCH_ERRS -} - } // extern "C" diff --git a/src/libstore-c/nix_api_store.h b/src/libstore-c/nix_api_store.h index 7b83be656ec..5e542b0caaf 100644 --- a/src/libstore-c/nix_api_store.h +++ b/src/libstore-c/nix_api_store.h @@ -12,6 +12,8 @@ */ #include "nix_api_util.h" +#include "nix_api_store/store_path.h" +#include "nix_api_store/derivation.h" #include #ifdef __cplusplus @@ -21,10 +23,6 
@@ extern "C" { /** @brief Reference to a Nix store */ typedef struct Store Store; -/** @brief Nix store path */ -typedef struct StorePath StorePath; -/** @brief Nix Derivation */ -typedef struct nix_derivation nix_derivation; /** * @brief Initializes the Nix store library @@ -108,7 +106,7 @@ nix_err nix_store_get_storedir(nix_c_context * context, Store * store, nix_get_string_callback callback, void * user_data); /** - * @brief Parse a Nix store path into a StorePath + * @brief Parse a Nix store path that includes the store dir into a StorePath * * @note Don't forget to free this path using nix_store_path_free()! * @param[out] context Optional, stores error information @@ -118,30 +116,6 @@ nix_store_get_storedir(nix_c_context * context, Store * store, nix_get_string_ca */ StorePath * nix_store_parse_path(nix_c_context * context, Store * store, const char * path); -/** - * @brief Get the path name (e.g. "name" in /nix/store/...-name) - * - * @param[in] store_path the path to get the name from - * @param[in] callback called with the name - * @param[in] user_data arbitrary data, passed to the callback when it's called. - */ -void nix_store_path_name(const StorePath * store_path, nix_get_string_callback callback, void * user_data); - -/** - * @brief Copy a StorePath - * - * @param[in] p the path to copy - * @return a new StorePath - */ -StorePath * nix_store_path_clone(const StorePath * p); - -/** @brief Deallocate a StorePath - * - * Does not fail. - * @param[in] p the path to free - */ -void nix_store_path_free(StorePath * p); - /** * @brief Check if a StorePath is valid (i.e. that corresponding store object and its closure of references exists in * the store) @@ -186,6 +160,8 @@ nix_err nix_store_real_path( * @param[in] path Path to build * @param[in] userdata data to pass to every callback invocation * @param[in] callback called for every realised output + * @return NIX_OK if the build succeeded, or an error code if the build/scheduling/outputs/copying/etc failed. 
+ * On error, the callback is never invoked and error information is stored in context. */ nix_err nix_store_realise( nix_c_context * context, @@ -212,9 +188,16 @@ nix_store_get_version(nix_c_context * context, Store * store, nix_get_string_cal /** * @brief Create a `nix_derivation` from a JSON representation of that derivation. * + * @note Unlike `nix_derivation_to_json`, this needs a `Store`. This is because + * over time we expect the internal representation of derivations in Nix to + * differ from accepted derivation formats. The store argument is here to help + * any logic needed to convert from JSON to the internal representation, in + * excess of just parsing. + * * @param[out] context Optional, stores error information. * @param[in] store nix store reference. * @param[in] json JSON of the derivation as a string. + * @return A new derivation, or NULL on error. Free with `nix_derivation_free` when done using the `nix_derivation`. */ nix_derivation * nix_derivation_from_json(nix_c_context * context, Store * store, const char * json); @@ -243,22 +226,6 @@ nix_err nix_derivation_make_outputs( */ StorePath * nix_add_derivation(nix_c_context * context, Store * store, nix_derivation * derivation); -/** - * @brief Deallocate a `nix_derivation` - * - * Does not fail. - * @param[in] drv the derivation to free - */ -void nix_derivation_free(nix_derivation * drv); - -/** - * @brief Copy a `nix_derivation` - * - * @param[in] d the derivation to copy - * @return a new `nix_derivation` - */ -nix_derivation * nix_derivation_clone(const nix_derivation * d); - /** * @brief Copy the closure of `path` from `srcStore` to `dstStore`. 
* @@ -277,10 +244,15 @@ nix_err nix_store_copy_closure(nix_c_context * context, Store * srcStore, Store * @param[out] context Optional, stores error information * @param[in] store nix store reference * @param[in] store_path The path to compute from - * @param[in] flip_direction - * @param[in] include_outputs - * @param[in] include_derivers - * @param[in] callback The function to call for every store path + * @param[in] flip_direction If false, compute the forward closure (paths referenced by any store path in the closure). + * If true, compute the backward closure (paths that reference any store path in the closure). + * @param[in] include_outputs If flip_direction is false: for any derivation in the closure, include its outputs. + * If flip_direction is true: for any output in the closure, include derivations that produce + * it. + * @param[in] include_derivers If flip_direction is false: for any output in the closure, include the derivation that + * produced it. + * If flip_direction is true: for any derivation in the closure, include its outputs. + * @param[in] callback The function to call for every store path, in no particular order * @param[in] userdata The userdata to pass to the callback */ nix_err nix_store_get_fs_closure( @@ -296,6 +268,14 @@ nix_err nix_store_get_fs_closure( /** * @brief Returns the derivation associated with the store path * + * @param[out] context Optional, stores error information + * @param[in] store The nix store + * @param[in] path The nix store path + * @return A new derivation, or NULL on error. Free with `nix_derivation_free` when done using the `nix_derivation`. + */ +nix_derivation * nix_store_drv_from_store_path(nix_c_context * context, Store * store, const StorePath * path); + +/** * @note The callback borrows the Derivation only for the duration of the call. 
* * @param[out] context Optional, stores error information diff --git a/src/libstore-c/nix_api_store/derivation.h b/src/libstore-c/nix_api_store/derivation.h new file mode 100644 index 00000000000..239ffd52f8b --- /dev/null +++ b/src/libstore-c/nix_api_store/derivation.h @@ -0,0 +1,57 @@ +#ifndef NIX_API_STORE_DERIVATION_H +#define NIX_API_STORE_DERIVATION_H +/** + * @defgroup libstore_derivation Derivation + * @ingroup libstore + * @brief Derivation operations that don't require a Store + * @{ + */ +/** @file + * @brief Derivation operations + */ + +#include "nix_api_util.h" + +#ifdef __cplusplus +extern "C" { +#endif +// cffi start + +/** @brief Nix Derivation */ +typedef struct nix_derivation nix_derivation; + +/** + * @brief Copy a `nix_derivation` + * + * @param[in] d the derivation to copy + * @return a new `nix_derivation` + */ +nix_derivation * nix_derivation_clone(const nix_derivation * d); + +/** + * @brief Deallocate a `nix_derivation` + * + * Does not fail. + * @param[in] drv the derivation to free + */ +void nix_derivation_free(nix_derivation * drv); + +/** + * @brief Gets the derivation as a JSON string + * + * @param[out] context Optional, stores error information + * @param[in] drv The derivation + * @param[in] callback Called with the JSON string + * @param[in] userdata Arbitrary data passed to the callback + */ +nix_err nix_derivation_to_json( + nix_c_context * context, const nix_derivation * drv, nix_get_string_callback callback, void * userdata); + +// cffi end +#ifdef __cplusplus +} +#endif +/** + * @} + */ +#endif // NIX_API_STORE_DERIVATION_H diff --git a/src/libstore-c/nix_api_store/store_path.h b/src/libstore-c/nix_api_store/store_path.h new file mode 100644 index 00000000000..1aa9bcac7e4 --- /dev/null +++ b/src/libstore-c/nix_api_store/store_path.h @@ -0,0 +1,96 @@ +#ifndef NIX_API_STORE_STORE_PATH_H +#define NIX_API_STORE_STORE_PATH_H +/** + * @defgroup libstore_storepath StorePath + * @ingroup libstore + * @brief Store path operations 
that don't require a Store + * @{ + */ +/** @file + * @brief Store path operations + */ + +#include +#include + +#include "nix_api_util.h" + +#ifdef __cplusplus +extern "C" { +#endif +// cffi start + +/** @brief Nix store path */ +typedef struct StorePath StorePath; + +/** + * @brief Copy a StorePath + * + * @param[in] p the path to copy + * @return a new StorePath + */ +StorePath * nix_store_path_clone(const StorePath * p); + +/** @brief Deallocate a StorePath + * + * Does not fail. + * @param[in] p the path to free + */ +void nix_store_path_free(StorePath * p); + +/** + * @brief Get the path name (e.g. "" in /nix/store/-) + * + * @param[in] store_path the path to get the name from + * @param[in] callback called with the name + * @param[in] user_data arbitrary data, passed to the callback when it's called. + */ +void nix_store_path_name(const StorePath * store_path, nix_get_string_callback callback, void * user_data); + +/** + * @brief A store path hash + * + * Once decoded from "nix32" encoding, a store path hash is 20 raw bytes. + */ +typedef struct nix_store_path_hash_part +{ + uint8_t bytes[20]; +} nix_store_path_hash_part; + +/** + * @brief Get the path hash (e.g. "" in /nix/store/-) + * + * The hash is returned as raw bytes, decoded from "nix32" encoding. + * + * @param[out] context Optional, stores error information + * @param[in] store_path the path to get the hash from + * @param[out] hash_part_out the decoded hash as 20 raw bytes + * @return NIX_OK on success, error code on failure + */ +nix_err +nix_store_path_hash(nix_c_context * context, const StorePath * store_path, nix_store_path_hash_part * hash_part_out); + +/** + * @brief Create a StorePath from its constituent parts (hash and name) + * + * This function constructs a store path from a hash and name, without needing + * a Store reference or the store directory prefix. + * + * @note Don't forget to free this path using nix_store_path_free()! 
+ * @param[out] context Optional, stores error information + * @param[in] hash The store path hash (20 raw bytes) + * @param[in] name The store path name (the part after the hash) + * @param[in] name_len Length of the name string + * @return owned store path, NULL on error + */ +StorePath * nix_store_create_from_parts( + nix_c_context * context, const nix_store_path_hash_part * hash, const char name[/*name_len*/], size_t name_len); + +// cffi end +#ifdef __cplusplus +} +#endif +/** + * @} + */ +#endif // NIX_API_STORE_STORE_PATH_H diff --git a/src/libstore-test-support/include/nix/store/tests/nix_api_store.hh b/src/libstore-test-support/include/nix/store/tests/nix_api_store.hh index 7ecc5603b6a..a35d2b1eede 100644 --- a/src/libstore-test-support/include/nix/store/tests/nix_api_store.hh +++ b/src/libstore-test-support/include/nix/store/tests/nix_api_store.hh @@ -41,16 +41,17 @@ protected: { #ifdef _WIN32 // no `mkdtemp` with MinGW - auto tmpl = nix::defaultTempDir() + "/tests_nix-store."; + auto tmpl = nix::defaultTempDir() / "tests_nix-store."; for (size_t i = 0; true; ++i) { - nixDir = tmpl + std::string{i}; + nixDir = tmpl.string() + std::to_string(i); if (std::filesystem::create_directory(nixDir)) break; } #else // resolve any symlinks in i.e. on macOS /tmp -> /private/tmp // because this is not allowed for a nix store. 
- auto tmpl = nix::absPath(std::filesystem::path(nix::defaultTempDir()) / "tests_nix-store.XXXXXX", true); + auto tmpl = + nix::absPath(std::filesystem::path(nix::defaultTempDir()) / "tests_nix-store.XXXXXX", std::nullopt, true); nixDir = mkdtemp((char *) tmpl.c_str()); #endif diff --git a/src/libstore-test-support/include/nix/store/tests/protocol.hh b/src/libstore-test-support/include/nix/store/tests/protocol.hh index 5b57c658517..0f774df0ec0 100644 --- a/src/libstore-test-support/include/nix/store/tests/protocol.hh +++ b/src/libstore-test-support/include/nix/store/tests/protocol.hh @@ -6,6 +6,7 @@ #include "nix/store/tests/libstore.hh" #include "nix/util/tests/characterization.hh" +#include "nix/util/tests/json-characterization.hh" namespace nix { @@ -16,12 +17,30 @@ class ProtoTest : public CharacterizationTest std::filesystem::path goldenMaster(std::string_view testStem) const override { - return unitTestData / (std::string{testStem + ".bin"}); + return unitTestData / testStem; } public: Path storeDir = "/nix/store"; StoreDirConfig store{storeDir}; + + /** + * Golden test for `T` JSON reading + */ + template + void readJsonTest(PathView testStem, const T & expected) + { + nix::readJsonTest(*this, testStem, expected); + } + + /** + * Golden test for `T` JSON write + */ + template + void writeJsonTest(PathView testStem, const T & decoded) + { + nix::writeJsonTest(*this, testStem, decoded); + } }; template @@ -34,7 +53,7 @@ public: template void readProtoTest(PathView testStem, typename Proto::Version version, T expected) { - CharacterizationTest::readTest(testStem, [&](const auto & encoded) { + CharacterizationTest::readTest(std::string{testStem + ".bin"}, [&](const auto & encoded) { T got = ({ StringSource from{encoded}; Proto::template Serialise::read( @@ -55,7 +74,7 @@ public: template void writeProtoTest(PathView testStem, typename Proto::Version version, const T & decoded) { - CharacterizationTest::writeTest(testStem, [&]() { + 
CharacterizationTest::writeTest(std::string{testStem + ".bin"}, [&]() { StringSink to; Proto::template Serialise::write( this->store, @@ -69,14 +88,25 @@ public: } }; -#define VERSIONED_CHARACTERIZATION_TEST(FIXTURE, NAME, STEM, VERSION, VALUE) \ - TEST_F(FIXTURE, NAME##_read) \ - { \ - readProtoTest(STEM, VERSION, VALUE); \ - } \ - TEST_F(FIXTURE, NAME##_write) \ - { \ - writeProtoTest(STEM, VERSION, VALUE); \ +#define VERSIONED_CHARACTERIZATION_TEST_NO_JSON(FIXTURE, NAME, STEM, VERSION, VALUE) \ + TEST_F(FIXTURE, NAME##_read) \ + { \ + readProtoTest(STEM, VERSION, VALUE); \ + } \ + TEST_F(FIXTURE, NAME##_write) \ + { \ + writeProtoTest(STEM, VERSION, VALUE); \ + } + +#define VERSIONED_CHARACTERIZATION_TEST(FIXTURE, NAME, STEM, VERSION, VALUE) \ + VERSIONED_CHARACTERIZATION_TEST_NO_JSON(FIXTURE, NAME, STEM, VERSION, VALUE) \ + TEST_F(FIXTURE, NAME##_json_read) \ + { \ + readJsonTest(STEM, VALUE); \ + } \ + TEST_F(FIXTURE, NAME##_json_write) \ + { \ + writeJsonTest(STEM, VALUE); \ } } // namespace nix diff --git a/src/libstore-test-support/meson.build b/src/libstore-test-support/meson.build index e929ae2b499..8617225d743 100644 --- a/src/libstore-test-support/meson.build +++ b/src/libstore-test-support/meson.build @@ -29,7 +29,6 @@ rapidcheck = dependency('rapidcheck') deps_public += rapidcheck subdir('nix-meson-build-support/common') -subdir('nix-meson-build-support/asan-options') sources = files( 'derived-path.cc', diff --git a/src/libstore-tests/build-result.cc b/src/libstore-tests/build-result.cc new file mode 100644 index 00000000000..85e799c2a73 --- /dev/null +++ b/src/libstore-tests/build-result.cc @@ -0,0 +1,108 @@ +#include + +#include "nix/store/build-result.hh" +#include "nix/util/tests/json-characterization.hh" + +namespace nix { + +class BuildResultTest : public virtual CharacterizationTest +{ + std::filesystem::path unitTestData = getUnitTestData() / "build-result"; + +public: + std::filesystem::path goldenMaster(std::string_view testStem) const 
override + { + return unitTestData / testStem; + } +}; + +using nlohmann::json; + +struct BuildResultJsonTest : BuildResultTest, + JsonCharacterizationTest, + ::testing::WithParamInterface> +{}; + +TEST_P(BuildResultJsonTest, from_json) +{ + auto & [name, expected] = GetParam(); + readJsonTest(name, expected); +} + +TEST_P(BuildResultJsonTest, to_json) +{ + auto & [name, value] = GetParam(); + writeJsonTest(name, value); +} + +using namespace std::literals::chrono_literals; + +INSTANTIATE_TEST_SUITE_P( + BuildResultJSON, + BuildResultJsonTest, + ::testing::Values( + std::pair{ + "not-deterministic", + BuildResult{ + .inner{BuildResult::Failure{ + .status = BuildResult::Failure::NotDeterministic, + .errorMsg = "no idea why", + .isNonDeterministic = false, // Note: This field is separate from the status + }}, + .timesBuilt = 1, + }, + }, + std::pair{ + "output-rejected", + BuildResult{ + .inner{BuildResult::Failure{ + .status = BuildResult::Failure::OutputRejected, + .errorMsg = "no idea why", + .isNonDeterministic = false, + }}, + .timesBuilt = 3, + .startTime = 30, + .stopTime = 50, + }, + }, + std::pair{ + "success", + BuildResult{ + .inner{BuildResult::Success{ + .status = BuildResult::Success::Built, + .builtOutputs{ + { + "foo", + { + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + }, + DrvOutput{ + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "foo", + }, + }, + }, + { + "bar", + { + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"}, + }, + DrvOutput{ + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "bar", + }, + }, + }, + }, + }}, + .timesBuilt = 3, + .startTime = 30, + .stopTime = 50, + .cpuUser = std::chrono::microseconds(500s), + .cpuSystem = std::chrono::microseconds(604s), + }, + })); + +} // namespace nix diff --git a/src/libstore-tests/common-protocol.cc b/src/libstore-tests/common-protocol.cc index 
35fca165dc3..fa676eb7f4e 100644 --- a/src/libstore-tests/common-protocol.cc +++ b/src/libstore-tests/common-protocol.cc @@ -3,6 +3,7 @@ #include #include +#include "nix/util/json-utils.hh" #include "nix/store/common-protocol.hh" #include "nix/store/common-protocol-impl.hh" #include "nix/store/build-result.hh" @@ -22,7 +23,7 @@ class CommonProtoTest : public ProtoTest template void readProtoTest(PathView testStem, const T & expected) { - CharacterizationTest::readTest(testStem, [&](const auto & encoded) { + CharacterizationTest::readTest(std::string{testStem + ".bin"}, [&](const auto & encoded) { T got = ({ StringSource from{encoded}; CommonProto::Serialise::read(store, CommonProto::ReadConn{.from = from}); @@ -38,7 +39,7 @@ class CommonProtoTest : public ProtoTest template void writeProtoTest(PathView testStem, const T & decoded) { - CharacterizationTest::writeTest(testStem, [&]() -> std::string { + CharacterizationTest::writeTest(std::string{testStem + ".bin"}, [&]() -> std::string { StringSink to; CommonProto::Serialise::write(store, CommonProto::WriteConn{.to = to}, decoded); return to.s; @@ -54,6 +55,14 @@ class CommonProtoTest : public ProtoTest TEST_F(CommonProtoTest, NAME##_write) \ { \ writeProtoTest(STEM, VALUE); \ + } \ + TEST_F(CommonProtoTest, NAME##_json_read) \ + { \ + readJsonTest(STEM, VALUE); \ + } \ + TEST_F(CommonProtoTest, NAME##_json_write) \ + { \ + writeJsonTest(STEM, VALUE); \ } CHARACTERIZATION_TEST( @@ -112,32 +121,49 @@ CHARACTERIZATION_TEST( "realisation", (std::tuple{ Realisation{ - .id = - DrvOutput{ - .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), - .outputName = "baz", - }, - .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, - .signatures = {"asdf", "qwer"}, + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + }, + { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, }, Realisation{ - .id = - { - .drvHash = 
Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), - .outputName = "baz", - }, - .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, - .signatures = {"asdf", "qwer"}, - .dependentRealisations = - { + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + .signatures = {"asdf", "qwer"}, + }, + { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, + }, + })) + +CHARACTERIZATION_TEST( + realisation_with_deps, + "realisation-with-deps", + (std::tuple{ + Realisation{ + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + .signatures = {"asdf", "qwer"}, + .dependentRealisations = { - DrvOutput{ - .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "quux", + { + DrvOutput{ + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "quux", + }, + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, }, - StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, }, - }, + }, + { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, }, })) diff --git a/src/libstore-tests/content-address.cc b/src/libstore-tests/content-address.cc index 51d591c3853..0474fb2e0c7 100644 --- a/src/libstore-tests/content-address.cc +++ b/src/libstore-tests/content-address.cc @@ -1,6 +1,7 @@ #include #include "nix/store/content-address.hh" +#include "nix/util/tests/json-characterization.hh" namespace nix { @@ -8,33 +9,93 @@ namespace nix { * ContentAddressMethod::parse, ContentAddressMethod::render * --------------------------------------------------------------------------*/ -TEST(ContentAddressMethod, testRoundTripPrintParse_1) +static auto methods = ::testing::Values( + std::pair{ContentAddressMethod::Raw::Text, "text"}, + std::pair{ContentAddressMethod::Raw::Flat, "flat"}, + std::pair{ContentAddressMethod::Raw::NixArchive, "nar"}, + 
std::pair{ContentAddressMethod::Raw::Git, "git"}); + +struct ContentAddressMethodTest : ::testing::Test, + ::testing::WithParamInterface> +{}; + +TEST_P(ContentAddressMethodTest, testRoundTripPrintParse_1) { - for (ContentAddressMethod cam : { - ContentAddressMethod::Raw::Text, - ContentAddressMethod::Raw::Flat, - ContentAddressMethod::Raw::NixArchive, - ContentAddressMethod::Raw::Git, - }) { - EXPECT_EQ(ContentAddressMethod::parse(cam.render()), cam); - } + auto & [cam, _] = GetParam(); + EXPECT_EQ(ContentAddressMethod::parse(cam.render()), cam); } -TEST(ContentAddressMethod, testRoundTripPrintParse_2) +TEST_P(ContentAddressMethodTest, testRoundTripPrintParse_2) { - for (const std::string_view camS : { - "text", - "flat", - "nar", - "git", - }) { - EXPECT_EQ(ContentAddressMethod::parse(camS).render(), camS); - } + auto & [cam, camS] = GetParam(); + EXPECT_EQ(ContentAddressMethod::parse(camS).render(), camS); } +INSTANTIATE_TEST_SUITE_P(ContentAddressMethod, ContentAddressMethodTest, methods); + TEST(ContentAddressMethod, testParseContentAddressMethodOptException) { EXPECT_THROW(ContentAddressMethod::parse("narwhal"), UsageError); } +/* ---------------------------------------------------------------------------- + * JSON + * --------------------------------------------------------------------------*/ + +class ContentAddressTest : public virtual CharacterizationTest +{ + std::filesystem::path unitTestData = getUnitTestData() / "content-address"; + +public: + + /** + * We set these in tests rather than the regular globals so we don't have + * to worry about race conditions if the tests run concurrently. 
+ */ + ExperimentalFeatureSettings mockXpSettings; + + std::filesystem::path goldenMaster(std::string_view testStem) const override + { + return unitTestData / testStem; + } +}; + +using nlohmann::json; + +struct ContentAddressJsonTest : ContentAddressTest, + JsonCharacterizationTest, + ::testing::WithParamInterface> +{}; + +TEST_P(ContentAddressJsonTest, from_json) +{ + auto & [name, expected] = GetParam(); + readJsonTest(name, expected); +} + +TEST_P(ContentAddressJsonTest, to_json) +{ + auto & [name, value] = GetParam(); + writeJsonTest(name, value); +} + +INSTANTIATE_TEST_SUITE_P( + ContentAddressJSON, + ContentAddressJsonTest, + ::testing::Values( + std::pair{ + "text", + ContentAddress{ + .method = ContentAddressMethod::Raw::Text, + .hash = hashString(HashAlgorithm::SHA256, "asdf"), + }, + }, + std::pair{ + "nar", + ContentAddress{ + .method = ContentAddressMethod::Raw::NixArchive, + .hash = hashString(HashAlgorithm::SHA256, "qwer"), + }, + })); + } // namespace nix diff --git a/src/libstore-tests/data/build-result/not-deterministic.json b/src/libstore-tests/data/build-result/not-deterministic.json new file mode 100644 index 00000000000..c24a15795b5 --- /dev/null +++ b/src/libstore-tests/data/build-result/not-deterministic.json @@ -0,0 +1,9 @@ +{ + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "NotDeterministic", + "stopTime": 0, + "success": false, + "timesBuilt": 1 +} diff --git a/src/libstore-tests/data/build-result/output-rejected.json b/src/libstore-tests/data/build-result/output-rejected.json new file mode 100644 index 00000000000..9494bf4ec6c --- /dev/null +++ b/src/libstore-tests/data/build-result/output-rejected.json @@ -0,0 +1,9 @@ +{ + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 30, + "status": "OutputRejected", + "stopTime": 50, + "success": false, + "timesBuilt": 3 +} diff --git a/src/libstore-tests/data/build-result/success.json 
b/src/libstore-tests/data/build-result/success.json new file mode 100644 index 00000000000..4baadb54775 --- /dev/null +++ b/src/libstore-tests/data/build-result/success.json @@ -0,0 +1,23 @@ +{ + "builtOutputs": { + "bar": { + "dependentRealisations": {}, + "id": "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!bar", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "signatures": [] + }, + "foo": { + "dependentRealisations": {}, + "id": "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!foo", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [] + } + }, + "cpuSystem": 604000000, + "cpuUser": 500000000, + "startTime": 30, + "status": "Built", + "stopTime": 50, + "success": true, + "timesBuilt": 3 +} diff --git a/src/libstore-tests/data/common-protocol/content-address.json b/src/libstore-tests/data/common-protocol/content-address.json new file mode 100644 index 00000000000..3c63e6d9bbd --- /dev/null +++ b/src/libstore-tests/data/common-protocol/content-address.json @@ -0,0 +1,14 @@ +[ + { + "hash": "sha256-+Xc9Ll6mcPltwaewrk/BAQ56Y3G5T//wzhKUc0zrYu0=", + "method": "text" + }, + { + "hash": "sha1-gGemBoenViNZM3hiwqns/Fgzqwo=", + "method": "flat" + }, + { + "hash": "sha256-EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM=", + "method": "nar" + } +] diff --git a/src/libstore-tests/data/common-protocol/drv-output.json b/src/libstore-tests/data/common-protocol/drv-output.json new file mode 100644 index 00000000000..2668d70c98c --- /dev/null +++ b/src/libstore-tests/data/common-protocol/drv-output.json @@ -0,0 +1,4 @@ +[ + "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!quux" +] diff --git a/src/libstore-tests/data/common-protocol/optional-content-address.json b/src/libstore-tests/data/common-protocol/optional-content-address.json new file mode 100644 index 00000000000..1c57fbf25ae --- /dev/null +++ 
b/src/libstore-tests/data/common-protocol/optional-content-address.json @@ -0,0 +1,7 @@ +[ + null, + { + "hash": "sha1-gGemBoenViNZM3hiwqns/Fgzqwo=", + "method": "flat" + } +] diff --git a/src/libstore-tests/data/common-protocol/optional-store-path.json b/src/libstore-tests/data/common-protocol/optional-store-path.json new file mode 100644 index 00000000000..58519a4d2c4 --- /dev/null +++ b/src/libstore-tests/data/common-protocol/optional-store-path.json @@ -0,0 +1,4 @@ +[ + null, + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" +] diff --git a/src/libstore-tests/data/common-protocol/realisation-with-deps.bin b/src/libstore-tests/data/common-protocol/realisation-with-deps.bin new file mode 100644 index 00000000000..54a78b64ebc Binary files /dev/null and b/src/libstore-tests/data/common-protocol/realisation-with-deps.bin differ diff --git a/src/libstore-tests/data/common-protocol/realisation-with-deps.json b/src/libstore-tests/data/common-protocol/realisation-with-deps.json new file mode 100644 index 00000000000..77148d14ca4 --- /dev/null +++ b/src/libstore-tests/data/common-protocol/realisation-with-deps.json @@ -0,0 +1,13 @@ +[ + { + "dependentRealisations": { + "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!quux": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" + }, + "id": "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [ + "asdf", + "qwer" + ] + } +] diff --git a/src/libstore-tests/data/common-protocol/realisation.bin b/src/libstore-tests/data/common-protocol/realisation.bin index 2176c6c4afd..3a0b2b2d8e3 100644 Binary files a/src/libstore-tests/data/common-protocol/realisation.bin and b/src/libstore-tests/data/common-protocol/realisation.bin differ diff --git a/src/libstore-tests/data/common-protocol/realisation.json b/src/libstore-tests/data/common-protocol/realisation.json new file mode 100644 index 00000000000..f9ff09dbb63 --- /dev/null +++ 
b/src/libstore-tests/data/common-protocol/realisation.json @@ -0,0 +1,17 @@ +[ + { + "dependentRealisations": {}, + "id": "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [] + }, + { + "dependentRealisations": {}, + "id": "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [ + "asdf", + "qwer" + ] + } +] diff --git a/src/libstore-tests/data/common-protocol/set.json b/src/libstore-tests/data/common-protocol/set.json new file mode 100644 index 00000000000..acd123082da --- /dev/null +++ b/src/libstore-tests/data/common-protocol/set.json @@ -0,0 +1,22 @@ +[ + [], + [ + "" + ], + [ + "", + "bar", + "foo" + ], + [ + [], + [ + "" + ], + [ + "", + "1", + "2" + ] + ] +] diff --git a/src/libstore-tests/data/common-protocol/store-path.json b/src/libstore-tests/data/common-protocol/store-path.json new file mode 100644 index 00000000000..16459245be8 --- /dev/null +++ b/src/libstore-tests/data/common-protocol/store-path.json @@ -0,0 +1,4 @@ +[ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" +] diff --git a/src/libstore-tests/data/common-protocol/string.json b/src/libstore-tests/data/common-protocol/string.json new file mode 100644 index 00000000000..d3db4f3b4ac --- /dev/null +++ b/src/libstore-tests/data/common-protocol/string.json @@ -0,0 +1,7 @@ +[ + "", + "hi", + "white rabbit", + "大白兔", + "oh no " +] diff --git a/src/libstore-tests/data/common-protocol/vector.json b/src/libstore-tests/data/common-protocol/vector.json new file mode 100644 index 00000000000..2b8cc1b3afe --- /dev/null +++ b/src/libstore-tests/data/common-protocol/vector.json @@ -0,0 +1,22 @@ +[ + [], + [ + "" + ], + [ + "", + "foo", + "bar" + ], + [ + [], + [ + "" + ], + [ + "", + "1", + "2" + ] + ] +] diff --git a/src/libstore-tests/data/content-address/nar.json 
b/src/libstore-tests/data/content-address/nar.json new file mode 100644 index 00000000000..511cb1c5cec --- /dev/null +++ b/src/libstore-tests/data/content-address/nar.json @@ -0,0 +1,4 @@ +{ + "hash": "sha256-9vLqj0XYoFfJVmoz+ZR02i5camYE1zYSFlDicwxvsKM=", + "method": "nar" +} diff --git a/src/libstore-tests/data/content-address/text.json b/src/libstore-tests/data/content-address/text.json new file mode 100644 index 00000000000..96da7874cf7 --- /dev/null +++ b/src/libstore-tests/data/content-address/text.json @@ -0,0 +1,4 @@ +{ + "hash": "sha256-8OTC92xYkW7CWPJGhRvqCR0U1CR6L8PhhpRGGxgW4Ts=", + "method": "text" +} diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes-defaults.json b/src/libstore-tests/data/derivation/ca/advanced-attributes-defaults.json index eb4bd4f3de6..781b4cb1413 100644 --- a/src/libstore-tests/data/derivation/ca/advanced-attributes-defaults.json +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes-defaults.json @@ -12,8 +12,10 @@ "outputHashMode": "recursive", "system": "my-system" }, - "inputDrvs": {}, - "inputSrcs": [], + "inputs": { + "drvs": {}, + "srcs": [] + }, "name": "advanced-attributes-defaults", "outputs": { "out": { @@ -22,5 +24,5 @@ } }, "system": "my-system", - "version": 3 + "version": 4 } diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs-defaults.json b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs-defaults.json index 3a4a3079b45..7437b51efb8 100644 --- a/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs-defaults.json +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs-defaults.json @@ -8,8 +8,10 @@ "dev": "/02qcpld1y6xhs5gz9bchpxaw0xdhmsp5dv88lh25r2ss44kh8dxz", "out": "/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9" }, - "inputDrvs": {}, - "inputSrcs": [], + "inputs": { + "drvs": {}, + "srcs": [] + }, "name": "advanced-attributes-structured-attrs-defaults", "outputs": { 
"dev": { @@ -33,5 +35,5 @@ "system": "my-system" }, "system": "my-system", - "version": 3 + "version": 4 } diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json index b10355af711..95122ad4184 100644 --- a/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json @@ -9,25 +9,27 @@ "dev": "/02qcpld1y6xhs5gz9bchpxaw0xdhmsp5dv88lh25r2ss44kh8dxz", "out": "/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9" }, - "inputDrvs": { - "j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv": { - "dynamicOutputs": {}, - "outputs": [ - "dev", - "out" - ] + "inputs": { + "drvs": { + "j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv": { + "dynamicOutputs": {}, + "outputs": [ + "dev", + "out" + ] + }, + "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv": { + "dynamicOutputs": {}, + "outputs": [ + "dev", + "out" + ] + } }, - "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv": { - "dynamicOutputs": {}, - "outputs": [ - "dev", - "out" - ] - } + "srcs": [ + "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv" + ] }, - "inputSrcs": [ - "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv" - ], "name": "advanced-attributes-structured-attrs", "outputs": { "bin": { @@ -67,7 +69,8 @@ "outputChecks": { "bin": { "disallowedReferences": [ - "/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g" + "/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g", + "dev" ], "disallowedRequisites": [ "/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8" @@ -82,7 +85,8 @@ "/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9" ], "allowedRequisites": [ - "/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z" + "/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z", + "bin" ] } }, @@ -101,5 +105,5 @@ "system": "my-system" }, "system": "my-system", - "version": 3 + "version": 4 } diff --git 
a/src/libstore-tests/data/derivation/ca/advanced-attributes.json b/src/libstore-tests/data/derivation/ca/advanced-attributes.json index d6688203660..6b77459bcb7 100644 --- a/src/libstore-tests/data/derivation/ca/advanced-attributes.json +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes.json @@ -11,9 +11,9 @@ "__sandboxProfile": "sandcastle", "allowSubstitutes": "", "allowedReferences": "/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9", - "allowedRequisites": "/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z", + "allowedRequisites": "/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z bin", "builder": "/bin/bash", - "disallowedReferences": "/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g", + "disallowedReferences": "/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g dev", "disallowedRequisites": "/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8", "exportReferencesGraph": "refs1 /164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9 refs2 /nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv", "impureEnvVars": "UNICORN", @@ -25,25 +25,27 @@ "requiredSystemFeatures": "rainbow uid-range", "system": "my-system" }, - "inputDrvs": { - "j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv": { - "dynamicOutputs": {}, - "outputs": [ - "dev", - "out" - ] + "inputs": { + "drvs": { + "j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv": { + "dynamicOutputs": {}, + "outputs": [ + "dev", + "out" + ] + }, + "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv": { + "dynamicOutputs": {}, + "outputs": [ + "dev", + "out" + ] + } }, - "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv": { - "dynamicOutputs": {}, - "outputs": [ - "dev", - "out" - ] - } + "srcs": [ + "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv" + ] }, - "inputSrcs": [ - "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv" - ], "name": "advanced-attributes", "outputs": { "out": { @@ -52,5 +54,5 @@ } }, "system": "my-system", - "version": 3 + "version": 4 } diff --git 
a/src/libstore-tests/data/derivation/ca/derivation-options/all_set.json b/src/libstore-tests/data/derivation/ca/derivation-options/all_set.json new file mode 100644 index 00000000000..8086c752ca0 --- /dev/null +++ b/src/libstore-tests/data/derivation/ca/derivation-options/all_set.json @@ -0,0 +1,69 @@ +{ + "additionalSandboxProfile": "sandcastle", + "allowLocalNetworking": true, + "allowSubstitutes": false, + "exportReferencesGraph": { + "refs1": [ + { + "drvPath": "j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv", + "output": "out" + } + ], + "refs2": [ + "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv" + ] + }, + "impureEnvVars": [ + "UNICORN" + ], + "impureHostDeps": [ + "/usr/bin/ditto" + ], + "noChroot": true, + "outputChecks": { + "forAllOutputs": { + "allowedReferences": [ + { + "drvPath": "j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv", + "output": "out" + } + ], + "allowedRequisites": [ + { + "drvPath": "self", + "output": "bin" + }, + { + "drvPath": "j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv", + "output": "dev" + } + ], + "disallowedReferences": [ + { + "drvPath": "self", + "output": "dev" + }, + { + "drvPath": "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv", + "output": "out" + } + ], + "disallowedRequisites": [ + { + "drvPath": "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv", + "output": "dev" + } + ], + "ignoreSelfRefs": true, + "maxClosureSize": null, + "maxSize": null + } + }, + "passAsFile": [], + "preferLocalBuild": true, + "requiredSystemFeatures": [ + "rainbow", + "uid-range" + ], + "unsafeDiscardReferences": {} +} diff --git a/src/libstore-tests/data/derivation/ca/derivation-options/structuredAttrs_all_set.json b/src/libstore-tests/data/derivation/ca/derivation-options/structuredAttrs_all_set.json new file mode 100644 index 00000000000..e29447b6a47 --- /dev/null +++ b/src/libstore-tests/data/derivation/ca/derivation-options/structuredAttrs_all_set.json @@ -0,0 +1,89 @@ +{ + "additionalSandboxProfile": "sandcastle", + "allowLocalNetworking": true, + "allowSubstitutes": 
false, + "exportReferencesGraph": { + "refs1": [ + { + "drvPath": "j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv", + "output": "out" + } + ], + "refs2": [ + "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv" + ] + }, + "impureEnvVars": [ + "UNICORN" + ], + "impureHostDeps": [ + "/usr/bin/ditto" + ], + "noChroot": true, + "outputChecks": { + "perOutput": { + "bin": { + "allowedReferences": null, + "allowedRequisites": null, + "disallowedReferences": [ + { + "drvPath": "self", + "output": "dev" + }, + { + "drvPath": "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv", + "output": "out" + } + ], + "disallowedRequisites": [ + { + "drvPath": "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv", + "output": "dev" + } + ], + "ignoreSelfRefs": false, + "maxClosureSize": null, + "maxSize": null + }, + "dev": { + "allowedReferences": null, + "allowedRequisites": null, + "disallowedReferences": [], + "disallowedRequisites": [], + "ignoreSelfRefs": false, + "maxClosureSize": 5909, + "maxSize": 789 + }, + "out": { + "allowedReferences": [ + { + "drvPath": "j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv", + "output": "out" + } + ], + "allowedRequisites": [ + { + "drvPath": "self", + "output": "bin" + }, + { + "drvPath": "j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv", + "output": "dev" + } + ], + "disallowedReferences": [], + "disallowedRequisites": [], + "ignoreSelfRefs": false, + "maxClosureSize": null, + "maxSize": null + } + } + }, + "passAsFile": [], + "preferLocalBuild": true, + "requiredSystemFeatures": [ + "rainbow", + "uid-range" + ], + "unsafeDiscardReferences": {} +} diff --git a/src/libstore-tests/data/derivation/ca/self-contained.json b/src/libstore-tests/data/derivation/ca/self-contained.json index 331beb7be26..c05710140cc 100644 --- a/src/libstore-tests/data/derivation/ca/self-contained.json +++ b/src/libstore-tests/data/derivation/ca/self-contained.json @@ -10,8 +10,10 @@ "out": "/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9", "system": "x86_64-linux" }, - "inputDrvs": {}, - "inputSrcs": [], + 
"inputs": { + "drvs": {}, + "srcs": [] + }, "name": "myname", "outputs": { "out": { @@ -20,5 +22,5 @@ } }, "system": "x86_64-linux", - "version": 3 + "version": 4 } diff --git a/src/libstore-tests/data/derivation/dyn-dep-derivation.json b/src/libstore-tests/data/derivation/dyn-dep-derivation.json index 1a9f54c5304..1793c5f2d1e 100644 --- a/src/libstore-tests/data/derivation/dyn-dep-derivation.json +++ b/src/libstore-tests/data/derivation/dyn-dep-derivation.json @@ -7,33 +7,35 @@ "env": { "BIG_BAD": "WOLF" }, - "inputDrvs": { - "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv": { - "dynamicOutputs": { - "cat": { - "dynamicOutputs": {}, - "outputs": [ - "kitten" - ] + "inputs": { + "drvs": { + "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv": { + "dynamicOutputs": { + "cat": { + "dynamicOutputs": {}, + "outputs": [ + "kitten" + ] + }, + "goose": { + "dynamicOutputs": {}, + "outputs": [ + "gosling" + ] + } }, - "goose": { - "dynamicOutputs": {}, - "outputs": [ - "gosling" - ] - } - }, - "outputs": [ - "cat", - "dog" - ] - } + "outputs": [ + "cat", + "dog" + ] + } + }, + "srcs": [ + "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1" + ] }, - "inputSrcs": [ - "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1" - ], "name": "dyn-dep-derivation", "outputs": {}, "system": "wasm-sel4", - "version": 3 + "version": 4 } diff --git a/src/libstore-tests/data/derivation/ia/advanced-attributes-defaults.json b/src/libstore-tests/data/derivation/ia/advanced-attributes-defaults.json index 0fa543f214a..898762123a3 100644 --- a/src/libstore-tests/data/derivation/ia/advanced-attributes-defaults.json +++ b/src/libstore-tests/data/derivation/ia/advanced-attributes-defaults.json @@ -10,8 +10,10 @@ "out": "/nix/store/1qsc7svv43m4dw2prh6mvyf7cai5czji-advanced-attributes-defaults", "system": "my-system" }, - "inputDrvs": {}, - "inputSrcs": [], + "inputs": { + "drvs": {}, + "srcs": [] + }, "name": "advanced-attributes-defaults", "outputs": { "out": { @@ -19,5 +21,5 @@ } }, "system": "my-system", - "version": 3 + "version": 4 } 
diff --git a/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs-defaults.json b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs-defaults.json index e02392ea131..c51095986d4 100644 --- a/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs-defaults.json +++ b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs-defaults.json @@ -8,8 +8,10 @@ "dev": "/nix/store/8bazivnbipbyi569623skw5zm91z6kc2-advanced-attributes-structured-attrs-defaults-dev", "out": "/nix/store/f8f8nvnx32bxvyxyx2ff7akbvwhwd9dw-advanced-attributes-structured-attrs-defaults" }, - "inputDrvs": {}, - "inputSrcs": [], + "inputs": { + "drvs": {}, + "srcs": [] + }, "name": "advanced-attributes-structured-attrs-defaults", "outputs": { "dev": { @@ -29,5 +31,5 @@ "system": "my-system" }, "system": "my-system", - "version": 3 + "version": 4 } diff --git a/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json index 9230b06b629..bbd68e08705 100644 --- a/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json +++ b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json @@ -5,39 +5,41 @@ ], "builder": "/bin/bash", "env": { - "bin": "/nix/store/33qms3h55wlaspzba3brlzlrm8m2239g-advanced-attributes-structured-attrs-bin", - "dev": "/nix/store/wyfgwsdi8rs851wmy1xfzdxy7y5vrg5l-advanced-attributes-structured-attrs-dev", - "out": "/nix/store/7cxy4zx1vqc885r4jl2l64pymqbdmhii-advanced-attributes-structured-attrs" + "bin": "/nix/store/cnpasdljgkhnwaf78cf3qygcp4qbki1c-advanced-attributes-structured-attrs-bin", + "dev": "/nix/store/ijq6mwpa9jbnpnl33qldfqihrr38kprx-advanced-attributes-structured-attrs-dev", + "out": "/nix/store/h1vh648d3p088kdimy0r8ngpfx7c3nzw-advanced-attributes-structured-attrs" }, - "inputDrvs": { - "afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv": { - 
"dynamicOutputs": {}, - "outputs": [ - "dev", - "out" - ] + "inputs": { + "drvs": { + "afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv": { + "dynamicOutputs": {}, + "outputs": [ + "dev", + "out" + ] + }, + "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv": { + "dynamicOutputs": {}, + "outputs": [ + "dev", + "out" + ] + } }, - "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv": { - "dynamicOutputs": {}, - "outputs": [ - "dev", - "out" - ] - } + "srcs": [ + "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv" + ] }, - "inputSrcs": [ - "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv" - ], "name": "advanced-attributes-structured-attrs", "outputs": { "bin": { - "path": "33qms3h55wlaspzba3brlzlrm8m2239g-advanced-attributes-structured-attrs-bin" + "path": "cnpasdljgkhnwaf78cf3qygcp4qbki1c-advanced-attributes-structured-attrs-bin" }, "dev": { - "path": "wyfgwsdi8rs851wmy1xfzdxy7y5vrg5l-advanced-attributes-structured-attrs-dev" + "path": "ijq6mwpa9jbnpnl33qldfqihrr38kprx-advanced-attributes-structured-attrs-dev" }, "out": { - "path": "7cxy4zx1vqc885r4jl2l64pymqbdmhii-advanced-attributes-structured-attrs" + "path": "h1vh648d3p088kdimy0r8ngpfx7c3nzw-advanced-attributes-structured-attrs" } }, "structuredAttrs": { @@ -64,7 +66,8 @@ "outputChecks": { "bin": { "disallowedReferences": [ - "/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar" + "/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar", + "dev" ], "disallowedRequisites": [ "/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev" @@ -79,7 +82,8 @@ "/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo" ], "allowedRequisites": [ - "/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev" + "/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev", + "bin" ] } }, @@ -96,5 +100,5 @@ "system": "my-system" }, "system": "my-system", - "version": 3 + "version": 4 } diff --git a/src/libstore-tests/data/derivation/ia/advanced-attributes.json b/src/libstore-tests/data/derivation/ia/advanced-attributes.json index ba5911c911a..e2de9431b4b 100644 --- 
a/src/libstore-tests/data/derivation/ia/advanced-attributes.json +++ b/src/libstore-tests/data/derivation/ia/advanced-attributes.json @@ -11,43 +11,45 @@ "__sandboxProfile": "sandcastle", "allowSubstitutes": "", "allowedReferences": "/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo", - "allowedRequisites": "/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev", + "allowedRequisites": "/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev bin", "builder": "/bin/bash", - "disallowedReferences": "/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar", + "disallowedReferences": "/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar dev", "disallowedRequisites": "/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev", "exportReferencesGraph": "refs1 /nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo refs2 /nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv", "impureEnvVars": "UNICORN", "name": "advanced-attributes", - "out": "/nix/store/wyhpwd748pns4k7svh48wdrc8kvjk0ra-advanced-attributes", + "out": "/nix/store/ymqmybkq5j4nd1xplw6ccdpbjnfi017v-advanced-attributes", "preferLocalBuild": "1", "requiredSystemFeatures": "rainbow uid-range", "system": "my-system" }, - "inputDrvs": { - "afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv": { - "dynamicOutputs": {}, - "outputs": [ - "dev", - "out" - ] + "inputs": { + "drvs": { + "afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv": { + "dynamicOutputs": {}, + "outputs": [ + "dev", + "out" + ] + }, + "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv": { + "dynamicOutputs": {}, + "outputs": [ + "dev", + "out" + ] + } }, - "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv": { - "dynamicOutputs": {}, - "outputs": [ - "dev", - "out" - ] - } + "srcs": [ + "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv" + ] }, - "inputSrcs": [ - "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv" - ], "name": "advanced-attributes", "outputs": { "out": { - "path": "wyhpwd748pns4k7svh48wdrc8kvjk0ra-advanced-attributes" + "path": "ymqmybkq5j4nd1xplw6ccdpbjnfi017v-advanced-attributes" } }, "system": "my-system", - "version": 
3 + "version": 4 } diff --git a/src/libstore-tests/data/derivation/ia/derivation-options/all_set.json b/src/libstore-tests/data/derivation/ia/derivation-options/all_set.json new file mode 100644 index 00000000000..2e1c848daf4 --- /dev/null +++ b/src/libstore-tests/data/derivation/ia/derivation-options/all_set.json @@ -0,0 +1,54 @@ +{ + "additionalSandboxProfile": "sandcastle", + "allowLocalNetworking": true, + "allowSubstitutes": false, + "exportReferencesGraph": { + "refs1": [ + "p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo" + ], + "refs2": [ + "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv" + ] + }, + "impureEnvVars": [ + "UNICORN" + ], + "impureHostDeps": [ + "/usr/bin/ditto" + ], + "noChroot": true, + "outputChecks": { + "forAllOutputs": { + "allowedReferences": [ + "p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo" + ], + "allowedRequisites": [ + { + "drvPath": "self", + "output": "bin" + }, + "z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev" + ], + "disallowedReferences": [ + { + "drvPath": "self", + "output": "dev" + }, + "r5cff30838majxk5mp3ip2diffi8vpaj-bar" + ], + "disallowedRequisites": [ + "9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev" + ], + "ignoreSelfRefs": true, + "maxClosureSize": null, + "maxSize": null + } + }, + "passAsFile": [], + "preferLocalBuild": true, + "requiredSystemFeatures": [ + "rainbow", + "uid-range" + ], + "unsafeDiscardReferences": {} +} diff --git a/src/libstore-tests/data/derivation/ia/derivation-options/defaults.json b/src/libstore-tests/data/derivation/ia/derivation-options/defaults.json new file mode 100644 index 00000000000..79a68989cc3 --- /dev/null +++ b/src/libstore-tests/data/derivation/ia/derivation-options/defaults.json @@ -0,0 +1,24 @@ +{ + "additionalSandboxProfile": "", + "allowLocalNetworking": false, + "allowSubstitutes": true, + "exportReferencesGraph": {}, + "impureEnvVars": [], + "impureHostDeps": [], + "noChroot": false, + "outputChecks": { + "forAllOutputs": { + "allowedReferences": null, + "allowedRequisites": null, + "disallowedReferences": 
[], + "disallowedRequisites": [], + "ignoreSelfRefs": true, + "maxClosureSize": null, + "maxSize": null + } + }, + "passAsFile": [], + "preferLocalBuild": false, + "requiredSystemFeatures": [], + "unsafeDiscardReferences": {} +} diff --git a/src/libstore-tests/data/derivation/ia/derivation-options/structuredAttrs_all_set.json b/src/libstore-tests/data/derivation/ia/derivation-options/structuredAttrs_all_set.json new file mode 100644 index 00000000000..a29699b5dd6 --- /dev/null +++ b/src/libstore-tests/data/derivation/ia/derivation-options/structuredAttrs_all_set.json @@ -0,0 +1,74 @@ +{ + "additionalSandboxProfile": "sandcastle", + "allowLocalNetworking": true, + "allowSubstitutes": false, + "exportReferencesGraph": { + "refs1": [ + "p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo" + ], + "refs2": [ + "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv" + ] + }, + "impureEnvVars": [ + "UNICORN" + ], + "impureHostDeps": [ + "/usr/bin/ditto" + ], + "noChroot": true, + "outputChecks": { + "perOutput": { + "bin": { + "allowedReferences": null, + "allowedRequisites": null, + "disallowedReferences": [ + { + "drvPath": "self", + "output": "dev" + }, + "r5cff30838majxk5mp3ip2diffi8vpaj-bar" + ], + "disallowedRequisites": [ + "9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev" + ], + "ignoreSelfRefs": false, + "maxClosureSize": null, + "maxSize": null + }, + "dev": { + "allowedReferences": null, + "allowedRequisites": null, + "disallowedReferences": [], + "disallowedRequisites": [], + "ignoreSelfRefs": false, + "maxClosureSize": 5909, + "maxSize": 789 + }, + "out": { + "allowedReferences": [ + "p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo" + ], + "allowedRequisites": [ + { + "drvPath": "self", + "output": "bin" + }, + "z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev" + ], + "disallowedReferences": [], + "disallowedRequisites": [], + "ignoreSelfRefs": false, + "maxClosureSize": null, + "maxSize": null + } + } + }, + "passAsFile": [], + "preferLocalBuild": true, + "requiredSystemFeatures": [ + "rainbow", + "uid-range" + 
], + "unsafeDiscardReferences": {} +} diff --git a/src/libstore-tests/data/derivation/ia/derivation-options/structuredAttrs_defaults.json b/src/libstore-tests/data/derivation/ia/derivation-options/structuredAttrs_defaults.json new file mode 100644 index 00000000000..d898d85f5f8 --- /dev/null +++ b/src/libstore-tests/data/derivation/ia/derivation-options/structuredAttrs_defaults.json @@ -0,0 +1,16 @@ +{ + "additionalSandboxProfile": "", + "allowLocalNetworking": false, + "allowSubstitutes": true, + "exportReferencesGraph": {}, + "impureEnvVars": [], + "impureHostDeps": [], + "noChroot": false, + "outputChecks": { + "perOutput": {} + }, + "passAsFile": [], + "preferLocalBuild": false, + "requiredSystemFeatures": [], + "unsafeDiscardReferences": {} +} diff --git a/src/libstore-tests/data/derivation/invariants/bad-depends-on-drv-pre.json b/src/libstore-tests/data/derivation/invariants/bad-depends-on-drv-pre.json new file mode 100644 index 00000000000..8454cf548fc --- /dev/null +++ b/src/libstore-tests/data/derivation/invariants/bad-depends-on-drv-pre.json @@ -0,0 +1,27 @@ +{ + "args": [], + "builder": "/bin/sh", + "env": { + "__doc": "InputAddressed throws when should be deferred", + "out": "" + }, + "inputs": { + "drvs": { + "lg4c4b8r9hlczwprl6kgnzfd9mc1xmkk-dependency.drv": { + "dynamicOutputs": {}, + "outputs": [ + "out" + ] + } + }, + "srcs": [] + }, + "name": "depends-on-drv", + "outputs": { + "out": { + "path": "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-wrong-name" + } + }, + "system": "x86_64-linux", + "version": 4 +} diff --git a/src/libstore-tests/data/derivation/invariants/bad-env-var.json b/src/libstore-tests/data/derivation/invariants/bad-env-var.json new file mode 100644 index 00000000000..cb0c9492fe8 --- /dev/null +++ b/src/libstore-tests/data/derivation/invariants/bad-env-var.json @@ -0,0 +1,18 @@ +{ + "args": [], + "builder": "/bin/sh", + "env": { + "__doc": "Wrong env var value throws error", + "out": "/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-wrong-name" + 
}, + "inputs": { + "drvs": {}, + "srcs": [] + }, + "name": "bad-env-var", + "outputs": { + "out": {} + }, + "system": "x86_64-linux", + "version": 4 +} diff --git a/src/libstore-tests/data/derivation/invariants/bad-path.json b/src/libstore-tests/data/derivation/invariants/bad-path.json new file mode 100644 index 00000000000..688f2d4e696 --- /dev/null +++ b/src/libstore-tests/data/derivation/invariants/bad-path.json @@ -0,0 +1,20 @@ +{ + "args": [], + "builder": "/bin/sh", + "env": { + "__doc": "Wrong InputAddressed path throws error", + "out": "/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-wrong-name" + }, + "inputs": { + "drvs": {}, + "srcs": [] + }, + "name": "bad-path", + "outputs": { + "out": { + "path": "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-wrong-name" + } + }, + "system": "x86_64-linux", + "version": 4 +} diff --git a/src/libstore-tests/data/derivation/invariants/depends-on-drv-pre.json b/src/libstore-tests/data/derivation/invariants/depends-on-drv-pre.json new file mode 100644 index 00000000000..d782cc756c5 --- /dev/null +++ b/src/libstore-tests/data/derivation/invariants/depends-on-drv-pre.json @@ -0,0 +1,25 @@ +{ + "args": [], + "builder": "/bin/sh", + "env": { + "__doc": "Deferred stays deferred with CA dependencies", + "out": "" + }, + "inputs": { + "drvs": { + "lg4c4b8r9hlczwprl6kgnzfd9mc1xmkk-dependency.drv": { + "dynamicOutputs": {}, + "outputs": [ + "out" + ] + } + }, + "srcs": [] + }, + "name": "depends-on-drv", + "outputs": { + "out": {} + }, + "system": "x86_64-linux", + "version": 4 +} diff --git a/src/libstore-tests/data/derivation/invariants/filled-in-deferred-empty-env-var-post.json b/src/libstore-tests/data/derivation/invariants/filled-in-deferred-empty-env-var-post.json new file mode 100644 index 00000000000..c5abdf69298 --- /dev/null +++ b/src/libstore-tests/data/derivation/invariants/filled-in-deferred-empty-env-var-post.json @@ -0,0 +1,20 @@ +{ + "args": [], + "builder": "/bin/sh", + "env": { + "__doc": "Fill in deferred output with empty env 
var", + "out": "/nix/store/bilpz1nq8qi9r3bzsp72n34yjgqg43ws-filled-in-deferred-empty-env-var" + }, + "inputs": { + "drvs": {}, + "srcs": [] + }, + "name": "filled-in-deferred-empty-env-var", + "outputs": { + "out": { + "path": "bilpz1nq8qi9r3bzsp72n34yjgqg43ws-filled-in-deferred-empty-env-var" + } + }, + "system": "x86_64-linux", + "version": 4 +} diff --git a/src/libstore-tests/data/derivation/invariants/filled-in-deferred-empty-env-var-pre.json b/src/libstore-tests/data/derivation/invariants/filled-in-deferred-empty-env-var-pre.json new file mode 100644 index 00000000000..bc5338925ea --- /dev/null +++ b/src/libstore-tests/data/derivation/invariants/filled-in-deferred-empty-env-var-pre.json @@ -0,0 +1,18 @@ +{ + "args": [], + "builder": "/bin/sh", + "env": { + "__doc": "Fill in deferred output with empty env var", + "out": "" + }, + "inputs": { + "drvs": {}, + "srcs": [] + }, + "name": "filled-in-deferred-empty-env-var", + "outputs": { + "out": {} + }, + "system": "x86_64-linux", + "version": 4 +} diff --git a/src/libstore-tests/data/derivation/invariants/filled-in-deferred-no-env-var-post.json b/src/libstore-tests/data/derivation/invariants/filled-in-deferred-no-env-var-post.json new file mode 100644 index 00000000000..709d7bca0de --- /dev/null +++ b/src/libstore-tests/data/derivation/invariants/filled-in-deferred-no-env-var-post.json @@ -0,0 +1,20 @@ +{ + "args": [], + "builder": "/bin/sh", + "env": { + "__doc": "Fill in deferred with missing env var", + "out": "/nix/store/wpk9qrgg77fyswhailap0gicgw98izx9-filled-in-deferred-no-env-var" + }, + "inputs": { + "drvs": {}, + "srcs": [] + }, + "name": "filled-in-deferred-no-env-var", + "outputs": { + "out": { + "path": "wpk9qrgg77fyswhailap0gicgw98izx9-filled-in-deferred-no-env-var" + } + }, + "system": "x86_64-linux", + "version": 4 +} diff --git a/src/libstore-tests/data/derivation/invariants/filled-in-deferred-no-env-var-pre.json b/src/libstore-tests/data/derivation/invariants/filled-in-deferred-no-env-var-pre.json 
new file mode 100644 index 00000000000..194e33086a0 --- /dev/null +++ b/src/libstore-tests/data/derivation/invariants/filled-in-deferred-no-env-var-pre.json @@ -0,0 +1,17 @@ +{ + "args": [], + "builder": "/bin/sh", + "env": { + "__doc": "Fill in deferred with missing env var" + }, + "inputs": { + "drvs": {}, + "srcs": [] + }, + "name": "filled-in-deferred-no-env-var", + "outputs": { + "out": {} + }, + "system": "x86_64-linux", + "version": 4 +} diff --git a/src/libstore-tests/data/derivation/invariants/filled-in-idempotent.json b/src/libstore-tests/data/derivation/invariants/filled-in-idempotent.json new file mode 100644 index 00000000000..9b99fb81216 --- /dev/null +++ b/src/libstore-tests/data/derivation/invariants/filled-in-idempotent.json @@ -0,0 +1,20 @@ +{ + "args": [], + "builder": "/bin/sh", + "env": { + "__doc": "Correct path stays unchanged", + "out": "/nix/store/w4bk7hpyxzgy2gx8fsa8f952435pll3i-filled-in-already" + }, + "inputs": { + "drvs": {}, + "srcs": [] + }, + "name": "filled-in-already", + "outputs": { + "out": { + "path": "w4bk7hpyxzgy2gx8fsa8f952435pll3i-filled-in-already" + } + }, + "system": "x86_64-linux", + "version": 4 +} diff --git a/src/libstore-tests/data/derivation/output-caFixedFlat.json b/src/libstore-tests/data/derivation/output-caFixedFlat.json index e6a0123f65c..86b4d2029ca 100644 --- a/src/libstore-tests/data/derivation/output-caFixedFlat.json +++ b/src/libstore-tests/data/derivation/output-caFixedFlat.json @@ -1,5 +1,4 @@ { - "hash": "894517c9163c896ec31a2adbd33c0681fd5f45b2c0ef08a64c92a03fb97f390f", - "hashAlgo": "sha256", + "hash": "sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8=", "method": "flat" } diff --git a/src/libstore-tests/data/derivation/output-caFixedNAR.json b/src/libstore-tests/data/derivation/output-caFixedNAR.json index b57e065a934..6f965457229 100644 --- a/src/libstore-tests/data/derivation/output-caFixedNAR.json +++ b/src/libstore-tests/data/derivation/output-caFixedNAR.json @@ -1,5 +1,4 @@ { - "hash": 
"894517c9163c896ec31a2adbd33c0681fd5f45b2c0ef08a64c92a03fb97f390f", - "hashAlgo": "sha256", + "hash": "sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8=", "method": "nar" } diff --git a/src/libstore-tests/data/derivation/output-caFixedText.json b/src/libstore-tests/data/derivation/output-caFixedText.json index 84778509ee2..2611e75dac9 100644 --- a/src/libstore-tests/data/derivation/output-caFixedText.json +++ b/src/libstore-tests/data/derivation/output-caFixedText.json @@ -1,5 +1,4 @@ { - "hash": "894517c9163c896ec31a2adbd33c0681fd5f45b2c0ef08a64c92a03fb97f390f", - "hashAlgo": "sha256", + "hash": "sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8=", "method": "text" } diff --git a/src/libstore-tests/data/derivation/simple-derivation.json b/src/libstore-tests/data/derivation/simple-derivation.json index 41a049aef77..04129a096e0 100644 --- a/src/libstore-tests/data/derivation/simple-derivation.json +++ b/src/libstore-tests/data/derivation/simple-derivation.json @@ -7,20 +7,22 @@ "env": { "BIG_BAD": "WOLF" }, - "inputDrvs": { - "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv": { - "dynamicOutputs": {}, - "outputs": [ - "cat", - "dog" - ] - } + "inputs": { + "drvs": { + "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv": { + "dynamicOutputs": {}, + "outputs": [ + "cat", + "dog" + ] + } + }, + "srcs": [ + "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1" + ] }, - "inputSrcs": [ - "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1" - ], "name": "simple-derivation", "outputs": {}, "system": "wasm-sel4", - "version": 3 + "version": 4 } diff --git a/src/libstore-tests/data/dummy-store/empty.json b/src/libstore-tests/data/dummy-store/empty.json new file mode 100644 index 00000000000..93bec5153b4 --- /dev/null +++ b/src/libstore-tests/data/dummy-store/empty.json @@ -0,0 +1,8 @@ +{ + "buildTrace": {}, + "config": { + "store": "/nix/store" + }, + "contents": {}, + "derivations": {} +} diff --git a/src/libstore-tests/data/dummy-store/one-derivation.json 
b/src/libstore-tests/data/dummy-store/one-derivation.json new file mode 100644 index 00000000000..a3e3391e6e0 --- /dev/null +++ b/src/libstore-tests/data/dummy-store/one-derivation.json @@ -0,0 +1,22 @@ +{ + "buildTrace": {}, + "config": { + "store": "/nix/store" + }, + "contents": {}, + "derivations": { + "rlqjbbb65ggcx9hy577hvnn929wz1aj0-foo.drv": { + "args": [], + "builder": "", + "env": {}, + "inputs": { + "drvs": {}, + "srcs": [] + }, + "name": "foo", + "outputs": {}, + "system": "", + "version": 4 + } + } +} diff --git a/src/libstore-tests/data/dummy-store/one-flat-file.json b/src/libstore-tests/data/dummy-store/one-flat-file.json new file mode 100644 index 00000000000..804bbf07da6 --- /dev/null +++ b/src/libstore-tests/data/dummy-store/one-flat-file.json @@ -0,0 +1,31 @@ +{ + "buildTrace": {}, + "config": { + "store": "/nix/store" + }, + "contents": { + "5hizn7xyyrhxr0k2magvxl5ccvk0ci9n-my-file": { + "contents": { + "contents": "asdf", + "executable": false, + "type": "regular" + }, + "info": { + "ca": { + "hash": "sha256-f1eduuSIYC1BofXA1tycF79Ai2NSMJQtUErx5DxLYSU=", + "method": "nar" + }, + "deriver": null, + "narHash": "sha256-f1eduuSIYC1BofXA1tycF79Ai2NSMJQtUErx5DxLYSU=", + "narSize": 120, + "references": [], + "registrationTime": null, + "signatures": [], + "storeDir": "/nix/store", + "ultimate": false, + "version": 2 + } + } + }, + "derivations": {} +} diff --git a/src/libstore-tests/data/dummy-store/one-realisation.json b/src/libstore-tests/data/dummy-store/one-realisation.json new file mode 100644 index 00000000000..b5c8b8c5621 --- /dev/null +++ b/src/libstore-tests/data/dummy-store/one-realisation.json @@ -0,0 +1,16 @@ +{ + "buildTrace": { + "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=": { + "out": { + "dependentRealisations": {}, + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [] + } + } + }, + "config": { + "store": "/nix/store" + }, + "contents": {}, + "derivations": {} +} diff --git 
a/src/libstore-tests/data/nar-info/impure.json b/src/libstore-tests/data/nar-info/json-1/impure.json similarity index 92% rename from src/libstore-tests/data/nar-info/impure.json rename to src/libstore-tests/data/nar-info/json-1/impure.json index bb9791a6ace..c6fafe13daa 100644 --- a/src/libstore-tests/data/nar-info/impure.json +++ b/src/libstore-tests/data/nar-info/json-1/impure.json @@ -15,6 +15,8 @@ "asdf", "qwer" ], + "storeDir": "/nix/store", "ultimate": true, - "url": "nar/1w1fff338fvdw53sqgamddn1b2xgds473pv6y13gizdbqjv4i5p3.nar.xz" + "url": "nar/1w1fff338fvdw53sqgamddn1b2xgds473pv6y13gizdbqjv4i5p3.nar.xz", + "version": 1 } diff --git a/src/libstore-tests/data/nar-info/json-1/pure.json b/src/libstore-tests/data/nar-info/json-1/pure.json new file mode 100644 index 00000000000..f0424ae571c --- /dev/null +++ b/src/libstore-tests/data/nar-info/json-1/pure.json @@ -0,0 +1,11 @@ +{ + "ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh", + "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narSize": 34878, + "references": [ + "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo" + ], + "storeDir": "/nix/store", + "version": 1 +} diff --git a/src/libstore-tests/data/nar-info/pure.json b/src/libstore-tests/data/nar-info/json-1/pure_noversion.json similarity index 100% rename from src/libstore-tests/data/nar-info/pure.json rename to src/libstore-tests/data/nar-info/json-1/pure_noversion.json diff --git a/src/libstore-tests/data/nar-info/json-2/impure.json b/src/libstore-tests/data/nar-info/json-2/impure.json new file mode 100644 index 00000000000..b7b9f511827 --- /dev/null +++ b/src/libstore-tests/data/nar-info/json-2/impure.json @@ -0,0 +1,25 @@ +{ + "ca": { + "hash": "sha256-EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM=", + "method": "nar" + }, + "compression": "xz", + "deriver": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "downloadHash": 
"sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "downloadSize": 4029176, + "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narSize": 34878, + "references": [ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "n5wkd9frr45pa74if5gpz9j7mifg27fh-foo" + ], + "registrationTime": 23423, + "signatures": [ + "asdf", + "qwer" + ], + "storeDir": "/nix/store", + "ultimate": true, + "url": "nar/1w1fff338fvdw53sqgamddn1b2xgds473pv6y13gizdbqjv4i5p3.nar.xz", + "version": 2 +} diff --git a/src/libstore-tests/data/nar-info/json-2/pure.json b/src/libstore-tests/data/nar-info/json-2/pure.json new file mode 100644 index 00000000000..0350349a9d8 --- /dev/null +++ b/src/libstore-tests/data/nar-info/json-2/pure.json @@ -0,0 +1,14 @@ +{ + "ca": { + "hash": "sha256-EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM=", + "method": "nar" + }, + "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narSize": 34878, + "references": [ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "n5wkd9frr45pa74if5gpz9j7mifg27fh-foo" + ], + "storeDir": "/nix/store", + "version": 2 +} diff --git a/src/libstore-tests/data/path-info/empty_impure.json b/src/libstore-tests/data/path-info/json-1/empty_impure.json similarity index 74% rename from src/libstore-tests/data/path-info/empty_impure.json rename to src/libstore-tests/data/path-info/json-1/empty_impure.json index be982dcef85..eb262899a60 100644 --- a/src/libstore-tests/data/path-info/empty_impure.json +++ b/src/libstore-tests/data/path-info/json-1/empty_impure.json @@ -6,5 +6,7 @@ "references": [], "registrationTime": null, "signatures": [], - "ultimate": false + "storeDir": "/nix/store", + "ultimate": false, + "version": 1 } diff --git a/src/libstore-tests/data/path-info/empty_pure.json b/src/libstore-tests/data/path-info/json-1/empty_pure.json similarity index 61% rename from src/libstore-tests/data/path-info/empty_pure.json rename to src/libstore-tests/data/path-info/json-1/empty_pure.json index 10d9f508a36..07fdb2db9c3 100644 
--- a/src/libstore-tests/data/path-info/empty_pure.json +++ b/src/libstore-tests/data/path-info/json-1/empty_pure.json @@ -2,5 +2,7 @@ "ca": null, "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", "narSize": 0, - "references": [] + "references": [], + "storeDir": "/nix/store", + "version": 1 } diff --git a/src/libstore-tests/data/path-info/impure.json b/src/libstore-tests/data/path-info/json-1/impure.json similarity index 87% rename from src/libstore-tests/data/path-info/impure.json rename to src/libstore-tests/data/path-info/json-1/impure.json index 0c452cc4930..04d1dedc2af 100644 --- a/src/libstore-tests/data/path-info/impure.json +++ b/src/libstore-tests/data/path-info/json-1/impure.json @@ -12,5 +12,7 @@ "asdf", "qwer" ], - "ultimate": true + "storeDir": "/nix/store", + "ultimate": true, + "version": 1 } diff --git a/src/libstore-tests/data/path-info/json-1/pure.json b/src/libstore-tests/data/path-info/json-1/pure.json new file mode 100644 index 00000000000..f0424ae571c --- /dev/null +++ b/src/libstore-tests/data/path-info/json-1/pure.json @@ -0,0 +1,11 @@ +{ + "ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh", + "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narSize": 34878, + "references": [ + "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo" + ], + "storeDir": "/nix/store", + "version": 1 +} diff --git a/src/libstore-tests/data/path-info/pure.json b/src/libstore-tests/data/path-info/json-1/pure_noversion.json similarity index 100% rename from src/libstore-tests/data/path-info/pure.json rename to src/libstore-tests/data/path-info/json-1/pure_noversion.json diff --git a/src/libstore-tests/data/path-info/json-2/empty_impure.json b/src/libstore-tests/data/path-info/json-2/empty_impure.json new file mode 100644 index 00000000000..d22fbf7ec52 --- /dev/null +++ b/src/libstore-tests/data/path-info/json-2/empty_impure.json @@ -0,0 +1,12 @@ +{ + "ca": 
null, + "deriver": null, + "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narSize": 0, + "references": [], + "registrationTime": null, + "signatures": [], + "storeDir": "/nix/store", + "ultimate": false, + "version": 2 +} diff --git a/src/libstore-tests/data/path-info/json-2/empty_pure.json b/src/libstore-tests/data/path-info/json-2/empty_pure.json new file mode 100644 index 00000000000..9d50181da71 --- /dev/null +++ b/src/libstore-tests/data/path-info/json-2/empty_pure.json @@ -0,0 +1,8 @@ +{ + "ca": null, + "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narSize": 0, + "references": [], + "storeDir": "/nix/store", + "version": 2 +} diff --git a/src/libstore-tests/data/path-info/json-2/impure.json b/src/libstore-tests/data/path-info/json-2/impure.json new file mode 100644 index 00000000000..bed67610b1b --- /dev/null +++ b/src/libstore-tests/data/path-info/json-2/impure.json @@ -0,0 +1,21 @@ +{ + "ca": { + "hash": "sha256-EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM=", + "method": "nar" + }, + "deriver": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narSize": 34878, + "references": [ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "n5wkd9frr45pa74if5gpz9j7mifg27fh-foo" + ], + "registrationTime": 23423, + "signatures": [ + "asdf", + "qwer" + ], + "storeDir": "/nix/store", + "ultimate": true, + "version": 2 +} diff --git a/src/libstore-tests/data/path-info/json-2/pure.json b/src/libstore-tests/data/path-info/json-2/pure.json new file mode 100644 index 00000000000..0350349a9d8 --- /dev/null +++ b/src/libstore-tests/data/path-info/json-2/pure.json @@ -0,0 +1,14 @@ +{ + "ca": { + "hash": "sha256-EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM=", + "method": "nar" + }, + "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narSize": 34878, + "references": [ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "n5wkd9frr45pa74if5gpz9j7mifg27fh-foo" + ], + 
"storeDir": "/nix/store", + "version": 2 +} diff --git a/src/libstore-tests/data/serve-protocol/build-result-2.2.json b/src/libstore-tests/data/serve-protocol/build-result-2.2.json new file mode 100644 index 00000000000..029bcb5a835 --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/build-result-2.2.json @@ -0,0 +1,28 @@ +[ + { + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "OutputRejected", + "stopTime": 0, + "success": false, + "timesBuilt": 0 + }, + { + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "NotDeterministic", + "stopTime": 0, + "success": false, + "timesBuilt": 0 + }, + { + "builtOutputs": {}, + "startTime": 0, + "status": "Built", + "stopTime": 0, + "success": true, + "timesBuilt": 0 + } +] diff --git a/src/libstore-tests/data/serve-protocol/build-result-2.3.json b/src/libstore-tests/data/serve-protocol/build-result-2.3.json new file mode 100644 index 00000000000..be14b094726 --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/build-result-2.3.json @@ -0,0 +1,28 @@ +[ + { + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "OutputRejected", + "stopTime": 0, + "success": false, + "timesBuilt": 0 + }, + { + "errorMsg": "no idea why", + "isNonDeterministic": true, + "startTime": 30, + "status": "NotDeterministic", + "stopTime": 50, + "success": false, + "timesBuilt": 3 + }, + { + "builtOutputs": {}, + "startTime": 30, + "status": "Built", + "stopTime": 50, + "success": true, + "timesBuilt": 0 + } +] diff --git a/src/libstore-tests/data/serve-protocol/build-result-2.6.json b/src/libstore-tests/data/serve-protocol/build-result-2.6.json new file mode 100644 index 00000000000..30a8e82e926 --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/build-result-2.6.json @@ -0,0 +1,41 @@ +[ + { + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "OutputRejected", + "stopTime": 0, + 
"success": false, + "timesBuilt": 0 + }, + { + "errorMsg": "no idea why", + "isNonDeterministic": true, + "startTime": 30, + "status": "NotDeterministic", + "stopTime": 50, + "success": false, + "timesBuilt": 3 + }, + { + "builtOutputs": { + "bar": { + "dependentRealisations": {}, + "id": "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!bar", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "signatures": [] + }, + "foo": { + "dependentRealisations": {}, + "id": "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!foo", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [] + } + }, + "startTime": 30, + "status": "Built", + "stopTime": 50, + "success": true, + "timesBuilt": 1 + } +] diff --git a/src/libstore-tests/data/serve-protocol/content-address.json b/src/libstore-tests/data/serve-protocol/content-address.json new file mode 100644 index 00000000000..3c63e6d9bbd --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/content-address.json @@ -0,0 +1,14 @@ +[ + { + "hash": "sha256-+Xc9Ll6mcPltwaewrk/BAQ56Y3G5T//wzhKUc0zrYu0=", + "method": "text" + }, + { + "hash": "sha1-gGemBoenViNZM3hiwqns/Fgzqwo=", + "method": "flat" + }, + { + "hash": "sha256-EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM=", + "method": "nar" + } +] diff --git a/src/libstore-tests/data/serve-protocol/drv-output.json b/src/libstore-tests/data/serve-protocol/drv-output.json new file mode 100644 index 00000000000..2668d70c98c --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/drv-output.json @@ -0,0 +1,4 @@ +[ + "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!quux" +] diff --git a/src/libstore-tests/data/serve-protocol/optional-content-address.json b/src/libstore-tests/data/serve-protocol/optional-content-address.json new file mode 100644 index 00000000000..1c57fbf25ae --- /dev/null +++ 
b/src/libstore-tests/data/serve-protocol/optional-content-address.json @@ -0,0 +1,7 @@ +[ + null, + { + "hash": "sha1-gGemBoenViNZM3hiwqns/Fgzqwo=", + "method": "flat" + } +] diff --git a/src/libstore-tests/data/serve-protocol/optional-store-path.json b/src/libstore-tests/data/serve-protocol/optional-store-path.json new file mode 100644 index 00000000000..58519a4d2c4 --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/optional-store-path.json @@ -0,0 +1,4 @@ +[ + null, + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" +] diff --git a/src/libstore-tests/data/serve-protocol/realisation-with-deps.bin b/src/libstore-tests/data/serve-protocol/realisation-with-deps.bin new file mode 100644 index 00000000000..54a78b64ebc Binary files /dev/null and b/src/libstore-tests/data/serve-protocol/realisation-with-deps.bin differ diff --git a/src/libstore-tests/data/serve-protocol/realisation-with-deps.json b/src/libstore-tests/data/serve-protocol/realisation-with-deps.json new file mode 100644 index 00000000000..77148d14ca4 --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/realisation-with-deps.json @@ -0,0 +1,13 @@ +[ + { + "dependentRealisations": { + "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!quux": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" + }, + "id": "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [ + "asdf", + "qwer" + ] + } +] diff --git a/src/libstore-tests/data/serve-protocol/realisation.bin b/src/libstore-tests/data/serve-protocol/realisation.bin index 2176c6c4afd..3a0b2b2d8e3 100644 Binary files a/src/libstore-tests/data/serve-protocol/realisation.bin and b/src/libstore-tests/data/serve-protocol/realisation.bin differ diff --git a/src/libstore-tests/data/serve-protocol/realisation.json b/src/libstore-tests/data/serve-protocol/realisation.json new file mode 100644 index 00000000000..f9ff09dbb63 --- /dev/null +++ 
b/src/libstore-tests/data/serve-protocol/realisation.json @@ -0,0 +1,17 @@ +[ + { + "dependentRealisations": {}, + "id": "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [] + }, + { + "dependentRealisations": {}, + "id": "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [ + "asdf", + "qwer" + ] + } +] diff --git a/src/libstore-tests/data/serve-protocol/set.json b/src/libstore-tests/data/serve-protocol/set.json new file mode 100644 index 00000000000..acd123082da --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/set.json @@ -0,0 +1,22 @@ +[ + [], + [ + "" + ], + [ + "", + "bar", + "foo" + ], + [ + [], + [ + "" + ], + [ + "", + "1", + "2" + ] + ] +] diff --git a/src/libstore-tests/data/serve-protocol/store-path.json b/src/libstore-tests/data/serve-protocol/store-path.json new file mode 100644 index 00000000000..16459245be8 --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/store-path.json @@ -0,0 +1,4 @@ +[ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" +] diff --git a/src/libstore-tests/data/serve-protocol/string.json b/src/libstore-tests/data/serve-protocol/string.json new file mode 100644 index 00000000000..d3db4f3b4ac --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/string.json @@ -0,0 +1,7 @@ +[ + "", + "hi", + "white rabbit", + "大白兔", + "oh no " +] diff --git a/src/libstore-tests/data/serve-protocol/unkeyed-valid-path-info-2.3.json b/src/libstore-tests/data/serve-protocol/unkeyed-valid-path-info-2.3.json new file mode 100644 index 00000000000..0f593f4248d --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/unkeyed-valid-path-info-2.3.json @@ -0,0 +1,28 @@ +[ + { + "ca": null, + "deriver": null, + "narHash": "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=", + "narSize": 34878, + "references": 
[], + "registrationTime": null, + "signatures": [], + "storeDir": "/nix/store", + "ultimate": false, + "version": 2 + }, + { + "ca": null, + "deriver": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "narHash": "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=", + "narSize": 34878, + "references": [ + "g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo.drv" + ], + "registrationTime": null, + "signatures": [], + "storeDir": "/nix/store", + "ultimate": false, + "version": 2 + } +] diff --git a/src/libstore-tests/data/serve-protocol/unkeyed-valid-path-info-2.4.json b/src/libstore-tests/data/serve-protocol/unkeyed-valid-path-info-2.4.json new file mode 100644 index 00000000000..801f2040002 --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/unkeyed-valid-path-info-2.4.json @@ -0,0 +1,37 @@ +[ + { + "ca": null, + "deriver": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narSize": 34878, + "references": [ + "g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo.drv" + ], + "registrationTime": null, + "signatures": [], + "storeDir": "/nix/store", + "ultimate": false, + "version": 2 + }, + { + "ca": { + "hash": "sha256-EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM=", + "method": "nar" + }, + "deriver": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narSize": 34878, + "references": [ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "n5wkd9frr45pa74if5gpz9j7mifg27fh-foo" + ], + "registrationTime": null, + "signatures": [ + "fake-sig-1", + "fake-sig-2" + ], + "storeDir": "/nix/store", + "ultimate": false, + "version": 2 + } +] diff --git a/src/libstore-tests/data/serve-protocol/vector.json b/src/libstore-tests/data/serve-protocol/vector.json new file mode 100644 index 00000000000..2b8cc1b3afe --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/vector.json @@ -0,0 +1,22 @@ +[ + [], + [ + "" + ], + [ + "", + "foo", + "bar" + ], + [ + [], + [ + "" + ], + [ + "", + 
"1", + "2" + ] + ] +] diff --git a/src/libstore-tests/data/store-reference/local_3.txt b/src/libstore-tests/data/store-reference/local_3.txt index 2a67a342682..cd015d74f93 100644 --- a/src/libstore-tests/data/store-reference/local_3.txt +++ b/src/libstore-tests/data/store-reference/local_3.txt @@ -1 +1 @@ -local://?root=/foo bar/baz \ No newline at end of file +local://?root=/foo%20bar/baz \ No newline at end of file diff --git a/src/libstore-tests/data/store-reference/local_3_no_percent.txt b/src/libstore-tests/data/store-reference/local_3_no_percent.txt new file mode 100644 index 00000000000..2a67a342682 --- /dev/null +++ b/src/libstore-tests/data/store-reference/local_3_no_percent.txt @@ -0,0 +1 @@ +local://?root=/foo bar/baz \ No newline at end of file diff --git a/src/libstore-tests/data/worker-protocol/build-mode.json b/src/libstore-tests/data/worker-protocol/build-mode.json new file mode 100644 index 00000000000..2ef158f87f6 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/build-mode.json @@ -0,0 +1,5 @@ +[ + 0, + 1, + 2 +] diff --git a/src/libstore-tests/data/worker-protocol/build-result-1.27.json b/src/libstore-tests/data/worker-protocol/build-result-1.27.json new file mode 100644 index 00000000000..029bcb5a835 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/build-result-1.27.json @@ -0,0 +1,28 @@ +[ + { + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "OutputRejected", + "stopTime": 0, + "success": false, + "timesBuilt": 0 + }, + { + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "NotDeterministic", + "stopTime": 0, + "success": false, + "timesBuilt": 0 + }, + { + "builtOutputs": {}, + "startTime": 0, + "status": "Built", + "stopTime": 0, + "success": true, + "timesBuilt": 0 + } +] diff --git a/src/libstore-tests/data/worker-protocol/build-result-1.28.json b/src/libstore-tests/data/worker-protocol/build-result-1.28.json new file mode 100644 index 
00000000000..d02845b7faa --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/build-result-1.28.json @@ -0,0 +1,41 @@ +[ + { + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "OutputRejected", + "stopTime": 0, + "success": false, + "timesBuilt": 0 + }, + { + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "NotDeterministic", + "stopTime": 0, + "success": false, + "timesBuilt": 0 + }, + { + "builtOutputs": { + "bar": { + "dependentRealisations": {}, + "id": "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!bar", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "signatures": [] + }, + "foo": { + "dependentRealisations": {}, + "id": "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!foo", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [] + } + }, + "startTime": 0, + "status": "Built", + "stopTime": 0, + "success": true, + "timesBuilt": 0 + } +] diff --git a/src/libstore-tests/data/worker-protocol/build-result-1.29.json b/src/libstore-tests/data/worker-protocol/build-result-1.29.json new file mode 100644 index 00000000000..30a8e82e926 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/build-result-1.29.json @@ -0,0 +1,41 @@ +[ + { + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "OutputRejected", + "stopTime": 0, + "success": false, + "timesBuilt": 0 + }, + { + "errorMsg": "no idea why", + "isNonDeterministic": true, + "startTime": 30, + "status": "NotDeterministic", + "stopTime": 50, + "success": false, + "timesBuilt": 3 + }, + { + "builtOutputs": { + "bar": { + "dependentRealisations": {}, + "id": "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!bar", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "signatures": [] + }, + "foo": { + "dependentRealisations": {}, + "id": 
"sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!foo", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [] + } + }, + "startTime": 30, + "status": "Built", + "stopTime": 50, + "success": true, + "timesBuilt": 1 + } +] diff --git a/src/libstore-tests/data/worker-protocol/build-result-1.37.json b/src/libstore-tests/data/worker-protocol/build-result-1.37.json new file mode 100644 index 00000000000..61cddd2ca42 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/build-result-1.37.json @@ -0,0 +1,43 @@ +[ + { + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "OutputRejected", + "stopTime": 0, + "success": false, + "timesBuilt": 0 + }, + { + "errorMsg": "no idea why", + "isNonDeterministic": true, + "startTime": 30, + "status": "NotDeterministic", + "stopTime": 50, + "success": false, + "timesBuilt": 3 + }, + { + "builtOutputs": { + "bar": { + "dependentRealisations": {}, + "id": "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!bar", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "signatures": [] + }, + "foo": { + "dependentRealisations": {}, + "id": "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!foo", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [] + } + }, + "cpuSystem": 604000000, + "cpuUser": 500000000, + "startTime": 30, + "status": "Built", + "stopTime": 50, + "success": true, + "timesBuilt": 1 + } +] diff --git a/src/libstore-tests/data/worker-protocol/content-address.json b/src/libstore-tests/data/worker-protocol/content-address.json new file mode 100644 index 00000000000..3c63e6d9bbd --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/content-address.json @@ -0,0 +1,14 @@ +[ + { + "hash": "sha256-+Xc9Ll6mcPltwaewrk/BAQ56Y3G5T//wzhKUc0zrYu0=", + "method": "text" + }, + { + "hash": "sha1-gGemBoenViNZM3hiwqns/Fgzqwo=", + "method": "flat" + }, + { + "hash": 
"sha256-EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM=", + "method": "nar" + } +] diff --git a/src/libstore-tests/data/worker-protocol/derived-path-1.29.json b/src/libstore-tests/data/worker-protocol/derived-path-1.29.json new file mode 100644 index 00000000000..f0efe4a3532 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/derived-path-1.29.json @@ -0,0 +1,16 @@ +[ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + { + "drvPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "outputs": [ + "*" + ] + }, + { + "drvPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "outputs": [ + "x", + "y" + ] + } +] diff --git a/src/libstore-tests/data/worker-protocol/derived-path-1.30.json b/src/libstore-tests/data/worker-protocol/derived-path-1.30.json new file mode 100644 index 00000000000..7a67e476100 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/derived-path-1.30.json @@ -0,0 +1,17 @@ +[ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv", + { + "drvPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "outputs": [ + "*" + ] + }, + { + "drvPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "outputs": [ + "x", + "y" + ] + } +] diff --git a/src/libstore-tests/data/worker-protocol/drv-output.json b/src/libstore-tests/data/worker-protocol/drv-output.json new file mode 100644 index 00000000000..2668d70c98c --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/drv-output.json @@ -0,0 +1,4 @@ +[ + "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!quux" +] diff --git a/src/libstore-tests/data/worker-protocol/keyed-build-result-1.29.json b/src/libstore-tests/data/worker-protocol/keyed-build-result-1.29.json new file mode 100644 index 00000000000..c15d47aa363 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/keyed-build-result-1.29.json @@ -0,0 +1,27 @@ +[ + { + "errorMsg": "no idea why", + "isNonDeterministic": 
false, + "path": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-xxx", + "startTime": 0, + "status": "OutputRejected", + "stopTime": 0, + "success": false, + "timesBuilt": 0 + }, + { + "errorMsg": "no idea why", + "isNonDeterministic": true, + "path": { + "drvPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "outputs": [ + "out" + ] + }, + "startTime": 30, + "status": "NotDeterministic", + "stopTime": 50, + "success": false, + "timesBuilt": 3 + } +] diff --git a/src/libstore-tests/data/worker-protocol/optional-content-address.json b/src/libstore-tests/data/worker-protocol/optional-content-address.json new file mode 100644 index 00000000000..1c57fbf25ae --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/optional-content-address.json @@ -0,0 +1,7 @@ +[ + null, + { + "hash": "sha1-gGemBoenViNZM3hiwqns/Fgzqwo=", + "method": "flat" + } +] diff --git a/src/libstore-tests/data/worker-protocol/optional-store-path.json b/src/libstore-tests/data/worker-protocol/optional-store-path.json new file mode 100644 index 00000000000..58519a4d2c4 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/optional-store-path.json @@ -0,0 +1,4 @@ +[ + null, + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" +] diff --git a/src/libstore-tests/data/worker-protocol/optional-trusted-flag.json b/src/libstore-tests/data/worker-protocol/optional-trusted-flag.json new file mode 100644 index 00000000000..2f3c092f816 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/optional-trusted-flag.json @@ -0,0 +1,5 @@ +[ + null, + true, + false +] diff --git a/src/libstore-tests/data/worker-protocol/realisation-with-deps.bin b/src/libstore-tests/data/worker-protocol/realisation-with-deps.bin new file mode 100644 index 00000000000..54a78b64ebc Binary files /dev/null and b/src/libstore-tests/data/worker-protocol/realisation-with-deps.bin differ diff --git a/src/libstore-tests/data/worker-protocol/realisation-with-deps.json b/src/libstore-tests/data/worker-protocol/realisation-with-deps.json new file mode 
100644 index 00000000000..77148d14ca4 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/realisation-with-deps.json @@ -0,0 +1,13 @@ +[ + { + "dependentRealisations": { + "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!quux": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" + }, + "id": "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [ + "asdf", + "qwer" + ] + } +] diff --git a/src/libstore-tests/data/worker-protocol/realisation.bin b/src/libstore-tests/data/worker-protocol/realisation.bin index 2176c6c4afd..3a0b2b2d8e3 100644 Binary files a/src/libstore-tests/data/worker-protocol/realisation.bin and b/src/libstore-tests/data/worker-protocol/realisation.bin differ diff --git a/src/libstore-tests/data/worker-protocol/realisation.json b/src/libstore-tests/data/worker-protocol/realisation.json new file mode 100644 index 00000000000..f9ff09dbb63 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/realisation.json @@ -0,0 +1,17 @@ +[ + { + "dependentRealisations": {}, + "id": "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [] + }, + { + "dependentRealisations": {}, + "id": "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [ + "asdf", + "qwer" + ] + } +] diff --git a/src/libstore-tests/data/worker-protocol/set.json b/src/libstore-tests/data/worker-protocol/set.json new file mode 100644 index 00000000000..acd123082da --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/set.json @@ -0,0 +1,22 @@ +[ + [], + [ + "" + ], + [ + "", + "bar", + "foo" + ], + [ + [], + [ + "" + ], + [ + "", + "1", + "2" + ] + ] +] diff --git a/src/libstore-tests/data/worker-protocol/store-path.json b/src/libstore-tests/data/worker-protocol/store-path.json new file 
mode 100644 index 00000000000..16459245be8 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/store-path.json @@ -0,0 +1,4 @@ +[ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" +] diff --git a/src/libstore-tests/data/worker-protocol/string.json b/src/libstore-tests/data/worker-protocol/string.json new file mode 100644 index 00000000000..d3db4f3b4ac --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/string.json @@ -0,0 +1,7 @@ +[ + "", + "hi", + "white rabbit", + "大白兔", + "oh no " +] diff --git a/src/libstore-tests/data/worker-protocol/unkeyed-valid-path-info-1.15.json b/src/libstore-tests/data/worker-protocol/unkeyed-valid-path-info-1.15.json new file mode 100644 index 00000000000..9cc53c6804e --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/unkeyed-valid-path-info-1.15.json @@ -0,0 +1,28 @@ +[ + { + "ca": null, + "deriver": null, + "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narSize": 34878, + "references": [], + "registrationTime": 23423, + "signatures": [], + "storeDir": "/nix/store", + "ultimate": false, + "version": 2 + }, + { + "ca": null, + "deriver": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narSize": 34878, + "references": [ + "g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo.drv" + ], + "registrationTime": 23423, + "signatures": [], + "storeDir": "/nix/store", + "ultimate": false, + "version": 2 + } +] diff --git a/src/libstore-tests/data/worker-protocol/valid-path-info-1.15.json b/src/libstore-tests/data/worker-protocol/valid-path-info-1.15.json new file mode 100644 index 00000000000..427c286ddfb --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/valid-path-info-1.15.json @@ -0,0 +1,31 @@ +[ + { + "ca": null, + "deriver": null, + "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narSize": 34878, + "path": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "references": [], + 
"registrationTime": 23423, + "signatures": [], + "storeDir": "/nix/store", + "ultimate": false, + "version": 2 + }, + { + "ca": null, + "deriver": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narSize": 34878, + "path": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "references": [ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo" + ], + "registrationTime": 23423, + "signatures": [], + "storeDir": "/nix/store", + "ultimate": false, + "version": 2 + } +] diff --git a/src/libstore-tests/data/worker-protocol/valid-path-info-1.16.json b/src/libstore-tests/data/worker-protocol/valid-path-info-1.16.json new file mode 100644 index 00000000000..f980d842174 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/valid-path-info-1.16.json @@ -0,0 +1,53 @@ +[ + { + "ca": null, + "deriver": null, + "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narSize": 34878, + "path": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "references": [], + "registrationTime": 23423, + "signatures": [], + "storeDir": "/nix/store", + "ultimate": true, + "version": 2 + }, + { + "ca": null, + "deriver": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narSize": 34878, + "path": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "references": [ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo" + ], + "registrationTime": 23423, + "signatures": [ + "fake-sig-1", + "fake-sig-2" + ], + "storeDir": "/nix/store", + "ultimate": false, + "version": 2 + }, + { + "ca": { + "hash": "sha256-EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM=", + "method": "nar" + }, + "deriver": null, + "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narSize": 34878, + "path": "n5wkd9frr45pa74if5gpz9j7mifg27fh-foo", + "references": [ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + 
"n5wkd9frr45pa74if5gpz9j7mifg27fh-foo" + ], + "registrationTime": 23423, + "signatures": [], + "storeDir": "/nix/store", + "ultimate": false, + "version": 2 + } +] diff --git a/src/libstore-tests/data/worker-protocol/vector.json b/src/libstore-tests/data/worker-protocol/vector.json new file mode 100644 index 00000000000..2b8cc1b3afe --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/vector.json @@ -0,0 +1,22 @@ +[ + [], + [ + "" + ], + [ + "", + "foo", + "bar" + ], + [ + [], + [ + "" + ], + [ + "", + "1", + "2" + ] + ] +] diff --git a/src/libstore-tests/derivation-advanced-attrs.cc b/src/libstore-tests/derivation-advanced-attrs.cc index 9c13bf04830..296ffed619b 100644 --- a/src/libstore-tests/derivation-advanced-attrs.cc +++ b/src/libstore-tests/derivation-advanced-attrs.cc @@ -3,20 +3,20 @@ #include "nix/util/experimental-features.hh" #include "nix/store/derivations.hh" -#include "nix/store/derivations.hh" +#include "nix/store/derived-path.hh" #include "nix/store/derivation-options.hh" #include "nix/store/parsed-derivations.hh" #include "nix/util/types.hh" #include "nix/util/json-utils.hh" #include "nix/store/tests/libstore.hh" -#include "nix/util/tests/characterization.hh" +#include "nix/util/tests/json-characterization.hh" namespace nix { -using nlohmann::json; +using namespace nlohmann; -class DerivationAdvancedAttrsTest : public CharacterizationTest, public LibStoreTest +class DerivationAdvancedAttrsTest : public JsonCharacterizationTest, public LibStoreTest { protected: std::filesystem::path unitTestData = getUnitTestData() / "derivation" / "ia"; @@ -32,6 +32,37 @@ class DerivationAdvancedAttrsTest : public CharacterizationTest, public LibStore * to worry about race conditions if the tests run concurrently. 
*/ ExperimentalFeatureSettings mockXpSettings; + + /** + * Helper function to test getRequiredSystemFeatures for a given derivation file + */ + void testRequiredSystemFeatures(const std::string & fileName, const StringSet & expectedFeatures) + { + this->readTest(fileName, [&](auto encoded) { + auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); + auto options = derivationOptionsFromStructuredAttrs( + *this->store, got.inputDrvs, got.env, get(got.structuredAttrs), true, this->mockXpSettings); + EXPECT_EQ(options.getRequiredSystemFeatures(got), expectedFeatures); + }); + } + + /** + * Helper function to test DerivationOptions parsing and comparison + */ + void testDerivationOptions( + const std::string & fileName, + const DerivationOptions & expected, + const StringSet & expectedSystemFeatures) + { + this->readTest(fileName, [&](auto encoded) { + auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); + auto options = derivationOptionsFromStructuredAttrs( + *this->store, got.inputDrvs, got.env, get(got.structuredAttrs), true, this->mockXpSettings); + + EXPECT_EQ(options, expected); + EXPECT_EQ(options.getRequiredSystemFeatures(got), expectedSystemFeatures); + }); + } }; class CaDerivationAdvancedAttrsTest : public DerivationAdvancedAttrsTest @@ -51,44 +82,44 @@ using BothFixtures = ::testing::TypesreadTest(NAME ".json", [&](const auto & encoded_) { \ - auto encoded = json::parse(encoded_); \ - /* Use DRV file instead of C++ literal as source of truth. 
*/ \ - auto aterm = readFile(this->goldenMaster(NAME ".drv")); \ - auto expected = parseDerivation(*this->store, std::move(aterm), NAME, this->mockXpSettings); \ - Derivation got = Derivation::fromJSON(encoded, this->mockXpSettings); \ - EXPECT_EQ(got, expected); \ - }); \ - } \ - \ - TYPED_TEST(DerivationAdvancedAttrsBothTest, Derivation_##STEM##_to_json) \ - { \ - this->writeTest( \ - NAME ".json", \ - [&]() -> json { \ - /* Use DRV file instead of C++ literal as source of truth. */ \ - auto aterm = readFile(this->goldenMaster(NAME ".drv")); \ - return parseDerivation(*this->store, std::move(aterm), NAME, this->mockXpSettings).toJSON(); \ - }, \ - [](const auto & file) { return json::parse(readFile(file)); }, \ - [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ - } \ - \ - TYPED_TEST(DerivationAdvancedAttrsBothTest, Derivation_##STEM##_from_aterm) \ - { \ - this->readTest(NAME ".drv", [&](auto encoded) { \ - /* Use JSON file instead of C++ literal as source of truth. */ \ - auto json = json::parse(readFile(this->goldenMaster(NAME ".json"))); \ - auto expected = Derivation::fromJSON(json, this->mockXpSettings); \ - auto got = parseDerivation(*this->store, std::move(encoded), NAME, this->mockXpSettings); \ - EXPECT_EQ(got.toJSON(), expected.toJSON()); \ - EXPECT_EQ(got, expected); \ - }); \ - } \ - \ +#define TEST_ATERM_JSON(STEM, NAME) \ + TYPED_TEST(DerivationAdvancedAttrsBothTest, Derivation_##STEM##_from_json) \ + { \ + this->readTest(NAME ".json", [&](const auto & encoded_) { \ + auto encoded = json::parse(encoded_); \ + /* Use DRV file instead of C++ literal as source of truth. 
*/ \ + auto aterm = readFile(this->goldenMaster(NAME ".drv")); \ + auto expected = parseDerivation(*this->store, std::move(aterm), NAME, this->mockXpSettings); \ + Derivation got = adl_serializer::from_json(encoded, this->mockXpSettings); \ + EXPECT_EQ(got, expected); \ + }); \ + } \ + \ + TYPED_TEST(DerivationAdvancedAttrsBothTest, Derivation_##STEM##_to_json) \ + { \ + this->writeTest( \ + NAME ".json", \ + [&]() -> json { \ + /* Use DRV file instead of C++ literal as source of truth. */ \ + auto aterm = readFile(this->goldenMaster(NAME ".drv")); \ + return parseDerivation(*this->store, std::move(aterm), NAME, this->mockXpSettings); \ + }, \ + [](const auto & file) { return json::parse(readFile(file)); }, \ + [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ + } \ + \ + TYPED_TEST(DerivationAdvancedAttrsBothTest, Derivation_##STEM##_from_aterm) \ + { \ + this->readTest(NAME ".drv", [&](auto encoded) { \ + /* Use JSON file instead of C++ literal as source of truth. */ \ + auto j = json::parse(readFile(this->goldenMaster(NAME ".json"))); \ + auto expected = adl_serializer::from_json(j, this->mockXpSettings); \ + auto got = parseDerivation(*this->store, std::move(encoded), NAME, this->mockXpSettings); \ + EXPECT_EQ(static_cast(got), static_cast(expected)); \ + EXPECT_EQ(got, expected); \ + }); \ + } \ + \ /* No corresponding write test, because we need to read the drv to write the json file */ TEST_ATERM_JSON(advancedAttributes, "advanced-attributes-defaults"); @@ -98,35 +129,69 @@ TEST_ATERM_JSON(advancedAttributes_structuredAttrs_defaults, "advanced-attribute #undef TEST_ATERM_JSON -using ExportReferencesMap = decltype(DerivationOptions::exportReferencesGraph); +/** + * Since these are both repeated and sensative opaque values, it makes + * sense to give them names in this file. 
+ */ +static SingleDerivedPath + pathFoo = SingleDerivedPath::Opaque{StorePath{"p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo"}}, + pathFooDev = SingleDerivedPath::Opaque{StorePath{"z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev"}}, + pathBar = SingleDerivedPath::Opaque{StorePath{"r5cff30838majxk5mp3ip2diffi8vpaj-bar"}}, + pathBarDev = SingleDerivedPath::Opaque{StorePath{"9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev"}}, + pathBarDrvIA = SingleDerivedPath::Opaque{StorePath{"vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"}}, + pathBarDrvCA = SingleDerivedPath::Opaque{StorePath{"qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"}}, + placeholderFoo = + SingleDerivedPath::Built{ + .drvPath = makeConstantStorePathRef(StorePath{"j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv"}), + .output = "out", + }, + placeholderFooDev = + SingleDerivedPath::Built{ + .drvPath = makeConstantStorePathRef(StorePath{"j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv"}), + .output = "dev", + }, + placeholderBar = + SingleDerivedPath::Built{ + .drvPath = makeConstantStorePathRef(StorePath{"qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"}), + .output = "out", + }, + placeholderBarDev = SingleDerivedPath::Built{ + .drvPath = makeConstantStorePathRef(StorePath{"qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"}), + .output = "dev", + }; + +using ExportReferencesMap = decltype(DerivationOptions::exportReferencesGraph); + +static const DerivationOptions advancedAttributes_defaults = { + .outputChecks = + DerivationOptions::OutputChecks{ + .ignoreSelfRefs = true, + }, + .unsafeDiscardReferences = {}, + .passAsFile = {}, + .exportReferencesGraph = {}, + .additionalSandboxProfile = "", + .noChroot = false, + .impureHostDeps = {}, + .impureEnvVars = {}, + .allowLocalNetworking = false, + .requiredSystemFeatures = {}, + .preferLocalBuild = false, + .allowSubstitutes = true, +}; TYPED_TEST(DerivationAdvancedAttrsBothTest, advancedAttributes_defaults) { this->readTest("advanced-attributes-defaults.drv", [&](auto encoded) { auto got = parseDerivation(*this->store, 
std::move(encoded), "foo", this->mockXpSettings); - auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); + auto options = derivationOptionsFromStructuredAttrs( + *this->store, got.inputDrvs, got.env, get(got.structuredAttrs), true, this->mockXpSettings); EXPECT_TRUE(!got.structuredAttrs); - EXPECT_EQ(options.additionalSandboxProfile, ""); - EXPECT_EQ(options.noChroot, false); - EXPECT_EQ(options.impureHostDeps, StringSet{}); - EXPECT_EQ(options.impureEnvVars, StringSet{}); - EXPECT_EQ(options.allowLocalNetworking, false); - EXPECT_EQ(options.exportReferencesGraph, ExportReferencesMap{}); - { - auto * checksForAllOutputs_ = std::get_if<0>(&options.outputChecks); - ASSERT_TRUE(checksForAllOutputs_ != nullptr); - auto & checksForAllOutputs = *checksForAllOutputs_; - - EXPECT_EQ(checksForAllOutputs.allowedReferences, std::nullopt); - EXPECT_EQ(checksForAllOutputs.allowedRequisites, std::nullopt); - EXPECT_EQ(checksForAllOutputs.disallowedReferences, StringSet{}); - EXPECT_EQ(checksForAllOutputs.disallowedRequisites, StringSet{}); - } + EXPECT_EQ(options, advancedAttributes_defaults); + EXPECT_EQ(options.canBuildLocally(*this->store, got), false); EXPECT_EQ(options.willBuildLocally(*this->store, got), false); EXPECT_EQ(options.substitutesAllowed(), true); @@ -136,152 +201,125 @@ TYPED_TEST(DerivationAdvancedAttrsBothTest, advancedAttributes_defaults) TEST_F(DerivationAdvancedAttrsTest, advancedAttributes_defaults) { - this->readTest("advanced-attributes-defaults.drv", [&](auto encoded) { - auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); - - auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); - - EXPECT_EQ(options.getRequiredSystemFeatures(got), StringSet{}); - }); + 
testRequiredSystemFeatures("advanced-attributes-defaults.drv", {}); }; TEST_F(CaDerivationAdvancedAttrsTest, advancedAttributes_defaults) { - this->readTest("advanced-attributes-defaults.drv", [&](auto encoded) { - auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); - - auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); - - EXPECT_EQ(options.getRequiredSystemFeatures(got), StringSet{"ca-derivations"}); - }); + testRequiredSystemFeatures("advanced-attributes-defaults.drv", {"ca-derivations"}); }; TYPED_TEST(DerivationAdvancedAttrsBothTest, advancedAttributes) { + DerivationOptions expected = { + .outputChecks = + DerivationOptions::OutputChecks{ + .ignoreSelfRefs = true, + }, + .unsafeDiscardReferences = {}, + .passAsFile = {}, + .additionalSandboxProfile = "sandcastle", + .noChroot = true, + .impureHostDeps = {"/usr/bin/ditto"}, + .impureEnvVars = {"UNICORN"}, + .allowLocalNetworking = true, + .requiredSystemFeatures = {"rainbow", "uid-range"}, + .preferLocalBuild = true, + .allowSubstitutes = false, + }; + this->readTest("advanced-attributes.drv", [&](auto encoded) { auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); - auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); + auto options = derivationOptionsFromStructuredAttrs( + *this->store, got.inputDrvs, got.env, get(got.structuredAttrs), true, this->mockXpSettings); EXPECT_TRUE(!got.structuredAttrs); - EXPECT_EQ(options.additionalSandboxProfile, "sandcastle"); - EXPECT_EQ(options.noChroot, true); - EXPECT_EQ(options.impureHostDeps, StringSet{"/usr/bin/ditto"}); - EXPECT_EQ(options.impureEnvVars, StringSet{"UNICORN"}); - EXPECT_EQ(options.allowLocalNetworking, true); - 
EXPECT_EQ(options.canBuildLocally(*this->store, got), false); - EXPECT_EQ(options.willBuildLocally(*this->store, got), false); + // Reset fields that vary between test cases to enable whole-object comparison + options.outputChecks = DerivationOptions::OutputChecks{.ignoreSelfRefs = true}; + options.exportReferencesGraph = {}; + + EXPECT_EQ(options, expected); + EXPECT_EQ(options.substitutesAllowed(), false); EXPECT_EQ(options.useUidRange(got), true); }); }; -TEST_F(DerivationAdvancedAttrsTest, advancedAttributes) -{ - this->readTest("advanced-attributes.drv", [&](auto encoded) { - auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); - - auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); - - EXPECT_EQ( - options.exportReferencesGraph, - (ExportReferencesMap{ - { - "refs1", - { - "/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo", - }, - }, - { - "refs2", - { - "/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv", - }, - }, - })); - - { - auto * checksForAllOutputs_ = std::get_if<0>(&options.outputChecks); - ASSERT_TRUE(checksForAllOutputs_ != nullptr); - auto & checksForAllOutputs = *checksForAllOutputs_; - - EXPECT_EQ( - checksForAllOutputs.allowedReferences, StringSet{"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo"}); - EXPECT_EQ( - checksForAllOutputs.allowedRequisites, - StringSet{"/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev"}); - EXPECT_EQ( - checksForAllOutputs.disallowedReferences, StringSet{"/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar"}); - EXPECT_EQ( - checksForAllOutputs.disallowedRequisites, - StringSet{"/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev"}); - } +DerivationOptions advancedAttributes_ia = { + .outputChecks = + DerivationOptions::OutputChecks{ + .ignoreSelfRefs = true, + .allowedReferences = std::set>{pathFoo}, + .disallowedReferences = std::set>{pathBar, 
OutputName{"dev"}}, + .allowedRequisites = std::set>{pathFooDev, OutputName{"bin"}}, + .disallowedRequisites = std::set>{pathBarDev}, + }, + .unsafeDiscardReferences = {}, + .passAsFile = {}, + .exportReferencesGraph{ + {"refs1", {pathFoo}}, + {"refs2", {pathBarDrvIA}}, + }, + .additionalSandboxProfile = "sandcastle", + .noChroot = true, + .impureHostDeps = {"/usr/bin/ditto"}, + .impureEnvVars = {"UNICORN"}, + .allowLocalNetworking = true, + .requiredSystemFeatures = {"rainbow", "uid-range"}, + .preferLocalBuild = true, + .allowSubstitutes = false, +}; - StringSet systemFeatures{"rainbow", "uid-range"}; +TEST_F(DerivationAdvancedAttrsTest, advancedAttributes_ia) +{ + testDerivationOptions("advanced-attributes.drv", advancedAttributes_ia, {"rainbow", "uid-range"}); +}; - EXPECT_EQ(options.getRequiredSystemFeatures(got), systemFeatures); - }); +DerivationOptions advancedAttributes_ca = { + .outputChecks = + DerivationOptions::OutputChecks{ + .ignoreSelfRefs = true, + .allowedReferences = std::set>{placeholderFoo}, + .disallowedReferences = std::set>{placeholderBar, OutputName{"dev"}}, + .allowedRequisites = std::set>{placeholderFooDev, OutputName{"bin"}}, + .disallowedRequisites = std::set>{placeholderBarDev}, + }, + .unsafeDiscardReferences = {}, + .passAsFile = {}, + .exportReferencesGraph{ + {"refs1", {placeholderFoo}}, + {"refs2", {pathBarDrvCA}}, + }, + .additionalSandboxProfile = "sandcastle", + .noChroot = true, + .impureHostDeps = {"/usr/bin/ditto"}, + .impureEnvVars = {"UNICORN"}, + .allowLocalNetworking = true, + .requiredSystemFeatures = {"rainbow", "uid-range"}, + .preferLocalBuild = true, + .allowSubstitutes = false, }; TEST_F(CaDerivationAdvancedAttrsTest, advancedAttributes) { - this->readTest("advanced-attributes.drv", [&](auto encoded) { - auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); - - auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - - DerivationOptions options = 
DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); - - EXPECT_EQ( - options.exportReferencesGraph, - (ExportReferencesMap{ - { - "refs1", - { - "/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9", - }, - }, - { - "refs2", - { - "/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv", - }, - }, - })); - - { - auto * checksForAllOutputs_ = std::get_if<0>(&options.outputChecks); - ASSERT_TRUE(checksForAllOutputs_ != nullptr); - auto & checksForAllOutputs = *checksForAllOutputs_; - - EXPECT_EQ( - checksForAllOutputs.allowedReferences, - StringSet{"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9"}); - EXPECT_EQ( - checksForAllOutputs.allowedRequisites, - StringSet{"/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z"}); - EXPECT_EQ( - checksForAllOutputs.disallowedReferences, - StringSet{"/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g"}); - EXPECT_EQ( - checksForAllOutputs.disallowedRequisites, - StringSet{"/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8"}); - } - - StringSet systemFeatures{"rainbow", "uid-range"}; - systemFeatures.insert("ca-derivations"); + testDerivationOptions("advanced-attributes.drv", advancedAttributes_ca, {"rainbow", "uid-range", "ca-derivations"}); +}; - EXPECT_EQ(options.getRequiredSystemFeatures(got), systemFeatures); - }); +DerivationOptions advancedAttributes_structuredAttrs_defaults = { + .outputChecks = std::map::OutputChecks>{}, + .unsafeDiscardReferences = {}, + .passAsFile = {}, + .exportReferencesGraph = {}, + .additionalSandboxProfile = "", + .noChroot = false, + .impureHostDeps = {}, + .impureEnvVars = {}, + .allowLocalNetworking = false, + .requiredSystemFeatures = {}, + .preferLocalBuild = false, + .allowSubstitutes = true, }; TYPED_TEST(DerivationAdvancedAttrsBothTest, advancedAttributes_structuredAttrs_defaults) @@ -289,26 +327,12 @@ TYPED_TEST(DerivationAdvancedAttrsBothTest, advancedAttributes_structuredAttrs_d this->readTest("advanced-attributes-structured-attrs-defaults.drv", 
[&](auto encoded) { auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); - auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); + auto options = derivationOptionsFromStructuredAttrs( + *this->store, got.inputDrvs, got.env, get(got.structuredAttrs), true, this->mockXpSettings); EXPECT_TRUE(got.structuredAttrs); - EXPECT_EQ(options.additionalSandboxProfile, ""); - EXPECT_EQ(options.noChroot, false); - EXPECT_EQ(options.impureHostDeps, StringSet{}); - EXPECT_EQ(options.impureEnvVars, StringSet{}); - EXPECT_EQ(options.allowLocalNetworking, false); - EXPECT_EQ(options.exportReferencesGraph, ExportReferencesMap{}); - - { - auto * checksPerOutput_ = std::get_if<1>(&options.outputChecks); - ASSERT_TRUE(checksPerOutput_ != nullptr); - auto & checksPerOutput = *checksPerOutput_; - - EXPECT_EQ(checksPerOutput.size(), 0u); - } + EXPECT_EQ(options, advancedAttributes_structuredAttrs_defaults); EXPECT_EQ(options.canBuildLocally(*this->store, got), false); EXPECT_EQ(options.willBuildLocally(*this->store, got), false); @@ -319,55 +343,63 @@ TYPED_TEST(DerivationAdvancedAttrsBothTest, advancedAttributes_structuredAttrs_d TEST_F(DerivationAdvancedAttrsTest, advancedAttributes_structuredAttrs_defaults) { - this->readTest("advanced-attributes-structured-attrs-defaults.drv", [&](auto encoded) { - auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); - - auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); - - EXPECT_EQ(options.getRequiredSystemFeatures(got), StringSet{}); - }); + testRequiredSystemFeatures("advanced-attributes-structured-attrs-defaults.drv", {}); }; TEST_F(CaDerivationAdvancedAttrsTest, advancedAttributes_structuredAttrs_defaults) { - 
this->readTest("advanced-attributes-structured-attrs-defaults.drv", [&](auto encoded) { - auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); - - auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); - - EXPECT_EQ(options.getRequiredSystemFeatures(got), StringSet{"ca-derivations"}); - }); + testRequiredSystemFeatures("advanced-attributes-structured-attrs-defaults.drv", {"ca-derivations"}); }; TYPED_TEST(DerivationAdvancedAttrsBothTest, advancedAttributes_structuredAttrs) { + DerivationOptions expected = { + .outputChecks = + std::map::OutputChecks>{ + {"dev", + DerivationOptions::OutputChecks{ + .maxSize = 789, + .maxClosureSize = 5909, + }}, + }, + .unsafeDiscardReferences = {}, + .passAsFile = {}, + .exportReferencesGraph = {}, + .additionalSandboxProfile = "sandcastle", + .noChroot = true, + .impureHostDeps = {"/usr/bin/ditto"}, + .impureEnvVars = {"UNICORN"}, + .allowLocalNetworking = true, + .requiredSystemFeatures = {"rainbow", "uid-range"}, + .preferLocalBuild = true, + .allowSubstitutes = false, + }; + this->readTest("advanced-attributes-structured-attrs.drv", [&](auto encoded) { auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); - auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); + auto options = derivationOptionsFromStructuredAttrs( + *this->store, got.inputDrvs, got.env, get(got.structuredAttrs), true, this->mockXpSettings); EXPECT_TRUE(got.structuredAttrs); - EXPECT_EQ(options.additionalSandboxProfile, "sandcastle"); - EXPECT_EQ(options.noChroot, true); - EXPECT_EQ(options.impureHostDeps, StringSet{"/usr/bin/ditto"}); - EXPECT_EQ(options.impureEnvVars, StringSet{"UNICORN"}); - EXPECT_EQ(options.allowLocalNetworking, true); - + // Reset fields 
that vary between test cases to enable whole-object comparison { - auto output_ = get(std::get<1>(options.outputChecks), "dev"); - ASSERT_TRUE(output_); - auto & output = *output_; - - EXPECT_EQ(output.maxSize, 789); - EXPECT_EQ(output.maxClosureSize, 5909); + // Delete all keys but "dev" in options.outputChecks + auto * outputChecksMapP = + std::get_if::OutputChecks>>( + &options.outputChecks); + ASSERT_TRUE(outputChecksMapP); + auto & outputChecksMap = *outputChecksMapP; + auto devEntry = outputChecksMap.find("dev"); + ASSERT_TRUE(devEntry != outputChecksMap.end()); + auto devChecks = devEntry->second; + outputChecksMap.clear(); + outputChecksMap.emplace("dev", std::move(devChecks)); } + options.exportReferencesGraph = {}; + + EXPECT_EQ(options, expected); EXPECT_EQ(options.canBuildLocally(*this->store, got), false); EXPECT_EQ(options.willBuildLocally(*this->store, got), false); @@ -376,112 +408,111 @@ TYPED_TEST(DerivationAdvancedAttrsBothTest, advancedAttributes_structuredAttrs) }); }; +DerivationOptions advancedAttributes_structuredAttrs_ia = { + .outputChecks = + std::map::OutputChecks>{ + {"out", + DerivationOptions::OutputChecks{ + .allowedReferences = std::set>{pathFoo}, + .allowedRequisites = std::set>{pathFooDev, OutputName{"bin"}}, + }}, + {"bin", + DerivationOptions::OutputChecks{ + .disallowedReferences = std::set>{pathBar, OutputName{"dev"}}, + .disallowedRequisites = std::set>{pathBarDev}, + }}, + {"dev", + DerivationOptions::OutputChecks{ + .maxSize = 789, + .maxClosureSize = 5909, + }}, + }, + .unsafeDiscardReferences = {}, + .passAsFile = {}, + .exportReferencesGraph = + { + {"refs1", {pathFoo}}, + {"refs2", {pathBarDrvIA}}, + }, + .additionalSandboxProfile = "sandcastle", + .noChroot = true, + .impureHostDeps = {"/usr/bin/ditto"}, + .impureEnvVars = {"UNICORN"}, + .allowLocalNetworking = true, + .requiredSystemFeatures = {"rainbow", "uid-range"}, + .preferLocalBuild = true, + .allowSubstitutes = false, +}; + TEST_F(DerivationAdvancedAttrsTest, 
advancedAttributes_structuredAttrs) { - this->readTest("advanced-attributes-structured-attrs.drv", [&](auto encoded) { - auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); - - auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); - - EXPECT_EQ( - options.exportReferencesGraph, - (ExportReferencesMap{ - { - "refs1", - { - "/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo", - }, - }, - { - "refs2", - { - "/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv", - }, - }, - })); + testDerivationOptions( + "advanced-attributes-structured-attrs.drv", advancedAttributes_structuredAttrs_ia, {"rainbow", "uid-range"}); +}; +DerivationOptions advancedAttributes_structuredAttrs_ca = { + .outputChecks = + std::map::OutputChecks>{ + {"out", + DerivationOptions::OutputChecks{ + .allowedReferences = std::set>{placeholderFoo}, + .allowedRequisites = std::set>{placeholderFooDev, OutputName{"bin"}}, + }}, + {"bin", + DerivationOptions::OutputChecks{ + .disallowedReferences = std::set>{placeholderBar, OutputName{"dev"}}, + .disallowedRequisites = std::set>{placeholderBarDev}, + }}, + {"dev", + DerivationOptions::OutputChecks{ + .maxSize = 789, + .maxClosureSize = 5909, + }}, + }, + .unsafeDiscardReferences = {}, + .passAsFile = {}, + .exportReferencesGraph = { - { - auto output_ = get(std::get<1>(options.outputChecks), "out"); - ASSERT_TRUE(output_); - auto & output = *output_; - - EXPECT_EQ(output.allowedReferences, StringSet{"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo"}); - EXPECT_EQ(output.allowedRequisites, StringSet{"/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev"}); - } - - { - auto output_ = get(std::get<1>(options.outputChecks), "bin"); - ASSERT_TRUE(output_); - auto & output = *output_; - - EXPECT_EQ(output.disallowedReferences, StringSet{"/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar"}); - EXPECT_EQ( - 
output.disallowedRequisites, StringSet{"/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev"}); - } - } - - StringSet systemFeatures{"rainbow", "uid-range"}; - - EXPECT_EQ(options.getRequiredSystemFeatures(got), systemFeatures); - }); + {"refs1", {placeholderFoo}}, + {"refs2", {pathBarDrvCA}}, + }, + .additionalSandboxProfile = "sandcastle", + .noChroot = true, + .impureHostDeps = {"/usr/bin/ditto"}, + .impureEnvVars = {"UNICORN"}, + .allowLocalNetworking = true, + .requiredSystemFeatures = {"rainbow", "uid-range"}, + .preferLocalBuild = true, + .allowSubstitutes = false, }; TEST_F(CaDerivationAdvancedAttrsTest, advancedAttributes_structuredAttrs) { - this->readTest("advanced-attributes-structured-attrs.drv", [&](auto encoded) { - auto got = parseDerivation(*this->store, std::move(encoded), "foo", this->mockXpSettings); - - auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); - - EXPECT_EQ( - options.exportReferencesGraph, - (ExportReferencesMap{ - { - "refs1", - { - "/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9", - }, - }, - { - "refs2", - { - "/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv", - }, - }, - })); + testDerivationOptions( + "advanced-attributes-structured-attrs.drv", + advancedAttributes_structuredAttrs_ca, + {"rainbow", "uid-range", "ca-derivations"}); +}; - { - { - auto output_ = get(std::get<1>(options.outputChecks), "out"); - ASSERT_TRUE(output_); - auto & output = *output_; - - EXPECT_EQ(output.allowedReferences, StringSet{"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9"}); - EXPECT_EQ(output.allowedRequisites, StringSet{"/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z"}); - } - - { - auto output_ = get(std::get<1>(options.outputChecks), "bin"); - ASSERT_TRUE(output_); - auto & output = *output_; - - EXPECT_EQ( - output.disallowedReferences, 
StringSet{"/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g"}); - EXPECT_EQ( - output.disallowedRequisites, StringSet{"/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8"}); - } - } +#define TEST_JSON_OPTIONS(FIXUTURE, VAR, VAR2) \ + TEST_F(FIXUTURE, DerivationOptions_##VAR##_from_json) \ + { \ + nix::readJsonTest>( \ + *this, "derivation-options/" #VAR, advancedAttributes_##VAR2); \ + } \ + TEST_F(FIXUTURE, DerivationOptions_##VAR##_to_json) \ + { \ + nix::readJsonTest>( \ + *this, "derivation-options/" #VAR, advancedAttributes_##VAR2); \ + } - StringSet systemFeatures{"rainbow", "uid-range"}; - systemFeatures.insert("ca-derivations"); +TEST_JSON_OPTIONS(DerivationAdvancedAttrsTest, defaults, defaults) +TEST_JSON_OPTIONS(DerivationAdvancedAttrsTest, all_set, ia) +TEST_JSON_OPTIONS(CaDerivationAdvancedAttrsTest, all_set, ca) +TEST_JSON_OPTIONS(DerivationAdvancedAttrsTest, structuredAttrs_defaults, structuredAttrs_defaults) +TEST_JSON_OPTIONS(DerivationAdvancedAttrsTest, structuredAttrs_all_set, structuredAttrs_ia) +TEST_JSON_OPTIONS(CaDerivationAdvancedAttrsTest, structuredAttrs_all_set, structuredAttrs_ca) - EXPECT_EQ(options.getRequiredSystemFeatures(got), systemFeatures); - }); -}; +#undef TEST_JSON_OPTIONS } // namespace nix diff --git a/src/libstore-tests/derivation.cc b/src/libstore-tests/derivation/external-formats.cc similarity index 80% rename from src/libstore-tests/derivation.cc rename to src/libstore-tests/derivation/external-formats.cc index 65a5d011d70..056eeaa8a96 100644 --- a/src/libstore-tests/derivation.cc +++ b/src/libstore-tests/derivation/external-formats.cc @@ -1,57 +1,14 @@ #include #include -#include "nix/util/experimental-features.hh" #include "nix/store/derivations.hh" - -#include "nix/store/tests/libstore.hh" +#include "derivation/test-support.hh" #include "nix/util/tests/json-characterization.hh" namespace nix { using nlohmann::json; -class DerivationTest : public virtual CharacterizationTest, public LibStoreTest -{ - 
std::filesystem::path unitTestData = getUnitTestData() / "derivation"; - -public: - std::filesystem::path goldenMaster(std::string_view testStem) const override - { - return unitTestData / testStem; - } - - /** - * We set these in tests rather than the regular globals so we don't have - * to worry about race conditions if the tests run concurrently. - */ - ExperimentalFeatureSettings mockXpSettings; -}; - -class CaDerivationTest : public DerivationTest -{ - void SetUp() override - { - mockXpSettings.set("experimental-features", "ca-derivations"); - } -}; - -class DynDerivationTest : public DerivationTest -{ - void SetUp() override - { - mockXpSettings.set("experimental-features", "dynamic-derivations ca-derivations"); - } -}; - -class ImpureDerivationTest : public DerivationTest -{ - void SetUp() override - { - mockXpSettings.set("experimental-features", "impure-derivations"); - } -}; - TEST_F(DerivationTest, BadATerm_version) { ASSERT_THROW( @@ -66,23 +23,17 @@ TEST_F(DynDerivationTest, BadATerm_oldVersionDynDeps) FormatError); } -#define MAKE_OUTPUT_JSON_TEST_P(FIXTURE) \ - TEST_P(FIXTURE, from_json) \ - { \ - const auto & [name, expected] = GetParam(); \ - /* Don't use readJsonTest because we want to check experimental \ - features. 
*/ \ - readTest(Path{"output-"} + name + ".json", [&](const auto & encoded_) { \ - json j = json::parse(encoded_); \ - DerivationOutput got = DerivationOutput::fromJSON(j, mockXpSettings); \ - ASSERT_EQ(got, expected); \ - }); \ - } \ - \ - TEST_P(FIXTURE, to_json) \ - { \ - const auto & [name, value] = GetParam(); \ - writeJsonTest("output-" + name, value); \ +#define MAKE_OUTPUT_JSON_TEST_P(FIXTURE) \ + TEST_P(FIXTURE, from_json) \ + { \ + const auto & [name, expected] = GetParam(); \ + readJsonTest(Path{"output-"} + name, expected, mockXpSettings); \ + } \ + \ + TEST_P(FIXTURE, to_json) \ + { \ + const auto & [name, value] = GetParam(); \ + writeJsonTest("output-" + name, value); \ } struct DerivationOutputJsonTest : DerivationTest, @@ -193,13 +144,7 @@ INSTANTIATE_TEST_SUITE_P( TEST_P(FIXTURE, from_json) \ { \ const auto & drv = GetParam(); \ - /* Don't use readJsonTest because we want to check experimental \ - features. */ \ - readTest(drv.name + ".json", [&](const auto & encoded_) { \ - auto encoded = json::parse(encoded_); \ - Derivation got = Derivation::fromJSON(encoded, mockXpSettings); \ - ASSERT_EQ(got, drv); \ - }); \ + readJsonTest(drv.name, drv, mockXpSettings); \ } \ \ TEST_P(FIXTURE, to_json) \ @@ -213,7 +158,8 @@ INSTANTIATE_TEST_SUITE_P( const auto & drv = GetParam(); \ readTest(drv.name + ".drv", [&](auto encoded) { \ auto got = parseDerivation(*store, std::move(encoded), drv.name, mockXpSettings); \ - ASSERT_EQ(got.toJSON(), drv.toJSON()); \ + using nlohmann::json; \ + ASSERT_EQ(static_cast(got), static_cast(drv)); \ ASSERT_EQ(got, drv); \ }); \ } \ diff --git a/src/libstore-tests/derivation/invariants.cc b/src/libstore-tests/derivation/invariants.cc new file mode 100644 index 00000000000..cacdca0cdc0 --- /dev/null +++ b/src/libstore-tests/derivation/invariants.cc @@ -0,0 +1,266 @@ +#include +#include + +#include "nix/store/derivations.hh" +#include "nix/store/tests/libstore.hh" +#include "nix/store/dummy-store-impl.hh" +#include 
"nix/util/tests/json-characterization.hh" + +#include "derivation/test-support.hh" + +namespace nix { + +class FillInOutputPathsTest : public LibStoreTest, public JsonCharacterizationTest +{ + std::filesystem::path unitTestData = getUnitTestData() / "derivation" / "invariants"; + +protected: + FillInOutputPathsTest() + : LibStoreTest([]() { + auto config = make_ref(DummyStoreConfig::Params{}); + config->readOnly = false; + return config->openDummyStore(); + }()) + { + } + + /** + * Create a CA floating output derivation and write it to the store. + * This is useful for creating dependencies that will cause downstream + * derivations to remain deferred. + */ + StorePath makeCAFloatingDependency(std::string_view name) + { + Derivation depDrv; + depDrv.name = name; + depDrv.platform = "x86_64-linux"; + depDrv.builder = "/bin/sh"; + depDrv.outputs = { + { + "out", + // will ensure that downstream is deferred + DerivationOutput{DerivationOutput::CAFloating{ + .method = ContentAddressMethod::Raw::NixArchive, + .hashAlgo = HashAlgorithm::SHA256, + }}, + }, + }; + depDrv.env = {{"out", ""}}; + + // Fill in the dependency derivation's output paths + depDrv.fillInOutputPaths(*store); + + // Write the dependency to the store + return writeDerivation(*store, depDrv, NoRepair); + } + +public: + std::filesystem::path goldenMaster(std::string_view testStem) const override + { + return unitTestData / testStem; + } +}; + +TEST_F(FillInOutputPathsTest, fillsDeferredOutputs_emptyStringEnvVar) +{ + using nlohmann::json; + + // Before: Derivation with deferred output + Derivation drv; + drv.name = "filled-in-deferred-empty-env-var"; + drv.platform = "x86_64-linux"; + drv.builder = "/bin/sh"; + drv.outputs = { + {"out", DerivationOutput{DerivationOutput::Deferred{}}}, + }; + drv.env = {{"__doc", "Fill in deferred output with empty env var"}, {"out", ""}}; + + // Serialize before state + checkpointJson("filled-in-deferred-empty-env-var-pre", drv); + + drv.fillInOutputPaths(*store); + + 
// Serialize after state + checkpointJson("filled-in-deferred-empty-env-var-post", drv); + + // After: Should have been converted to InputAddressed + auto * outputP = std::get_if(&drv.outputs.at("out").raw); + ASSERT_TRUE(outputP); + auto & output = *outputP; + + // Environment variable should be filled in + EXPECT_EQ(drv.env.at("out"), store->printStorePath(output.path)); +} + +TEST_F(FillInOutputPathsTest, fillsDeferredOutputs_empty_string_var) +{ + using nlohmann::json; + + // Before: Derivation with deferred output + Derivation drv; + drv.name = "filled-in-deferred-no-env-var"; + drv.platform = "x86_64-linux"; + drv.builder = "/bin/sh"; + drv.outputs = { + {"out", DerivationOutput{DerivationOutput::Deferred{}}}, + }; + drv.env = { + {"__doc", "Fill in deferred with missing env var"}, + }; + + // Serialize before state + checkpointJson("filled-in-deferred-no-env-var-pre", drv); + + drv.fillInOutputPaths(*store); + + // Serialize after state + checkpointJson("filled-in-deferred-no-env-var-post", drv); + + // After: Should have been converted to InputAddressed + auto * outputP = std::get_if(&drv.outputs.at("out").raw); + ASSERT_TRUE(outputP); + auto & output = *outputP; + + // Environment variable should be filled in + EXPECT_EQ(drv.env.at("out"), store->printStorePath(output.path)); +} + +TEST_F(FillInOutputPathsTest, preservesInputAddressedOutputs) +{ + auto expectedPath = StorePath{"w4bk7hpyxzgy2gx8fsa8f952435pll3i-filled-in-already"}; + + Derivation drv; + drv.name = "filled-in-already"; + drv.platform = "x86_64-linux"; + drv.builder = "/bin/sh"; + drv.outputs = { + {"out", DerivationOutput{DerivationOutput::InputAddressed{.path = expectedPath}}}, + }; + drv.env = { + {"__doc", "Correct path stays unchanged"}, + {"out", store->printStorePath(expectedPath)}, + }; + + // Serialize before state + checkpointJson("filled-in-idempotent", drv); + + auto drvBefore = drv; + + drv.fillInOutputPaths(*store); + + // Should still be no change + EXPECT_EQ(drv, drvBefore); 
+} + +TEST_F(FillInOutputPathsTest, throwsOnIncorrectInputAddressedPath) +{ + auto wrongPath = StorePath{"c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-wrong-name"}; + + Derivation drv; + drv.name = "bad-path"; + drv.platform = "x86_64-linux"; + drv.builder = "/bin/sh"; + drv.outputs = { + {"out", DerivationOutput{DerivationOutput::InputAddressed{.path = wrongPath}}}, + }; + drv.env = { + {"__doc", "Wrong InputAddressed path throws error"}, + {"out", store->printStorePath(wrongPath)}, + }; + + // Serialize before state + checkpointJson("bad-path", drv); + + ASSERT_THROW(drv.fillInOutputPaths(*store), Error); +} + +#if 0 +TEST_F(FillInOutputPathsTest, throwsOnIncorrectEnvVar) +{ + auto wrongPath = StorePath{"c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-wrong-name"}; + + Derivation drv; + drv.name = "bad-env-var"; + drv.platform = "x86_64-linux"; + drv.builder = "/bin/sh"; + drv.outputs = { + {"out", DerivationOutput{DerivationOutput::Deferred{}}}, + }; + drv.env = { + {"__doc", "Wrong env var value throws error"}, + {"out", store->printStorePath(wrongPath)}, + }; + + // Serialize before state + checkpointJson("bad-env-var", drv); + + ASSERT_THROW(drv.fillInOutputPaths(*store), Error); +} +#endif + +TEST_F(FillInOutputPathsTest, preservesDeferredWithInputDrvs) +{ + using nlohmann::json; + + // Create a CA floating dependency derivation + auto depDrvPath = makeCAFloatingDependency("dependency"); + + // Create a derivation that depends on the dependency + Derivation drv; + drv.name = "depends-on-drv"; + drv.platform = "x86_64-linux"; + drv.builder = "/bin/sh"; + drv.outputs = { + {"out", DerivationOutput{DerivationOutput::Deferred{}}}, + }; + drv.env = { + {"__doc", "Deferred stays deferred with CA dependencies"}, + {"out", ""}, + }; + // Add the real input derivation dependency + drv.inputDrvs = {.map = {{depDrvPath, {.value = {"out"}}}}}; + + // Serialize before state + checkpointJson("depends-on-drv-pre", drv); + + auto drvBefore = drv; + + // Apply fillInOutputPaths + 
drv.fillInOutputPaths(*store); + + // Derivation should be unchanged + EXPECT_EQ(drv, drvBefore); +} + +TEST_F(FillInOutputPathsTest, throwsOnPatWhenShouldBeDeffered) +{ + using nlohmann::json; + + // Create a CA floating dependency derivation + auto depDrvPath = makeCAFloatingDependency("dependency"); + + auto wrongPath = StorePath{"c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-wrong-name"}; + + // Create a derivation that depends on the dependency + Derivation drv; + drv.name = "depends-on-drv"; + drv.platform = "x86_64-linux"; + drv.builder = "/bin/sh"; + drv.outputs = { + {"out", DerivationOutput{DerivationOutput::InputAddressed{.path = wrongPath}}}, + }; + drv.env = { + {"__doc", "InputAddressed throws when should be deferred"}, + {"out", ""}, + }; + // Add the real input derivation dependency + drv.inputDrvs = {.map = {{depDrvPath, {.value = {"out"}}}}}; + + // Serialize before state + checkpointJson("bad-depends-on-drv-pre", drv); + + // Apply fillInOutputPaths + ASSERT_THROW(drv.fillInOutputPaths(*store), Error); +} + +} // namespace nix diff --git a/src/libstore-tests/derivation/test-support.hh b/src/libstore-tests/derivation/test-support.hh new file mode 100644 index 00000000000..f48e6caef58 --- /dev/null +++ b/src/libstore-tests/derivation/test-support.hh @@ -0,0 +1,52 @@ +#pragma once + +#include + +#include "nix/util/experimental-features.hh" +#include "nix/store/tests/libstore.hh" +#include "nix/util/tests/characterization.hh" + +namespace nix { + +class DerivationTest : public virtual CharacterizationTest, public LibStoreTest +{ + std::filesystem::path unitTestData = getUnitTestData() / "derivation"; + +public: + std::filesystem::path goldenMaster(std::string_view testStem) const override + { + return unitTestData / testStem; + } + + /** + * We set these in tests rather than the regular globals so we don't have + * to worry about race conditions if the tests run concurrently. 
+ */ + ExperimentalFeatureSettings mockXpSettings; +}; + +class CaDerivationTest : public DerivationTest +{ + void SetUp() override + { + mockXpSettings.set("experimental-features", "ca-derivations"); + } +}; + +class DynDerivationTest : public DerivationTest +{ + void SetUp() override + { + mockXpSettings.set("experimental-features", "dynamic-derivations ca-derivations"); + } +}; + +class ImpureDerivationTest : public DerivationTest +{ + void SetUp() override + { + mockXpSettings.set("experimental-features", "impure-derivations"); + } +}; + +} // namespace nix diff --git a/src/libstore-tests/derived-path.cc b/src/libstore-tests/derived-path.cc index 6e7648f2589..70e789c0c4f 100644 --- a/src/libstore-tests/derived-path.cc +++ b/src/libstore-tests/derived-path.cc @@ -3,13 +3,13 @@ #include #include -#include "nix/util/tests/characterization.hh" +#include "nix/util/tests/json-characterization.hh" #include "nix/store/tests/derived-path.hh" #include "nix/store/tests/libstore.hh" namespace nix { -class DerivedPathTest : public CharacterizationTest, public LibStoreTest +class DerivedPathTest : public virtual CharacterizationTest, public LibStoreTest { std::filesystem::path unitTestData = getUnitTestData() / "derived-path"; @@ -123,25 +123,51 @@ RC_GTEST_FIXTURE_PROP(DerivedPathTest, prop_round_rip, (const DerivedPath & o)) using nlohmann::json; -#define TEST_JSON(TYPE, NAME, VAL) \ - static const TYPE NAME = VAL; \ - \ - TEST_F(DerivedPathTest, NAME##_from_json) \ - { \ - readTest(#NAME ".json", [&](const auto & encoded_) { \ - auto encoded = json::parse(encoded_); \ - TYPE got = static_cast(encoded); \ - ASSERT_EQ(got, NAME); \ - }); \ - } \ - \ - TEST_F(DerivedPathTest, NAME##_to_json) \ - { \ - writeTest( \ - #NAME ".json", \ - [&]() -> json { return static_cast(NAME); }, \ - [](const auto & file) { return json::parse(readFile(file)); }, \ - [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ +struct SingleDerivedPathJsonTest : 
DerivedPathTest, + JsonCharacterizationTest, + ::testing::WithParamInterface +{}; + +struct DerivedPathJsonTest : DerivedPathTest, + JsonCharacterizationTest, + ::testing::WithParamInterface +{}; + +#define TEST_JSON(TYPE, NAME, VAL) \ + static const TYPE NAME = VAL; \ + \ + TEST_F(TYPE##JsonTest, NAME##_from_json) \ + { \ + readJsonTest(#NAME, NAME); \ + } \ + \ + TEST_F(TYPE##JsonTest, NAME##_to_json) \ + { \ + writeJsonTest(#NAME, NAME); \ + } + +#define TEST_JSON_XP_DYN(TYPE, NAME, VAL) \ + static const TYPE NAME = VAL; \ + \ + TEST_F(TYPE##JsonTest, NAME##_from_json_throws_without_xp) \ + { \ + std::optional ret; \ + readTest(#NAME ".json", [&](const auto & encoded_) { ret = json::parse(encoded_); }); \ + if (ret) { \ + EXPECT_THROW(nlohmann::adl_serializer::from_json(*ret), MissingExperimentalFeature); \ + } \ + } \ + \ + TEST_F(TYPE##JsonTest, NAME##_from_json) \ + { \ + ExperimentalFeatureSettings xpSettings; \ + xpSettings.set("experimental-features", "dynamic-derivations"); \ + readJsonTest(#NAME, NAME, xpSettings); \ + } \ + \ + TEST_F(TYPE##JsonTest, NAME##_to_json) \ + { \ + writeJsonTest(#NAME, NAME); \ } TEST_JSON( @@ -156,7 +182,7 @@ TEST_JSON( .output = "bar", })); -TEST_JSON( +TEST_JSON_XP_DYN( SingleDerivedPath, single_built_built, (SingleDerivedPath::Built{ @@ -179,7 +205,7 @@ TEST_JSON( .outputs = OutputsSpec::Names{"bar", "baz"}, })); -TEST_JSON( +TEST_JSON_XP_DYN( DerivedPath, multi_built_built, (DerivedPath::Built{ @@ -191,7 +217,7 @@ TEST_JSON( .outputs = OutputsSpec::Names{"baz", "quux"}, })); -TEST_JSON( +TEST_JSON_XP_DYN( DerivedPath, multi_built_built_wildcard, (DerivedPath::Built{ diff --git a/src/libstore-tests/dummy-store.cc b/src/libstore-tests/dummy-store.cc index b841d789002..4a12dcf78c0 100644 --- a/src/libstore-tests/dummy-store.cc +++ b/src/libstore-tests/dummy-store.cc @@ -1,11 +1,32 @@ #include +#include -#include "nix/store/dummy-store.hh" +#include "nix/util/memory-source-accessor.hh" +#include 
"nix/store/dummy-store-impl.hh" #include "nix/store/globals.hh" #include "nix/store/realisation.hh" +#include "nix/util/tests/json-characterization.hh" + namespace nix { +class DummyStoreTest : public virtual CharacterizationTest +{ + std::filesystem::path unitTestData = getUnitTestData() / "dummy-store"; + +public: + + std::filesystem::path goldenMaster(std::string_view testStem) const override + { + return unitTestData / testStem; + } + + static void SetUpTestSuite() + { + initLibStore(false); + } +}; + TEST(DummyStore, realisation_read) { initLibStore(/*loadConfig=*/false); @@ -13,7 +34,7 @@ TEST(DummyStore, realisation_read) auto store = [] { auto cfg = make_ref(StoreReference::Params{}); cfg->readOnly = false; - return cfg->openStore(); + return cfg->openDummyStore(); }(); auto drvHash = Hash::parseExplicitFormatUnprefixed( @@ -22,6 +43,109 @@ TEST(DummyStore, realisation_read) auto outputName = "foo"; EXPECT_EQ(store->queryRealisation({drvHash, outputName}), nullptr); + + UnkeyedRealisation value{ + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"}, + }; + + store->buildTrace.insert({drvHash, {{outputName, value}}}); + + auto value2 = store->queryRealisation({drvHash, outputName}); + + ASSERT_TRUE(value2); + EXPECT_EQ(*value2, value); } +/* ---------------------------------------------------------------------------- + * JSON + * --------------------------------------------------------------------------*/ + +using nlohmann::json; + +struct DummyStoreJsonTest : DummyStoreTest, + JsonCharacterizationTest>, + ::testing::WithParamInterface>> +{}; + +TEST_P(DummyStoreJsonTest, from_json) +{ + auto & [name, expected] = GetParam(); + using namespace nlohmann; + /* Cannot use `readJsonTest` because need to dereference the stores + for equality. 
*/ + readTest(Path{name} + ".json", [&](const auto & encodedRaw) { + auto encoded = json::parse(encodedRaw); + ref decoded = adl_serializer>::from_json(encoded); + ASSERT_EQ(*decoded, *expected); + }); +} + +TEST_P(DummyStoreJsonTest, to_json) +{ + auto & [name, value] = GetParam(); + writeJsonTest(name, value); +} + +INSTANTIATE_TEST_SUITE_P(DummyStoreJSON, DummyStoreJsonTest, [] { + initLibStore(false); + auto writeCfg = make_ref(DummyStore::Config::Params{}); + writeCfg->readOnly = false; + return ::testing::Values( + std::pair{ + "empty", + make_ref(DummyStore::Config::Params{})->openDummyStore(), + }, + std::pair{ + "one-flat-file", + [&] { + auto store = writeCfg->openDummyStore(); + store->addToStore( + "my-file", + SourcePath{ + [] { + auto sc = make_ref(); + sc->root = MemorySourceAccessor::File{MemorySourceAccessor::File::Regular{ + .executable = false, + .contents = "asdf", + }}; + return sc; + }(), + }, + ContentAddressMethod::Raw::NixArchive, + HashAlgorithm::SHA256); + return store; + }(), + }, + std::pair{ + "one-derivation", + [&] { + auto store = writeCfg->openDummyStore(); + Derivation drv; + drv.name = "foo"; + store->writeDerivation(drv); + return store; + }(), + }, + std::pair{ + "one-realisation", + [&] { + auto store = writeCfg->openDummyStore(); + store->buildTrace.insert_or_assign( + Hash::parseExplicitFormatUnprefixed( + "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad", + HashAlgorithm::SHA256, + HashFormat::Base16), + std::map{ + { + "out", + UnkeyedRealisation{ + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + }, + }, + }); + return store; + }(), + }); +}()); + } // namespace nix diff --git a/src/libstore-tests/meson.build b/src/libstore-tests/meson.build index 2784b31dce8..58f624611a4 100644 --- a/src/libstore-tests/meson.build +++ b/src/libstore-tests/meson.build @@ -52,13 +52,14 @@ gtest = dependency('gmock') deps_private += gtest subdir('nix-meson-build-support/common') 
-subdir('nix-meson-build-support/asan-options') sources = files( + 'build-result.cc', 'common-protocol.cc', 'content-address.cc', 'derivation-advanced-attrs.cc', - 'derivation.cc', + 'derivation/external-formats.cc', + 'derivation/invariants.cc', 'derived-path.cc', 'downstream-placeholder.cc', 'dummy-store.cc', @@ -84,6 +85,7 @@ sources = files( 'store-reference.cc', 'uds-remote-store.cc', 'worker-protocol.cc', + 'write-derivation.cc', ) include_dirs = [ include_directories('.') ] @@ -105,7 +107,7 @@ this_exe = executable( test( meson.project_name(), this_exe, - env : asan_test_options_env + { + env : { '_NIX_TEST_UNIT_DATA' : meson.current_source_dir() / 'data', 'HOME' : meson.current_build_dir() / 'test-home', 'NIX_REMOTE' : meson.current_build_dir() / 'test-home' / 'store', @@ -139,7 +141,7 @@ if get_option('benchmarks') benchmark( 'nix-store-benchmarks', benchmark_exe, - env : asan_test_options_env + { + env : { '_NIX_TEST_UNIT_DATA' : meson.current_source_dir() / 'data', }, ) diff --git a/src/libstore-tests/nar-info-disk-cache.cc b/src/libstore-tests/nar-info-disk-cache.cc index 98a94b91e8f..b925a4a1e04 100644 --- a/src/libstore-tests/nar-info-disk-cache.cc +++ b/src/libstore-tests/nar-info-disk-cache.cc @@ -14,9 +14,9 @@ TEST(NarInfoDiskCacheImpl, create_and_read) int prio = 12345; bool wantMassQuery = true; - Path tmpDir = createTempDir(); + auto tmpDir = createTempDir(); AutoDelete delTmpDir(tmpDir); - Path dbPath(tmpDir + "/test-narinfo-disk-cache.sqlite"); + auto dbPath(tmpDir / "test-narinfo-disk-cache.sqlite"); int savedId; int barId; @@ -24,7 +24,7 @@ TEST(NarInfoDiskCacheImpl, create_and_read) SQLiteStmt getIds; { - auto cache = getTestNarInfoDiskCache(dbPath); + auto cache = getTestNarInfoDiskCache(dbPath.string()); // Set up "background noise" and check that different caches receive different ids { @@ -73,7 +73,7 @@ TEST(NarInfoDiskCacheImpl, create_and_read) { // We can't clear the in-memory cache, so we use a new cache object. 
This is // more realistic anyway. - auto cache2 = getTestNarInfoDiskCache(dbPath); + auto cache2 = getTestNarInfoDiskCache(dbPath.string()); { auto r = cache2->upToDateCacheExists("http://foo"); diff --git a/src/libstore-tests/nar-info.cc b/src/libstore-tests/nar-info.cc index 751c5e305bb..493ca2a8c37 100644 --- a/src/libstore-tests/nar-info.cc +++ b/src/libstore-tests/nar-info.cc @@ -11,9 +11,19 @@ namespace nix { using nlohmann::json; -class NarInfoTest : public CharacterizationTest, public LibStoreTest +class NarInfoTestV1 : public CharacterizationTest, public LibStoreTest { - std::filesystem::path unitTestData = getUnitTestData() / "nar-info"; + std::filesystem::path unitTestData = getUnitTestData() / "nar-info" / "json-1"; + + std::filesystem::path goldenMaster(PathView testStem) const override + { + return unitTestData / (testStem + ".json"); + } +}; + +class NarInfoTestV2 : public CharacterizationTest, public LibStoreTest +{ + std::filesystem::path unitTestData = getUnitTestData() / "nar-info" / "json-2"; std::filesystem::path goldenMaster(PathView testStem) const override { @@ -59,27 +69,63 @@ static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo) return info; } -#define JSON_TEST(STEM, PURE) \ - TEST_F(NarInfoTest, NarInfo_##STEM##_from_json) \ - { \ - readTest(#STEM, [&](const auto & encoded_) { \ - auto encoded = json::parse(encoded_); \ - auto expected = makeNarInfo(*store, PURE); \ - NarInfo got = NarInfo::fromJSON(*store, expected.path, encoded); \ - ASSERT_EQ(got, expected); \ - }); \ - } \ - \ - TEST_F(NarInfoTest, NarInfo_##STEM##_to_json) \ - { \ - writeTest( \ - #STEM, \ - [&]() -> json { return makeNarInfo(*store, PURE).toJSON(*store, PURE, HashFormat::SRI); }, \ - [](const auto & file) { return json::parse(readFile(file)); }, \ - [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ +#define JSON_READ_TEST_V1(STEM, PURE) \ + TEST_F(NarInfoTestV1, NarInfo_##STEM##_from_json) \ + { \ + 
readTest(#STEM, [&](const auto & encoded_) { \ + auto encoded = json::parse(encoded_); \ + auto expected = makeNarInfo(*store, PURE); \ + auto got = UnkeyedNarInfo::fromJSON(&*store, encoded); \ + ASSERT_EQ(got, expected); \ + }); \ + } + +#define JSON_WRITE_TEST_V1(STEM, PURE) \ + TEST_F(NarInfoTestV1, NarInfo_##STEM##_to_json) \ + { \ + writeTest( \ + #STEM, \ + [&]() -> json { return makeNarInfo(*store, PURE).toJSON(&*store, PURE, PathInfoJsonFormat::V1); }, \ + [](const auto & file) { return json::parse(readFile(file)); }, \ + [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ } -JSON_TEST(pure, false) -JSON_TEST(impure, true) +#define JSON_TEST_V1(STEM, PURE) \ + JSON_READ_TEST_V1(STEM, PURE) \ + JSON_WRITE_TEST_V1(STEM, PURE) + +#define JSON_READ_TEST_V2(STEM, PURE) \ + TEST_F(NarInfoTestV2, NarInfo_##STEM##_from_json) \ + { \ + readTest(#STEM, [&](const auto & encoded_) { \ + auto encoded = json::parse(encoded_); \ + auto expected = makeNarInfo(*store, PURE); \ + auto got = UnkeyedNarInfo::fromJSON(nullptr, encoded); \ + ASSERT_EQ(got, expected); \ + }); \ + } + +#define JSON_WRITE_TEST_V2(STEM, PURE) \ + TEST_F(NarInfoTestV2, NarInfo_##STEM##_to_json) \ + { \ + writeTest( \ + #STEM, \ + [&]() -> json { return makeNarInfo(*store, PURE).toJSON(nullptr, PURE, PathInfoJsonFormat::V2); }, \ + [](const auto & file) { return json::parse(readFile(file)); }, \ + [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ + } + +#define JSON_TEST_V2(STEM, PURE) \ + JSON_READ_TEST_V2(STEM, PURE) \ + JSON_WRITE_TEST_V2(STEM, PURE) + +JSON_TEST_V1(pure, false) +JSON_TEST_V1(impure, true) + +// Test that JSON without explicit version field parses as V1 +JSON_READ_TEST_V1(pure_noversion, false) + +JSON_TEST_V2(pure, false) +JSON_TEST_V2(impure, true) } // namespace nix diff --git a/src/libstore-tests/nix_api_store.cc b/src/libstore-tests/nix_api_store.cc index dfd554ec160..ea600f90570 100644 --- 
a/src/libstore-tests/nix_api_store.cc +++ b/src/libstore-tests/nix_api_store.cc @@ -1,5 +1,7 @@ #include +#include + #include "nix_api_util.h" #include "nix_api_store.h" @@ -92,6 +94,70 @@ TEST_F(nix_api_store_test, DoesNotCrashWhenContextIsNull) nix_store_path_free(path); } +// Verify it's 20 bytes +static_assert(sizeof(nix_store_path_hash_part::bytes) == 20); +static_assert(sizeof(nix_store_path_hash_part::bytes) == sizeof(nix_store_path_hash_part)); + +TEST_F(nix_api_store_test, nix_store_path_hash) +{ + StorePath * path = nix_store_parse_path(ctx, store, (nixStoreDir + PATH_SUFFIX).c_str()); + ASSERT_NE(path, nullptr); + + nix_store_path_hash_part hash; + auto ret = nix_store_path_hash(ctx, path, &hash); + assert_ctx_ok(); + ASSERT_EQ(ret, NIX_OK); + + // The hash should be non-zero + bool allZero = true; + for (size_t i = 0; i < sizeof(hash.bytes); i++) { + if (hash.bytes[i] != 0) { + allZero = false; + break; + } + } + ASSERT_FALSE(allZero); + + nix_store_path_free(path); +} + +TEST_F(nix_api_store_test, nix_store_create_from_parts_roundtrip) +{ + // Parse a path + StorePath * original = nix_store_parse_path(ctx, store, (nixStoreDir + PATH_SUFFIX).c_str()); + EXPECT_NE(original, nullptr); + + // Get its hash + nix_store_path_hash_part hash; + auto ret = nix_store_path_hash(ctx, original, &hash); + assert_ctx_ok(); + ASSERT_EQ(ret, NIX_OK); + + // Get its name + std::string name; + nix_store_path_name(original, OBSERVE_STRING(name)); + + // Reconstruct from parts + StorePath * reconstructed = nix_store_create_from_parts(ctx, &hash, name.c_str(), name.size()); + assert_ctx_ok(); + ASSERT_NE(reconstructed, nullptr); + + // Should be equal + EXPECT_EQ(original->path, reconstructed->path); + + nix_store_path_free(original); + nix_store_path_free(reconstructed); +} + +TEST_F(nix_api_store_test, nix_store_create_from_parts_invalid_name) +{ + nix_store_path_hash_part hash = {}; + // Invalid name with spaces + StorePath * path = nix_store_create_from_parts(ctx, &hash, 
"invalid name", 12); + ASSERT_EQ(path, nullptr); + ASSERT_EQ(nix_err_code(ctx), NIX_ERR_NIX_ERROR); +} + TEST_F(nix_api_store_test, get_version) { std::string str; @@ -146,9 +212,9 @@ TEST_F(nix_api_store_test, nix_store_real_path) TEST_F(nix_api_util_context, nix_store_real_path_relocated) { auto tmp = nix::createTempDir(); - std::string storeRoot = tmp + "/store"; - std::string stateDir = tmp + "/state"; - std::string logDir = tmp + "/log"; + auto storeRoot = (tmp / "store").string(); + auto stateDir = (tmp / "state").string(); + auto logDir = (tmp / "log").string(); const char * rootkv[] = {"root", storeRoot.c_str()}; const char * statekv[] = {"state", stateDir.c_str()}; const char * logkv[] = {"log", logDir.c_str()}; @@ -184,7 +250,8 @@ TEST_F(nix_api_util_context, nix_store_real_path_relocated) TEST_F(nix_api_util_context, nix_store_real_path_binary_cache) { - Store * store = nix_store_open(ctx, nix::fmt("file://%s/binary-cache", nix::createTempDir()).c_str(), nullptr); + Store * store = + nix_store_open(ctx, nix::fmt("file://%s/binary-cache", nix::createTempDir().string()).c_str(), nullptr); assert_ctx_ok(); ASSERT_NE(store, nullptr); @@ -218,6 +285,70 @@ struct LambdaAdapter } }; +class NixApiStoreTestWithRealisedPath : public nix_api_store_test_base +{ +public: + StorePath * drvPath = nullptr; + nix_derivation * drv = nullptr; + Store * store = nullptr; + StorePath * outPath = nullptr; + + void SetUp() override + { + nix_api_store_test_base::SetUp(); + + nix::experimentalFeatureSettings.set("extra-experimental-features", "ca-derivations"); + nix::settings.substituters = {}; + + store = open_local_store(); + + std::filesystem::path unitTestData{getenv("_NIX_TEST_UNIT_DATA")}; + std::ifstream t{unitTestData / "derivation/ca/self-contained.json"}; + std::stringstream buffer; + buffer << t.rdbuf(); + + // Replace the hardcoded system with the current system + std::string jsonStr = nix::replaceStrings(buffer.str(), "x86_64-linux", 
nix::settings.thisSystem.get()); + + drv = nix_derivation_from_json(ctx, store, jsonStr.c_str()); + assert_ctx_ok(); + ASSERT_NE(drv, nullptr); + + drvPath = nix_add_derivation(ctx, store, drv); + assert_ctx_ok(); + ASSERT_NE(drvPath, nullptr); + + auto cb = LambdaAdapter{.fun = [&](const char * outname, const StorePath * outPath_) { + ASSERT_NE(outname, nullptr) << "Output name should not be NULL"; + auto is_valid_path = nix_store_is_valid_path(ctx, store, outPath_); + ASSERT_EQ(is_valid_path, true); + ASSERT_STREQ(outname, "out") << "Expected single 'out' output"; + ASSERT_EQ(outPath, nullptr) << "Output path callback should only be called once"; + outPath = nix_store_path_clone(outPath_); + }}; + + auto ret = nix_store_realise( + ctx, store, drvPath, static_cast(&cb), decltype(cb)::call_void); + assert_ctx_ok(); + ASSERT_EQ(ret, NIX_OK); + ASSERT_NE(outPath, nullptr) << "Derivation should have produced an output"; + } + + void TearDown() override + { + if (drvPath) + nix_store_path_free(drvPath); + if (outPath) + nix_store_path_free(outPath); + if (drv) + nix_derivation_free(drv); + if (store) + nix_store_free(store); + + nix_api_store_test_base::TearDown(); + } +}; + TEST_F(nix_api_store_test_base, build_from_json) { // FIXME get rid of these @@ -232,7 +363,10 @@ TEST_F(nix_api_store_test_base, build_from_json) std::stringstream buffer; buffer << t.rdbuf(); - auto * drv = nix_derivation_from_json(ctx, store, buffer.str().c_str()); + // Replace the hardcoded system with the current system + std::string jsonStr = nix::replaceStrings(buffer.str(), "x86_64-linux", nix::settings.thisSystem.get()); + + auto * drv = nix_derivation_from_json(ctx, store, jsonStr.c_str()); assert_ctx_ok(); ASSERT_NE(drv, nullptr); @@ -240,15 +374,21 @@ TEST_F(nix_api_store_test_base, build_from_json) assert_ctx_ok(); ASSERT_NE(drv, nullptr); + int callbackCount = 0; auto cb = LambdaAdapter{.fun = [&](const char * outname, const StorePath * outPath) { + ASSERT_NE(outname, nullptr); + 
ASSERT_STREQ(outname, "out"); + ASSERT_NE(outPath, nullptr); auto is_valid_path = nix_store_is_valid_path(ctx, store, outPath); ASSERT_EQ(is_valid_path, true); + callbackCount++; }}; auto ret = nix_store_realise( ctx, store, drvPath, static_cast(&cb), decltype(cb)::call_void); assert_ctx_ok(); ASSERT_EQ(ret, NIX_OK); + ASSERT_EQ(callbackCount, 1) << "Callback should have been invoked exactly once"; // Clean up nix_store_path_free(drvPath); @@ -256,4 +396,563 @@ TEST_F(nix_api_store_test_base, build_from_json) nix_store_free(store); } +TEST_F(nix_api_store_test_base, nix_store_realise_invalid_system) +{ + // Test that nix_store_realise properly reports errors when the system is invalid + nix::experimentalFeatureSettings.set("extra-experimental-features", "ca-derivations"); + nix::settings.substituters = {}; + + auto * store = open_local_store(); + + std::filesystem::path unitTestData{getenv("_NIX_TEST_UNIT_DATA")}; + std::ifstream t{unitTestData / "derivation/ca/self-contained.json"}; + std::stringstream buffer; + buffer << t.rdbuf(); + + // Use an invalid system that cannot be built + std::string jsonStr = nix::replaceStrings(buffer.str(), "x86_64-linux", "bogus65-bogusos"); + + auto * drv = nix_derivation_from_json(ctx, store, jsonStr.c_str()); + assert_ctx_ok(); + ASSERT_NE(drv, nullptr); + + auto * drvPath = nix_add_derivation(ctx, store, drv); + assert_ctx_ok(); + ASSERT_NE(drvPath, nullptr); + + int callbackCount = 0; + auto cb = LambdaAdapter{.fun = [&](const char * outname, const StorePath * outPath) { callbackCount++; }}; + + auto ret = nix_store_realise( + ctx, store, drvPath, static_cast(&cb), decltype(cb)::call_void); + + // Should fail with an error + ASSERT_NE(ret, NIX_OK); + ASSERT_EQ(callbackCount, 0) << "Callback should not be invoked when build fails"; + + // Check that error message is set + std::string errMsg = nix_err_msg(nullptr, ctx, nullptr); + ASSERT_FALSE(errMsg.empty()) << "Error message should be set"; + ASSERT_NE(errMsg.find("system"), 
std::string::npos) << "Error should mention system"; + + // Clean up + nix_store_path_free(drvPath); + nix_derivation_free(drv); + nix_store_free(store); +} + +TEST_F(nix_api_store_test_base, nix_store_realise_builder_fails) +{ + // Test that nix_store_realise properly reports errors when the builder fails + nix::experimentalFeatureSettings.set("extra-experimental-features", "ca-derivations"); + nix::settings.substituters = {}; + + auto * store = open_local_store(); + + std::filesystem::path unitTestData{getenv("_NIX_TEST_UNIT_DATA")}; + std::ifstream t{unitTestData / "derivation/ca/self-contained.json"}; + std::stringstream buffer; + buffer << t.rdbuf(); + + // Replace with current system and make builder command fail + std::string jsonStr = nix::replaceStrings(buffer.str(), "x86_64-linux", nix::settings.thisSystem.get()); + jsonStr = nix::replaceStrings(jsonStr, "echo $name foo > $out", "exit 1"); + + auto * drv = nix_derivation_from_json(ctx, store, jsonStr.c_str()); + assert_ctx_ok(); + ASSERT_NE(drv, nullptr); + + auto * drvPath = nix_add_derivation(ctx, store, drv); + assert_ctx_ok(); + ASSERT_NE(drvPath, nullptr); + + int callbackCount = 0; + auto cb = LambdaAdapter{.fun = [&](const char * outname, const StorePath * outPath) { callbackCount++; }}; + + auto ret = nix_store_realise( + ctx, store, drvPath, static_cast(&cb), decltype(cb)::call_void); + + // Should fail with an error + ASSERT_NE(ret, NIX_OK); + ASSERT_EQ(callbackCount, 0) << "Callback should not be invoked when build fails"; + + // Check that error message is set + std::string errMsg = nix_err_msg(nullptr, ctx, nullptr); + ASSERT_FALSE(errMsg.empty()) << "Error message should be set"; + + // Clean up + nix_store_path_free(drvPath); + nix_derivation_free(drv); + nix_store_free(store); +} + +TEST_F(nix_api_store_test_base, nix_store_realise_builder_no_output) +{ + // Test that nix_store_realise properly reports errors when builder succeeds but produces no output + 
nix::experimentalFeatureSettings.set("extra-experimental-features", "ca-derivations"); + nix::settings.substituters = {}; + + auto * store = open_local_store(); + + std::filesystem::path unitTestData{getenv("_NIX_TEST_UNIT_DATA")}; + std::ifstream t{unitTestData / "derivation/ca/self-contained.json"}; + std::stringstream buffer; + buffer << t.rdbuf(); + + // Replace with current system and make builder succeed but not produce output + std::string jsonStr = nix::replaceStrings(buffer.str(), "x86_64-linux", nix::settings.thisSystem.get()); + jsonStr = nix::replaceStrings(jsonStr, "echo $name foo > $out", "true"); + + auto * drv = nix_derivation_from_json(ctx, store, jsonStr.c_str()); + assert_ctx_ok(); + ASSERT_NE(drv, nullptr); + + auto * drvPath = nix_add_derivation(ctx, store, drv); + assert_ctx_ok(); + ASSERT_NE(drvPath, nullptr); + + int callbackCount = 0; + auto cb = LambdaAdapter{.fun = [&](const char * outname, const StorePath * outPath) { callbackCount++; }}; + + auto ret = nix_store_realise( + ctx, store, drvPath, static_cast(&cb), decltype(cb)::call_void); + + // Should fail with an error + ASSERT_NE(ret, NIX_OK); + ASSERT_EQ(callbackCount, 0) << "Callback should not be invoked when build produces no output"; + + // Check that error message is set + std::string errMsg = nix_err_msg(nullptr, ctx, nullptr); + ASSERT_FALSE(errMsg.empty()) << "Error message should be set"; + + // Clean up + nix_store_path_free(drvPath); + nix_derivation_free(drv); + nix_store_free(store); +} + +TEST_F(NixApiStoreTestWithRealisedPath, nix_store_get_fs_closure_with_outputs) +{ + // Test closure computation with include_outputs on a derivation path + struct CallbackData + { + std::set * paths; + }; + + std::set closure_paths; + CallbackData data{&closure_paths}; + + auto ret = nix_store_get_fs_closure( + ctx, + store, + drvPath, // Use derivation path + false, // flip_direction + true, // include_outputs - include the outputs in the closure + false, // include_derivers + &data, + 
[](nix_c_context * context, void * userdata, const StorePath * path) { + auto * data = static_cast(userdata); + std::string path_str; + nix_store_path_name(path, OBSERVE_STRING(path_str)); + auto [it, inserted] = data->paths->insert(path_str); + ASSERT_TRUE(inserted) << "Duplicate path in closure: " << path_str; + }); + assert_ctx_ok(); + ASSERT_EQ(ret, NIX_OK); + + // The closure should contain the derivation and its outputs + ASSERT_GE(closure_paths.size(), 2); + + // Verify the output path is in the closure + std::string outPathName; + nix_store_path_name(outPath, OBSERVE_STRING(outPathName)); + ASSERT_EQ(closure_paths.count(outPathName), 1); +} + +TEST_F(NixApiStoreTestWithRealisedPath, nix_store_get_fs_closure_without_outputs) +{ + // Test closure computation WITHOUT include_outputs on a derivation path + struct CallbackData + { + std::set * paths; + }; + + std::set closure_paths; + CallbackData data{&closure_paths}; + + auto ret = nix_store_get_fs_closure( + ctx, + store, + drvPath, // Use derivation path + false, // flip_direction + false, // include_outputs - do NOT include the outputs + false, // include_derivers + &data, + [](nix_c_context * context, void * userdata, const StorePath * path) { + auto * data = static_cast(userdata); + std::string path_str; + nix_store_path_name(path, OBSERVE_STRING(path_str)); + auto [it, inserted] = data->paths->insert(path_str); + ASSERT_TRUE(inserted) << "Duplicate path in closure: " << path_str; + }); + assert_ctx_ok(); + ASSERT_EQ(ret, NIX_OK); + + // Verify the output path is NOT in the closure + std::string outPathName; + nix_store_path_name(outPath, OBSERVE_STRING(outPathName)); + ASSERT_EQ(closure_paths.count(outPathName), 0) << "Output path should not be in closure when includeOutputs=false"; +} + +TEST_F(NixApiStoreTestWithRealisedPath, nix_store_get_fs_closure_flip_direction) +{ + // Test closure computation with flip_direction on a derivation path + // When flip_direction=true, we get the reverse dependencies 
(what depends on this path) + // For a derivation, this should NOT include outputs even with include_outputs=true + struct CallbackData + { + std::set * paths; + }; + + std::set closure_paths; + CallbackData data{&closure_paths}; + + auto ret = nix_store_get_fs_closure( + ctx, + store, + drvPath, // Use derivation path + true, // flip_direction - get reverse dependencies + true, // include_outputs + false, // include_derivers + &data, + [](nix_c_context * context, void * userdata, const StorePath * path) { + auto * data = static_cast(userdata); + std::string path_str; + nix_store_path_name(path, OBSERVE_STRING(path_str)); + auto [it, inserted] = data->paths->insert(path_str); + ASSERT_TRUE(inserted) << "Duplicate path in closure: " << path_str; + }); + assert_ctx_ok(); + ASSERT_EQ(ret, NIX_OK); + + // Verify the output path is NOT in the closure when direction is flipped + std::string outPathName; + nix_store_path_name(outPath, OBSERVE_STRING(outPathName)); + ASSERT_EQ(closure_paths.count(outPathName), 0) << "Output path should not be in closure when flip_direction=true"; +} + +TEST_F(NixApiStoreTestWithRealisedPath, nix_store_get_fs_closure_include_derivers) +{ + // Test closure computation with include_derivers on an output path + // This should include the derivation that produced the output + struct CallbackData + { + std::set * paths; + }; + + std::set closure_paths; + CallbackData data{&closure_paths}; + + auto ret = nix_store_get_fs_closure( + ctx, + store, + outPath, // Use output path (not derivation) + false, // flip_direction + false, // include_outputs + true, // include_derivers - include the derivation + &data, + [](nix_c_context * context, void * userdata, const StorePath * path) { + auto * data = static_cast(userdata); + std::string path_str; + nix_store_path_name(path, OBSERVE_STRING(path_str)); + auto [it, inserted] = data->paths->insert(path_str); + ASSERT_TRUE(inserted) << "Duplicate path in closure: " << path_str; + }); + assert_ctx_ok(); + 
ASSERT_EQ(ret, NIX_OK); + + // Verify the derivation path is in the closure + // Deriver is nasty stateful, and this assertion is only guaranteed because + // we're using an empty store as our starting point. Otherwise, if the + // output happens to exist, the deriver could be anything. + std::string drvPathName; + nix_store_path_name(drvPath, OBSERVE_STRING(drvPathName)); + ASSERT_EQ(closure_paths.count(drvPathName), 1) << "Derivation should be in closure when include_derivers=true"; +} + +TEST_F(NixApiStoreTestWithRealisedPath, nix_store_realise_output_ordering) +{ + // Test that nix_store_realise returns outputs in alphabetical order by output name. + // This test uses a CA derivation with 10 outputs in randomized input order + // to verify that the callback order is deterministic and alphabetical. + nix::experimentalFeatureSettings.set("extra-experimental-features", "ca-derivations"); + nix::settings.substituters = {}; + + auto * store = open_local_store(); + + // Create a CA derivation with 10 outputs using proper placeholders + auto outa_ph = nix::hashPlaceholder("outa"); + auto outb_ph = nix::hashPlaceholder("outb"); + auto outc_ph = nix::hashPlaceholder("outc"); + auto outd_ph = nix::hashPlaceholder("outd"); + auto oute_ph = nix::hashPlaceholder("oute"); + auto outf_ph = nix::hashPlaceholder("outf"); + auto outg_ph = nix::hashPlaceholder("outg"); + auto outh_ph = nix::hashPlaceholder("outh"); + auto outi_ph = nix::hashPlaceholder("outi"); + auto outj_ph = nix::hashPlaceholder("outj"); + + std::string drvJson = R"({ + "version": 4, + "name": "multi-output-test", + "system": ")" + nix::settings.thisSystem.get() + + R"(", + "builder": "/bin/sh", + "args": ["-c", "echo a > $outa; echo b > $outb; echo c > $outc; echo d > $outd; echo e > $oute; echo f > $outf; echo g > $outg; echo h > $outh; echo i > $outi; echo j > $outj"], + "env": { + "builder": "/bin/sh", + "name": "multi-output-test", + "system": ")" + nix::settings.thisSystem.get() + + R"(", + "outf": ")" + 
outf_ph + + R"(", + "outd": ")" + outd_ph + + R"(", + "outi": ")" + outi_ph + + R"(", + "oute": ")" + oute_ph + + R"(", + "outh": ")" + outh_ph + + R"(", + "outc": ")" + outc_ph + + R"(", + "outb": ")" + outb_ph + + R"(", + "outg": ")" + outg_ph + + R"(", + "outj": ")" + outj_ph + + R"(", + "outa": ")" + outa_ph + + R"(" + }, + "inputs": { + "drvs": {}, + "srcs": [] + }, + "outputs": { + "outd": { "hashAlgo": "sha256", "method": "nar" }, + "outf": { "hashAlgo": "sha256", "method": "nar" }, + "outg": { "hashAlgo": "sha256", "method": "nar" }, + "outb": { "hashAlgo": "sha256", "method": "nar" }, + "outc": { "hashAlgo": "sha256", "method": "nar" }, + "outi": { "hashAlgo": "sha256", "method": "nar" }, + "outj": { "hashAlgo": "sha256", "method": "nar" }, + "outh": { "hashAlgo": "sha256", "method": "nar" }, + "outa": { "hashAlgo": "sha256", "method": "nar" }, + "oute": { "hashAlgo": "sha256", "method": "nar" } + } + })"; + + auto * drv = nix_derivation_from_json(ctx, store, drvJson.c_str()); + assert_ctx_ok(); + ASSERT_NE(drv, nullptr); + + auto * drvPath = nix_add_derivation(ctx, store, drv); + assert_ctx_ok(); + ASSERT_NE(drvPath, nullptr); + + // Realise the derivation - capture the order outputs are returned + std::map outputs; + std::vector output_order; + auto cb = LambdaAdapter{.fun = [&](const char * outname, const StorePath * outPath) { + ASSERT_NE(outname, nullptr); + ASSERT_NE(outPath, nullptr); + output_order.push_back(outname); + outputs.emplace(outname, outPath->path); + }}; + + auto ret = nix_store_realise( + ctx, store, drvPath, static_cast(&cb), decltype(cb)::call_void); + assert_ctx_ok(); + ASSERT_EQ(ret, NIX_OK); + ASSERT_EQ(outputs.size(), 10); + + // Verify outputs are returned in alphabetical order by output name + std::vector expected_order = { + "outa", "outb", "outc", "outd", "oute", "outf", "outg", "outh", "outi", "outj"}; + ASSERT_EQ(output_order, expected_order) << "Outputs should be returned in alphabetical order by output name"; + + // Now 
compute closure with include_outputs and collect paths in order + struct CallbackData + { + std::vector * paths; + }; + + std::vector closure_paths; + CallbackData data{&closure_paths}; + + ret = nix_store_get_fs_closure( + ctx, + store, + drvPath, + false, // flip_direction + true, // include_outputs - include the outputs in the closure + false, // include_derivers + &data, + [](nix_c_context * context, void * userdata, const StorePath * path) { + auto * data = static_cast(userdata); + std::string path_str; + nix_store_path_name(path, OBSERVE_STRING(path_str)); + data->paths->push_back(path_str); + }); + assert_ctx_ok(); + ASSERT_EQ(ret, NIX_OK); + + // Should contain at least the derivation and 10 outputs + ASSERT_GE(closure_paths.size(), 11); + + // Verify all outputs are present in the closure + for (const auto & [outname, outPath] : outputs) { + std::string outPathName = store->ptr->printStorePath(outPath); + + bool found = false; + for (const auto & p : closure_paths) { + // nix_store_path_name returns just the name part, so match against full path name + if (outPathName.find(p) != std::string::npos) { + found = true; + break; + } + } + ASSERT_TRUE(found) << "Output " << outname << " (" << outPathName << ") not found in closure"; + } + + nix_store_path_free(drvPath); + nix_derivation_free(drv); + nix_store_free(store); +} + +TEST_F(NixApiStoreTestWithRealisedPath, nix_store_get_fs_closure_error_propagation) +{ + // Test that errors in the callback abort the closure computation + struct CallbackData + { + int * count; + }; + + int call_count = 0; + CallbackData data{&call_count}; + + auto ret = nix_store_get_fs_closure( + ctx, + store, + drvPath, // Use derivation path + false, // flip_direction + true, // include_outputs + false, // include_derivers + &data, + [](nix_c_context * context, void * userdata, const StorePath * path) { + auto * data = static_cast(userdata); + (*data->count)++; + // Set an error immediately + nix_set_err_msg(context, 
NIX_ERR_UNKNOWN, "Test error"); + }); + + // Should have aborted with error + ASSERT_EQ(ret, NIX_ERR_UNKNOWN); + ASSERT_EQ(call_count, 1); // Should have been called exactly once, then aborted +} + +/** + * @brief Helper function to load JSON from a test data file + * + * @param filename Relative path from _NIX_TEST_UNIT_DATA + * @return JSON string contents of the file + */ +static std::string load_json_from_test_data(const char * filename) +{ + std::filesystem::path unitTestData{getenv("_NIX_TEST_UNIT_DATA")}; + std::ifstream t{unitTestData / filename}; + std::stringstream buffer; + buffer << t.rdbuf(); + return buffer.str(); +} + +TEST_F(nix_api_store_test, nix_derivation_to_json_roundtrip) +{ + // Load JSON from test data + auto originalJson = load_json_from_test_data("derivation/invariants/filled-in-deferred-empty-env-var-pre.json"); + + // Parse to derivation + auto * drv = nix_derivation_from_json(ctx, store, originalJson.c_str()); + assert_ctx_ok(); + ASSERT_NE(drv, nullptr); + + // Convert back to JSON + std::string convertedJson; + auto ret = nix_derivation_to_json(ctx, drv, OBSERVE_STRING(convertedJson)); + assert_ctx_ok(); + ASSERT_EQ(ret, NIX_OK); + ASSERT_FALSE(convertedJson.empty()); + + // Parse both JSON strings to compare (ignoring whitespace differences) + auto originalParsed = nlohmann::json::parse(originalJson); + auto convertedParsed = nlohmann::json::parse(convertedJson); + + // Remove parts that will be different due to filling-in. 
+ originalParsed.at("outputs").erase("out"); + originalParsed.at("env").erase("out"); + convertedParsed.at("outputs").erase("out"); + convertedParsed.at("env").erase("out"); + + // They should be equivalent + ASSERT_EQ(originalParsed, convertedParsed); + + nix_derivation_free(drv); +} + +TEST_F(nix_api_store_test, nix_derivation_store_round_trip) +{ + // Load a derivation from JSON + auto json = load_json_from_test_data("derivation/invariants/filled-in-deferred-empty-env-var-pre.json"); + auto * drv = nix_derivation_from_json(ctx, store, json.c_str()); + assert_ctx_ok(); + ASSERT_NE(drv, nullptr); + + // Add to store + auto * drvPath = nix_add_derivation(ctx, store, drv); + assert_ctx_ok(); + ASSERT_NE(drvPath, nullptr); + + // Retrieve from store + auto * drv2 = nix_store_drv_from_store_path(ctx, store, drvPath); + assert_ctx_ok(); + ASSERT_NE(drv2, nullptr); + + // The round trip should make the same derivation + ASSERT_EQ(drv->drv, drv2->drv); + + nix_store_path_free(drvPath); + nix_derivation_free(drv); + nix_derivation_free(drv2); +} + +TEST_F(nix_api_store_test, nix_derivation_clone) +{ + // Load a derivation from JSON + auto json = load_json_from_test_data("derivation/invariants/filled-in-deferred-empty-env-var-pre.json"); + auto * drv = nix_derivation_from_json(ctx, store, json.c_str()); + assert_ctx_ok(); + ASSERT_NE(drv, nullptr); + + // Clone the derivation + auto * drv2 = nix_derivation_clone(drv); + ASSERT_NE(drv2, nullptr); + + // The clone should be equal + ASSERT_EQ(drv->drv, drv2->drv); + + nix_derivation_free(drv); + nix_derivation_free(drv2); +} + } // namespace nixC diff --git a/src/libstore-tests/package.nix b/src/libstore-tests/package.nix index d5255f4f988..ac547aca35e 100644 --- a/src/libstore-tests/package.nix +++ b/src/libstore-tests/package.nix @@ -42,20 +42,16 @@ mkMesonExecutable (finalAttrs: { (fileset.fileFilter (file: file.hasExt "hh") ./.) 
]; - # Hack for sake of the dev shell - passthru.externalBuildInputs = [ + buildInputs = [ sqlite rapidcheck gtest - ] - ++ lib.optionals withBenchmarks [ - gbenchmark - ]; - - buildInputs = finalAttrs.passthru.externalBuildInputs ++ [ nix-store nix-store-c nix-store-test-support + ] + ++ lib.optionals withBenchmarks [ + gbenchmark ]; mesonFlags = [ @@ -83,7 +79,6 @@ mkMesonExecutable (finalAttrs: { } ( '' - export ASAN_OPTIONS=abort_on_error=1:print_summary=1:detect_leaks=0 export _NIX_TEST_UNIT_DATA=${data + "/src/libstore-tests/data"} export NIX_REMOTE=$HOME/store ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} diff --git a/src/libstore-tests/path-info.cc b/src/libstore-tests/path-info.cc index 63310c1c391..6c0fd183beb 100644 --- a/src/libstore-tests/path-info.cc +++ b/src/libstore-tests/path-info.cc @@ -10,9 +10,19 @@ namespace nix { using nlohmann::json; -class PathInfoTest : public CharacterizationTest, public LibStoreTest +class PathInfoTestV1 : public CharacterizationTest, public LibStoreTest { - std::filesystem::path unitTestData = getUnitTestData() / "path-info"; + std::filesystem::path unitTestData = getUnitTestData() / "path-info" / "json-1"; + + std::filesystem::path goldenMaster(PathView testStem) const override + { + return unitTestData / (testStem + ".json"); + } +}; + +class PathInfoTestV2 : public CharacterizationTest, public LibStoreTest +{ + std::filesystem::path unitTestData = getUnitTestData() / "path-info" / "json-2"; std::filesystem::path goldenMaster(PathView testStem) const override { @@ -23,6 +33,7 @@ class PathInfoTest : public CharacterizationTest, public LibStoreTest static UnkeyedValidPathInfo makeEmpty() { return { + "/nix/store", Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), }; } @@ -65,33 +76,70 @@ static UnkeyedValidPathInfo makeFull(const Store & store, bool includeImpureInfo return makeFullKeyed(store, includeImpureInfo); } -#define JSON_TEST(STEM, OBJ, PURE) \ - 
TEST_F(PathInfoTest, PathInfo_##STEM##_from_json) \ +#define JSON_READ_TEST_V1(STEM, OBJ) \ + TEST_F(PathInfoTestV1, PathInfo_##STEM##_from_json) \ + { \ + readTest(#STEM, [&](const auto & encoded_) { \ + auto encoded = json::parse(encoded_); \ + UnkeyedValidPathInfo got = UnkeyedValidPathInfo::fromJSON(&*store, encoded); \ + auto expected = OBJ; \ + ASSERT_EQ(got, expected); \ + }); \ + } + +#define JSON_WRITE_TEST_V1(STEM, OBJ, PURE) \ + TEST_F(PathInfoTestV1, PathInfo_##STEM##_to_json) \ { \ - readTest(#STEM, [&](const auto & encoded_) { \ - auto encoded = json::parse(encoded_); \ - UnkeyedValidPathInfo got = UnkeyedValidPathInfo::fromJSON(*store, encoded); \ - auto expected = OBJ; \ - ASSERT_EQ(got, expected); \ - }); \ - } \ - \ - TEST_F(PathInfoTest, PathInfo_##STEM##_to_json) \ + writeTest( \ + #STEM, \ + [&]() -> json { return OBJ.toJSON(&*store, PURE, PathInfoJsonFormat::V1); }, \ + [](const auto & file) { return json::parse(readFile(file)); }, \ + [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ + } + +#define JSON_TEST_V1(STEM, OBJ, PURE) \ + JSON_READ_TEST_V1(STEM, OBJ) \ + JSON_WRITE_TEST_V1(STEM, OBJ, PURE) + +#define JSON_READ_TEST_V2(STEM, OBJ) \ + TEST_F(PathInfoTestV2, PathInfo_##STEM##_from_json) \ + { \ + readTest(#STEM, [&](const auto & encoded_) { \ + auto encoded = json::parse(encoded_); \ + UnkeyedValidPathInfo got = UnkeyedValidPathInfo::fromJSON(nullptr, encoded); \ + auto expected = OBJ; \ + ASSERT_EQ(got, expected); \ + }); \ + } + +#define JSON_WRITE_TEST_V2(STEM, OBJ, PURE) \ + TEST_F(PathInfoTestV2, PathInfo_##STEM##_to_json) \ { \ writeTest( \ #STEM, \ - [&]() -> json { return OBJ.toJSON(*store, PURE, HashFormat::SRI); }, \ + [&]() -> json { return OBJ.toJSON(nullptr, PURE, PathInfoJsonFormat::V2); }, \ [](const auto & file) { return json::parse(readFile(file)); }, \ [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ } -JSON_TEST(empty_pure, 
makeEmpty(), false) -JSON_TEST(empty_impure, makeEmpty(), true) +#define JSON_TEST_V2(STEM, OBJ, PURE) \ + JSON_READ_TEST_V2(STEM, OBJ) \ + JSON_WRITE_TEST_V2(STEM, OBJ, PURE) + +JSON_TEST_V1(empty_pure, makeEmpty(), false) +JSON_TEST_V1(empty_impure, makeEmpty(), true) +JSON_TEST_V1(pure, makeFull(*store, false), false) +JSON_TEST_V1(impure, makeFull(*store, true), true) + +// Test that JSON without explicit version field parses as V1 +JSON_READ_TEST_V1(pure_noversion, makeFull(*store, false)) -JSON_TEST(pure, makeFull(*store, false), false) -JSON_TEST(impure, makeFull(*store, true), true) +JSON_TEST_V2(empty_pure, makeEmpty(), false) +JSON_TEST_V2(empty_impure, makeEmpty(), true) +JSON_TEST_V2(pure, makeFull(*store, false), false) +JSON_TEST_V2(impure, makeFull(*store, true), true) -TEST_F(PathInfoTest, PathInfo_full_shortRefs) +TEST_F(PathInfoTestV2, PathInfo_full_shortRefs) { ValidPathInfo it = makeFullKeyed(*store, true); // it.references = unkeyed.references; diff --git a/src/libstore-tests/realisation.cc b/src/libstore-tests/realisation.cc index a5a5bee508a..d16049bc5b0 100644 --- a/src/libstore-tests/realisation.cc +++ b/src/libstore-tests/realisation.cc @@ -49,16 +49,16 @@ INSTANTIATE_TEST_SUITE_P( RealisationJsonTest, ([] { Realisation simple{ - - .id = - { - .drvHash = Hash::parseExplicitFormatUnprefixed( - "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad", - HashAlgorithm::SHA256, - HashFormat::Base16), - .outputName = "foo", - }, - .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"}, + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"}, + }, + { + .drvHash = Hash::parseExplicitFormatUnprefixed( + "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad", + HashAlgorithm::SHA256, + HashFormat::Base16), + .outputName = "foo", + }, }; return ::testing::Values( std::pair{ diff --git a/src/libstore-tests/references.cc b/src/libstore-tests/references.cc index 27ecad08fbf..f2c6fb51e5c 100644 --- 
a/src/libstore-tests/references.cc +++ b/src/libstore-tests/references.cc @@ -1,4 +1,6 @@ #include "nix/store/references.hh" +#include "nix/store/path-references.hh" +#include "nix/util/memory-source-accessor.hh" #include @@ -79,4 +81,145 @@ TEST(references, scan) } } +TEST(references, scanForReferencesDeep) +{ + using File = MemorySourceAccessor::File; + + // Create store paths to search for + StorePath path1{"dc04vv14dak1c1r48qa0m23vr9jy8sm0-foo"}; + StorePath path2{"zc842j0rz61mjsp3h3wp5ly71ak6qgdn-bar"}; + StorePath path3{"a5cn2i4b83gnsm60d38l3kgb8qfplm11-baz"}; + + StorePathSet refs{path1, path2, path3}; + + std::string_view hash1 = path1.hashPart(); + std::string_view hash2 = path2.hashPart(); + std::string_view hash3 = path3.hashPart(); + + // Create an in-memory file system with various reference patterns + auto accessor = make_ref(); + accessor->root = File::Directory{ + .entries{ + { + // file1.txt: contains hash1 + "file1.txt", + File::Regular{ + .contents = "This file references " + hash1 + " in its content", + }, + }, + { + // file2.txt: contains hash2 and hash3 + "file2.txt", + File::Regular{ + .contents = "Multiple refs: " + hash2 + " and also " + hash3, + }, + }, + { + // file3.txt: contains no references + "file3.txt", + File::Regular{ + .contents = "This file has no store path references at all", + }, + }, + { + // subdir: a subdirectory + "subdir", + File::Directory{ + .entries{ + { + // subdir/file4.txt: contains hash1 again + "file4.txt", + File::Regular{ + .contents = "Subdirectory file with " + hash1, + }, + }, + }, + }, + }, + { + // link1: a symlink that contains a reference in its target + "link1", + File::Symlink{ + .target = hash2 + "-target", + }, + }, + }, + }; + + // Test the callback-based API + { + std::map foundRefs; + + scanForReferencesDeep(*accessor, CanonPath::root, refs, [&](FileRefScanResult result) { + foundRefs[std::move(result.filePath)] = std::move(result.foundRefs); + }); + + // Verify we found the expected references + 
EXPECT_EQ(foundRefs.size(), 4); // file1, file2, file4, link1 + + // Check file1.txt found path1 + { + CanonPath f1Path("/file1.txt"); + auto it = foundRefs.find(f1Path); + ASSERT_TRUE(it != foundRefs.end()); + EXPECT_EQ(it->second.size(), 1); + EXPECT_TRUE(it->second.count(path1)); + } + + // Check file2.txt found path2 and path3 + { + CanonPath f2Path("/file2.txt"); + auto it = foundRefs.find(f2Path); + ASSERT_TRUE(it != foundRefs.end()); + EXPECT_EQ(it->second.size(), 2); + EXPECT_TRUE(it->second.count(path2)); + EXPECT_TRUE(it->second.count(path3)); + } + + // Check file3.txt is not in results (no refs) + { + CanonPath f3Path("/file3.txt"); + EXPECT_FALSE(foundRefs.count(f3Path)); + } + + // Check subdir/file4.txt found path1 + { + CanonPath f4Path("/subdir/file4.txt"); + auto it = foundRefs.find(f4Path); + ASSERT_TRUE(it != foundRefs.end()); + EXPECT_EQ(it->second.size(), 1); + EXPECT_TRUE(it->second.count(path1)); + } + + // Check symlink found path2 + { + CanonPath linkPath("/link1"); + auto it = foundRefs.find(linkPath); + ASSERT_TRUE(it != foundRefs.end()); + EXPECT_EQ(it->second.size(), 1); + EXPECT_TRUE(it->second.count(path2)); + } + } + + // Test the map-based convenience API + { + auto results = scanForReferencesDeep(*accessor, CanonPath::root, refs); + + EXPECT_EQ(results.size(), 4); // file1, file2, file4, link1 + + // Verify all expected files are in the results + EXPECT_TRUE(results.count(CanonPath("/file1.txt"))); + EXPECT_TRUE(results.count(CanonPath("/file2.txt"))); + EXPECT_TRUE(results.count(CanonPath("/subdir/file4.txt"))); + EXPECT_TRUE(results.count(CanonPath("/link1"))); + EXPECT_FALSE(results.count(CanonPath("/file3.txt"))); + + // Verify the references found in each file are correct + EXPECT_EQ(results.at(CanonPath("/file1.txt")), StorePathSet{path1}); + EXPECT_EQ(results.at(CanonPath("/file2.txt")), StorePathSet({path2, path3})); + EXPECT_EQ(results.at(CanonPath("/subdir/file4.txt")), StorePathSet{path1}); + 
EXPECT_EQ(results.at(CanonPath("/link1")), StorePathSet{path2}); + } +} + } // namespace nix diff --git a/src/libstore-tests/s3-binary-cache-store.cc b/src/libstore-tests/s3-binary-cache-store.cc index 251e96172b6..59090a589f0 100644 --- a/src/libstore-tests/s3-binary-cache-store.cc +++ b/src/libstore-tests/s3-binary-cache-store.cc @@ -1,8 +1,9 @@ #include "nix/store/s3-binary-cache-store.hh" +#include "nix/store/http-binary-cache-store.hh" +#include "nix/store/filetransfer.hh" +#include "nix/store/s3-url.hh" -#if NIX_WITH_S3_SUPPORT - -# include +#include namespace nix { @@ -10,9 +11,133 @@ TEST(S3BinaryCacheStore, constructConfig) { S3BinaryCacheStoreConfig config{"s3", "foobar", {}}; - EXPECT_EQ(config.bucketName, "foobar"); + // The bucket name is stored as the host part of the authority in cacheUri + EXPECT_EQ( + config.cacheUri, + (ParsedURL{ + .scheme = "s3", + .authority = ParsedURL::Authority{.host = "foobar"}, + })); } -} // namespace nix +TEST(S3BinaryCacheStore, constructConfigWithRegion) +{ + Store::Config::Params params{{"region", "eu-west-1"}}; + S3BinaryCacheStoreConfig config{"s3", "my-bucket", params}; + + EXPECT_EQ( + config.cacheUri, + (ParsedURL{ + .scheme = "s3", + .authority = ParsedURL::Authority{.host = "my-bucket"}, + .query = (StringMap) {{"region", "eu-west-1"}}, + })); + EXPECT_EQ(config.region.get(), "eu-west-1"); +} + +TEST(S3BinaryCacheStore, defaultSettings) +{ + S3BinaryCacheStoreConfig config{"s3", "test-bucket", {}}; + + EXPECT_EQ( + config.cacheUri, + (ParsedURL{ + .scheme = "s3", + .authority = ParsedURL::Authority{.host = "test-bucket"}, + })); + + // Check default values + EXPECT_EQ(config.region.get(), "us-east-1"); + EXPECT_EQ(config.profile.get(), "default"); + EXPECT_EQ(config.scheme.get(), "https"); + EXPECT_EQ(config.endpoint.get(), ""); +} + +/** + * Test that S3BinaryCacheStore properly preserves S3-specific parameters + */ +TEST(S3BinaryCacheStore, s3StoreConfigPreservesParameters) +{ + StringMap params; + 
params["region"] = "eu-west-1"; + params["endpoint"] = "custom.s3.com"; + + S3BinaryCacheStoreConfig config("s3", "test-bucket", params); + + // The config should preserve S3-specific parameters + EXPECT_EQ( + config.cacheUri, + (ParsedURL{ + .scheme = "s3", + .authority = ParsedURL::Authority{.host = "test-bucket"}, + .query = (StringMap) {{"region", "eu-west-1"}, {"endpoint", "custom.s3.com"}}, + })); +} + +/** + * Test that S3 store scheme is properly registered + */ +TEST(S3BinaryCacheStore, s3SchemeRegistration) +{ + auto schemes = S3BinaryCacheStoreConfig::uriSchemes(); + EXPECT_TRUE(schemes.count("s3") > 0) << "S3 scheme should be supported"; -#endif + // Verify HttpBinaryCacheStoreConfig doesn't directly list S3 + auto httpSchemes = HttpBinaryCacheStoreConfig::uriSchemes(); + EXPECT_FALSE(httpSchemes.count("s3") > 0) << "HTTP store shouldn't directly list S3 scheme"; +} + +/** + * Test that only S3-specific parameters are preserved in cacheUri, + * while non-S3 store parameters are not propagated to the URL + */ +TEST(S3BinaryCacheStore, parameterFiltering) +{ + StringMap params; + params["region"] = "eu-west-1"; + params["endpoint"] = "minio.local"; + params["want-mass-query"] = "true"; // Non-S3 store parameter + params["priority"] = "10"; // Non-S3 store parameter + + S3BinaryCacheStoreConfig config("s3", "test-bucket", params); + + // Only S3-specific params should be in cacheUri.query + EXPECT_EQ( + config.cacheUri, + (ParsedURL{ + .scheme = "s3", + .authority = ParsedURL::Authority{.host = "test-bucket"}, + .query = (StringMap) {{"region", "eu-west-1"}, {"endpoint", "minio.local"}}, + })); + + // But the non-S3 params should still be set on the config + EXPECT_EQ(config.wantMassQuery.get(), true); + EXPECT_EQ(config.priority.get(), 10); + + // And all params (S3 and non-S3) should be returned by getReference() + auto ref = config.getReference(); + EXPECT_EQ(ref.params["region"], "eu-west-1"); + EXPECT_EQ(ref.params["endpoint"], "minio.local"); + 
EXPECT_EQ(ref.params["want-mass-query"], "true"); + EXPECT_EQ(ref.params["priority"], "10"); +} + +/** + * Test storage class configuration + */ +TEST(S3BinaryCacheStore, storageClassDefault) +{ + S3BinaryCacheStoreConfig config{"s3", "test-bucket", {}}; + EXPECT_EQ(config.storageClass.get(), std::nullopt); +} + +TEST(S3BinaryCacheStore, storageClassConfiguration) +{ + StringMap params; + params["storage-class"] = "GLACIER"; + + S3BinaryCacheStoreConfig config("s3", "test-bucket", params); + EXPECT_EQ(config.storageClass.get(), std::optional("GLACIER")); +} + +} // namespace nix diff --git a/src/libstore-tests/s3-url.cc b/src/libstore-tests/s3-url.cc index 60652dd9cab..9fa625fd6c7 100644 --- a/src/libstore-tests/s3-url.cc +++ b/src/libstore-tests/s3-url.cc @@ -1,10 +1,8 @@ #include "nix/store/s3-url.hh" #include "nix/util/tests/gmock-matchers.hh" -#if NIX_WITH_S3_SUPPORT || NIX_WITH_CURL_S3 - -# include -# include +#include +#include namespace nix { @@ -72,6 +70,25 @@ INSTANTIATE_TEST_SUITE_P( }, "with_profile_and_region", }, + ParsedS3URLTestCase{ + "s3://my-bucket/my-key.txt?versionId=abc123xyz", + { + .bucket = "my-bucket", + .key = {"my-key.txt"}, + .versionId = "abc123xyz", + }, + "with_versionId", + }, + ParsedS3URLTestCase{ + "s3://bucket/path/to/object?region=eu-west-1&versionId=version456", + { + .bucket = "bucket", + .key = {"path", "to", "object"}, + .region = "eu-west-1", + .versionId = "version456", + }, + "with_region_and_versionId", + }, ParsedS3URLTestCase{ "s3://bucket/key?endpoint=https://minio.local&scheme=http", { @@ -224,9 +241,38 @@ INSTANTIATE_TEST_SUITE_P( }, "https://s3.ap-southeast-2.amazonaws.com/bucket/path/to/file.txt", "complex_path_and_region", + }, + S3ToHttpsConversionTestCase{ + ParsedS3URL{ + .bucket = "my-bucket", + .key = {"my-key.txt"}, + .versionId = "abc123xyz", + }, + ParsedURL{ + .scheme = "https", + .authority = ParsedURL::Authority{.host = "s3.us-east-1.amazonaws.com"}, + .path = {"", "my-bucket", "my-key.txt"}, + .query 
= {{"versionId", "abc123xyz"}}, + }, + "https://s3.us-east-1.amazonaws.com/my-bucket/my-key.txt?versionId=abc123xyz", + "with_versionId", + }, + S3ToHttpsConversionTestCase{ + ParsedS3URL{ + .bucket = "versioned-bucket", + .key = {"path", "to", "object"}, + .region = "eu-west-1", + .versionId = "version456", + }, + ParsedURL{ + .scheme = "https", + .authority = ParsedURL::Authority{.host = "s3.eu-west-1.amazonaws.com"}, + .path = {"", "versioned-bucket", "path", "to", "object"}, + .query = {{"versionId", "version456"}}, + }, + "https://s3.eu-west-1.amazonaws.com/versioned-bucket/path/to/object?versionId=version456", + "with_region_and_versionId", }), [](const ::testing::TestParamInfo & info) { return info.param.description; }); } // namespace nix - -#endif diff --git a/src/libstore-tests/serve-protocol.cc b/src/libstore-tests/serve-protocol.cc index a63201164b7..258dbf04990 100644 --- a/src/libstore-tests/serve-protocol.cc +++ b/src/libstore-tests/serve-protocol.cc @@ -4,6 +4,7 @@ #include #include +#include "nix/util/json-utils.hh" #include "nix/store/serve-protocol.hh" #include "nix/store/serve-protocol-impl.hh" #include "nix/store/serve-protocol-connection.hh" @@ -16,6 +17,8 @@ namespace nix { const char serveProtoDir[] = "serve-protocol"; +static constexpr std::string_view defaultStoreDir = "/nix/store"; + struct ServeProtoTest : VersionedProtoTest { /** @@ -95,32 +98,51 @@ VERSIONED_CHARACTERIZATION_TEST( defaultVersion, (std::tuple{ Realisation{ - .id = - DrvOutput{ - .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), - .outputName = "baz", - }, - .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, - .signatures = {"asdf", "qwer"}, + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + }, + { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, }, Realisation{ - .id = - { - .drvHash = 
Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), - .outputName = "baz", - }, - .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, - .signatures = {"asdf", "qwer"}, - .dependentRealisations = - { + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + .signatures = {"asdf", "qwer"}, + }, + { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, + }, + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + realisation_with_deps, + "realisation-with-deps", + defaultVersion, + (std::tuple{ + Realisation{ + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + .signatures = {"asdf", "qwer"}, + .dependentRealisations = { - DrvOutput{ - .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "quux", + { + DrvOutput{ + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "quux", + }, + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, }, - StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, }, - }, + }, + { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, }, })) @@ -196,25 +218,27 @@ VERSIONED_CHARACTERIZATION_TEST( { "foo", { - .id = - DrvOutput{ - .drvHash = - Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "foo", - }, - .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + }, + DrvOutput{ + .drvHash = + Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "foo", + }, }, }, { "bar", { - .id = - DrvOutput{ - .drvHash = - Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "bar", - }, - .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"}, + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"}, + }, + 
DrvOutput{ + .drvHash = + Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "bar", + }, }, }, }, @@ -241,12 +265,12 @@ VERSIONED_CHARACTERIZATION_TEST( 2 << 8 | 3, (std::tuple{ ({ - UnkeyedValidPathInfo info{Hash::dummy}; + UnkeyedValidPathInfo info{std::string{defaultStoreDir}, Hash::dummy}; info.narSize = 34878; info; }), ({ - UnkeyedValidPathInfo info{Hash::dummy}; + UnkeyedValidPathInfo info{std::string{defaultStoreDir}, Hash::dummy}; info.deriver = StorePath{ "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", }; @@ -268,6 +292,7 @@ VERSIONED_CHARACTERIZATION_TEST( (std::tuple{ ({ UnkeyedValidPathInfo info{ + std::string{defaultStoreDir}, Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), }; info.deriver = StorePath{ @@ -313,7 +338,7 @@ VERSIONED_CHARACTERIZATION_TEST( }), })) -VERSIONED_CHARACTERIZATION_TEST( +VERSIONED_CHARACTERIZATION_TEST_NO_JSON( ServeProtoTest, build_options_2_1, "build-options-2.1", @@ -323,7 +348,7 @@ VERSIONED_CHARACTERIZATION_TEST( .buildTimeout = 6, })) -VERSIONED_CHARACTERIZATION_TEST( +VERSIONED_CHARACTERIZATION_TEST_NO_JSON( ServeProtoTest, build_options_2_2, "build-options-2.2", @@ -334,7 +359,7 @@ VERSIONED_CHARACTERIZATION_TEST( .maxLogSize = 7, })) -VERSIONED_CHARACTERIZATION_TEST( +VERSIONED_CHARACTERIZATION_TEST_NO_JSON( ServeProtoTest, build_options_2_3, "build-options-2.3", @@ -347,7 +372,7 @@ VERSIONED_CHARACTERIZATION_TEST( .enforceDeterminism = true, })) -VERSIONED_CHARACTERIZATION_TEST( +VERSIONED_CHARACTERIZATION_TEST_NO_JSON( ServeProtoTest, build_options_2_7, "build-options-2.7", @@ -418,7 +443,7 @@ VERSIONED_CHARACTERIZATION_TEST( TEST_F(ServeProtoTest, handshake_log) { - CharacterizationTest::writeTest("handshake-to-client", [&]() -> std::string { + CharacterizationTest::writeTest("handshake-to-client.bin", [&]() -> std::string { StringSink toClientLog; Pipe toClient, toServer; @@ -454,7 +479,7 @@ struct NullBufferedSink : BufferedSink TEST_F(ServeProtoTest, 
handshake_client_replay) { - CharacterizationTest::readTest("handshake-to-client", [&](std::string toClientLog) { + CharacterizationTest::readTest("handshake-to-client.bin", [&](std::string toClientLog) { NullBufferedSink nullSink; StringSource in{toClientLog}; @@ -466,7 +491,7 @@ TEST_F(ServeProtoTest, handshake_client_replay) TEST_F(ServeProtoTest, handshake_client_truncated_replay_throws) { - CharacterizationTest::readTest("handshake-to-client", [&](std::string toClientLog) { + CharacterizationTest::readTest("handshake-to-client.bin", [&](std::string toClientLog) { for (size_t len = 0; len < toClientLog.size(); ++len) { NullBufferedSink nullSink; auto substring = toClientLog.substr(0, len); @@ -484,7 +509,7 @@ TEST_F(ServeProtoTest, handshake_client_truncated_replay_throws) TEST_F(ServeProtoTest, handshake_client_corrupted_throws) { - CharacterizationTest::readTest("handshake-to-client", [&](const std::string toClientLog) { + CharacterizationTest::readTest("handshake-to-client.bin", [&](const std::string toClientLog) { for (size_t idx = 0; idx < toClientLog.size(); ++idx) { // corrupt a copy std::string toClientLogCorrupt = toClientLog; diff --git a/src/libstore-tests/store-reference.cc b/src/libstore-tests/store-reference.cc index a52b92b78a7..272d6732a85 100644 --- a/src/libstore-tests/store-reference.cc +++ b/src/libstore-tests/store-reference.cc @@ -100,9 +100,12 @@ URI_TEST(local_1, localExample_1) URI_TEST(local_2, localExample_2) -/* Test path with spaces */ +/* Test path with encoded spaces */ URI_TEST(local_3, localExample_3) +/* Test path with spaces that are improperly not encoded */ +URI_TEST_READ(local_3_no_percent, localExample_3) + URI_TEST_READ(local_shorthand_1, localExample_1) URI_TEST_READ(local_shorthand_2, localExample_2) diff --git a/src/libstore-tests/worker-protocol.cc b/src/libstore-tests/worker-protocol.cc index 489151c8c28..7416d732301 100644 --- a/src/libstore-tests/worker-protocol.cc +++ b/src/libstore-tests/worker-protocol.cc @@ 
-4,6 +4,7 @@ #include #include +#include "nix/util/json-utils.hh" #include "nix/store/worker-protocol.hh" #include "nix/store/worker-protocol-connection.hh" #include "nix/store/worker-protocol-impl.hh" @@ -16,6 +17,8 @@ namespace nix { const char workerProtoDir[] = "worker-protocol"; +static constexpr std::string_view defaultStoreDir = "/nix/store"; + struct WorkerProtoTest : VersionedProtoTest { /** @@ -148,32 +151,51 @@ VERSIONED_CHARACTERIZATION_TEST( defaultVersion, (std::tuple{ Realisation{ - .id = - DrvOutput{ - .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), - .outputName = "baz", - }, - .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, - .signatures = {"asdf", "qwer"}, + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + }, + { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, }, Realisation{ - .id = - { - .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), - .outputName = "baz", - }, - .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, - .signatures = {"asdf", "qwer"}, - .dependentRealisations = - { + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + .signatures = {"asdf", "qwer"}, + }, + { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, + }, + })) + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + realisation_with_deps, + "realisation-with-deps", + defaultVersion, + (std::tuple{ + Realisation{ + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + .signatures = {"asdf", "qwer"}, + .dependentRealisations = { - DrvOutput{ - .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "quux", + { + DrvOutput{ + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "quux", + }, + 
StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, }, - StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, }, - }, + }, + { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, }, })) @@ -214,25 +236,25 @@ VERSIONED_CHARACTERIZATION_TEST( { "foo", { - .id = - DrvOutput{ - .drvHash = - Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "foo", - }, - .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + }, + DrvOutput{ + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "foo", + }, }, }, { "bar", { - .id = - DrvOutput{ - .drvHash = - Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "bar", - }, - .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"}, + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"}, + }, + DrvOutput{ + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "bar", + }, }, }, }, @@ -267,25 +289,27 @@ VERSIONED_CHARACTERIZATION_TEST( { "foo", { - .id = - DrvOutput{ - .drvHash = - Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "foo", - }, - .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + }, + DrvOutput{ + .drvHash = + Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "foo", + }, }, }, { "bar", { - .id = - DrvOutput{ - .drvHash = - Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "bar", - }, - .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"}, + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"}, + }, + DrvOutput{ + .drvHash = + Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = 
"bar", + }, }, }, }, @@ -324,25 +348,27 @@ VERSIONED_CHARACTERIZATION_TEST( { "foo", { - .id = - DrvOutput{ - .drvHash = - Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "foo", - }, - .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + }, + DrvOutput{ + .drvHash = + Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "foo", + }, }, }, { "bar", { - .id = - DrvOutput{ - .drvHash = - Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "bar", - }, - .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"}, + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"}, + }, + DrvOutput{ + .drvHash = + Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "bar", + }, }, }, }, @@ -402,6 +428,7 @@ VERSIONED_CHARACTERIZATION_TEST( (std::tuple{ ({ UnkeyedValidPathInfo info{ + std::string{defaultStoreDir}, Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), }; info.registrationTime = 23423; @@ -410,6 +437,7 @@ VERSIONED_CHARACTERIZATION_TEST( }), ({ UnkeyedValidPathInfo info{ + std::string{defaultStoreDir}, Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), }; info.deriver = StorePath{ @@ -438,6 +466,7 @@ VERSIONED_CHARACTERIZATION_TEST( "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", }, UnkeyedValidPathInfo{ + std::string{defaultStoreDir}, Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), }, }; @@ -451,6 +480,7 @@ VERSIONED_CHARACTERIZATION_TEST( "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", }, UnkeyedValidPathInfo{ + std::string{defaultStoreDir}, Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), }, }; @@ -485,6 +515,7 @@ VERSIONED_CHARACTERIZATION_TEST( "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", }, UnkeyedValidPathInfo{ + std::string{defaultStoreDir}, 
Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), }, }; @@ -499,6 +530,7 @@ VERSIONED_CHARACTERIZATION_TEST( "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", }, UnkeyedValidPathInfo{ + std::string{defaultStoreDir}, Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), }, }; @@ -626,7 +658,7 @@ VERSIONED_CHARACTERIZATION_TEST( }, })) -VERSIONED_CHARACTERIZATION_TEST( +VERSIONED_CHARACTERIZATION_TEST_NO_JSON( WorkerProtoTest, clientHandshakeInfo_1_30, "client-handshake-info_1_30", @@ -635,7 +667,7 @@ VERSIONED_CHARACTERIZATION_TEST( {}, })) -VERSIONED_CHARACTERIZATION_TEST( +VERSIONED_CHARACTERIZATION_TEST_NO_JSON( WorkerProtoTest, clientHandshakeInfo_1_33, "client-handshake-info_1_33", @@ -649,7 +681,7 @@ VERSIONED_CHARACTERIZATION_TEST( }, })) -VERSIONED_CHARACTERIZATION_TEST( +VERSIONED_CHARACTERIZATION_TEST_NO_JSON( WorkerProtoTest, clientHandshakeInfo_1_35, "client-handshake-info_1_35", @@ -667,7 +699,7 @@ VERSIONED_CHARACTERIZATION_TEST( TEST_F(WorkerProtoTest, handshake_log) { - CharacterizationTest::writeTest("handshake-to-client", [&]() -> std::string { + CharacterizationTest::writeTest("handshake-to-client.bin", [&]() -> std::string { StringSink toClientLog; Pipe toClient, toServer; @@ -728,7 +760,7 @@ struct NullBufferedSink : BufferedSink TEST_F(WorkerProtoTest, handshake_client_replay) { - CharacterizationTest::readTest("handshake-to-client", [&](std::string toClientLog) { + CharacterizationTest::readTest("handshake-to-client.bin", [&](std::string toClientLog) { NullBufferedSink nullSink; StringSource in{toClientLog}; @@ -741,7 +773,7 @@ TEST_F(WorkerProtoTest, handshake_client_replay) TEST_F(WorkerProtoTest, handshake_client_truncated_replay_throws) { - CharacterizationTest::readTest("handshake-to-client", [&](std::string toClientLog) { + CharacterizationTest::readTest("handshake-to-client.bin", [&](std::string toClientLog) { for (size_t len = 0; len < toClientLog.size(); ++len) { NullBufferedSink nullSink; auto substring = 
toClientLog.substr(0, len); @@ -759,7 +791,7 @@ TEST_F(WorkerProtoTest, handshake_client_truncated_replay_throws) TEST_F(WorkerProtoTest, handshake_client_corrupted_throws) { - CharacterizationTest::readTest("handshake-to-client", [&](const std::string toClientLog) { + CharacterizationTest::readTest("handshake-to-client.bin", [&](const std::string toClientLog) { for (size_t idx = 0; idx < toClientLog.size(); ++idx) { // corrupt a copy std::string toClientLogCorrupt = toClientLog; diff --git a/src/libstore-tests/write-derivation.cc b/src/libstore-tests/write-derivation.cc new file mode 100644 index 00000000000..c320f92faf3 --- /dev/null +++ b/src/libstore-tests/write-derivation.cc @@ -0,0 +1,57 @@ +#include +#include + +#include "nix/util/tests/gmock-matchers.hh" +#include "nix/store/derivations.hh" +#include "nix/store/dummy-store-impl.hh" +#include "nix/store/tests/libstore.hh" + +namespace nix { +namespace { + +class WriteDerivationTest : public LibStoreTest +{ +protected: + WriteDerivationTest(ref config_) + : LibStoreTest(config_->openDummyStore()) + , config(std::move(config_)) + { + config->readOnly = false; + } + + WriteDerivationTest() + : WriteDerivationTest(make_ref(DummyStoreConfig::Params{})) + { + } + + ref config; +}; + +static Derivation makeSimpleDrv() +{ + Derivation drv; + drv.name = "simple-derivation"; + drv.platform = "system"; + drv.builder = "foo"; + drv.args = {"bar", "baz"}; + drv.env = StringPairs{{"BIG_BAD", "WOLF"}}; + return drv; +} + +} // namespace + +TEST_F(WriteDerivationTest, addToStoreFromDumpCalledOnce) +{ + auto drv = makeSimpleDrv(); + + auto path1 = writeDerivation(*store, drv, NoRepair); + config->readOnly = true; + auto path2 = writeDerivation(*store, drv, NoRepair); + EXPECT_EQ(path1, path2); + EXPECT_THAT( + [&] { writeDerivation(*store, drv, Repair); }, + ::testing::ThrowsMessage( + testing::HasSubstrIgnoreANSIMatcher("operation 'writeDerivation' is not supported by store 'dummy://'"))); +} + +} // namespace nix diff 
--git a/src/libstore/aws-creds.cc b/src/libstore/aws-creds.cc index dc8584e1ba6..dfdd81abbc4 100644 --- a/src/libstore/aws-creds.cc +++ b/src/libstore/aws-creds.cc @@ -1,6 +1,6 @@ #include "nix/store/aws-creds.hh" -#if NIX_WITH_CURL_S3 +#if NIX_WITH_AWS_AUTH # include # include "nix/store/s3-url.hh" @@ -22,36 +22,14 @@ namespace nix { -namespace { - -static void initAwsCrt() +AwsAuthError::AwsAuthError(int errorCode) + : Error("AWS authentication error: '%s' (%d)", aws_error_str(errorCode), errorCode) + , errorCode(errorCode) { - struct CrtWrapper - { - Aws::Crt::ApiHandle apiHandle; - - CrtWrapper() - { - apiHandle.InitializeLogging(Aws::Crt::LogLevel::Warn, static_cast(nullptr)); - } - - ~CrtWrapper() - { - try { - // CRITICAL: Clear credential provider cache BEFORE AWS CRT shuts down - // This ensures all providers (which hold references to ClientBootstrap) - // are destroyed while AWS CRT is still valid - clearAwsCredentialsCache(); - // Now it's safe for ApiHandle destructor to run - } catch (...) 
{ - ignoreExceptionInDestructor(); - } - } - }; - - static CrtWrapper crt; } +namespace { + static AwsCredentials getCredentialsFromProvider(std::shared_ptr provider) { if (!provider || !provider->IsValid()) { @@ -63,8 +41,7 @@ static AwsCredentials getCredentialsFromProvider(std::shared_ptrGetCredentials([prom](std::shared_ptr credentials, int errorCode) { if (errorCode != 0 || !credentials) { - prom->set_exception( - std::make_exception_ptr(AwsAuthError("Failed to resolve AWS credentials: error code %d", errorCode))); + prom->set_exception(std::make_exception_ptr(AwsAuthError(errorCode))); } else { auto accessKeyId = Aws::Crt::ByteCursorToStringView(credentials->GetAccessKeyId()); auto secretAccessKey = Aws::Crt::ByteCursorToStringView(credentials->GetSecretAccessKey()); @@ -95,82 +72,104 @@ static AwsCredentials getCredentialsFromProvider(std::shared_ptr>; - -static CredentialProviderCache credentialProviderCache; - } // anonymous namespace -AwsCredentials getAwsCredentials(const std::string & profile) +class AwsCredentialProviderImpl : public AwsCredentialProvider { - // Get or create credential provider with caching - std::shared_ptr provider; +public: + AwsCredentialProviderImpl() + { + // Map Nix's verbosity to AWS CRT log level + Aws::Crt::LogLevel logLevel; + if (verbosity >= lvlVomit) { + logLevel = Aws::Crt::LogLevel::Trace; + } else if (verbosity >= lvlDebug) { + logLevel = Aws::Crt::LogLevel::Debug; + } else if (verbosity >= lvlChatty) { + logLevel = Aws::Crt::LogLevel::Info; + } else { + logLevel = Aws::Crt::LogLevel::Warn; + } + apiHandle.InitializeLogging(logLevel, stderr); + } - // Try to find existing provider - credentialProviderCache.visit(profile, [&](const auto & pair) { provider = pair.second; }); - - if (!provider) { - // Create new provider if not found - debug( - "[pid=%d] creating new AWS credential provider for profile '%s'", - getpid(), - profile.empty() ? 
"(default)" : profile.c_str()); + AwsCredentials getCredentialsRaw(const std::string & profile); + AwsCredentials getCredentials(const ParsedS3URL & url) override + { + auto profile = url.profile.value_or(""); try { - initAwsCrt(); - - if (profile.empty()) { - Aws::Crt::Auth::CredentialsProviderChainDefaultConfig config; - config.Bootstrap = Aws::Crt::ApiHandle::GetOrCreateStaticDefaultClientBootstrap(); - provider = Aws::Crt::Auth::CredentialsProvider::CreateCredentialsProviderChainDefault(config); - } else { - Aws::Crt::Auth::CredentialsProviderProfileConfig config; - config.Bootstrap = Aws::Crt::ApiHandle::GetOrCreateStaticDefaultClientBootstrap(); - // This is safe because the underlying C library will copy this string - // c.f. https://github.com/awslabs/aws-c-auth/blob/main/source/credentials_provider_profile.c#L220 - config.ProfileNameOverride = Aws::Crt::ByteCursorFromCString(profile.c_str()); - provider = Aws::Crt::Auth::CredentialsProvider::CreateCredentialsProviderProfile(config); - } - } catch (Error & e) { - e.addTrace( - {}, - "while creating AWS credentials provider for %s", - profile.empty() ? "default profile" : fmt("profile '%s'", profile)); + return getCredentialsRaw(profile); + } catch (AwsAuthError & e) { + warn("AWS authentication failed for S3 request %s: %s", url.toHttpsUrl(), e.message()); + credentialProviderCache.erase(profile); throw; } + } - if (!provider) { - throw AwsAuthError( - "Failed to create AWS credentials provider for %s", - profile.empty() ? 
"default profile" : fmt("profile '%s'", profile)); - } + std::shared_ptr createProviderForProfile(const std::string & profile); - // Insert into cache (try_emplace is thread-safe and won't overwrite if another thread added it) - credentialProviderCache.try_emplace(profile, provider); +private: + Aws::Crt::ApiHandle apiHandle; + boost::concurrent_flat_map> + credentialProviderCache; +}; + +std::shared_ptr +AwsCredentialProviderImpl::createProviderForProfile(const std::string & profile) +{ + debug( + "[pid=%d] creating new AWS credential provider for profile '%s'", + getpid(), + profile.empty() ? "(default)" : profile.c_str()); + + if (profile.empty()) { + Aws::Crt::Auth::CredentialsProviderChainDefaultConfig config; + config.Bootstrap = Aws::Crt::ApiHandle::GetOrCreateStaticDefaultClientBootstrap(); + return Aws::Crt::Auth::CredentialsProvider::CreateCredentialsProviderChainDefault(config); } - return getCredentialsFromProvider(provider); + Aws::Crt::Auth::CredentialsProviderProfileConfig config; + config.Bootstrap = Aws::Crt::ApiHandle::GetOrCreateStaticDefaultClientBootstrap(); + // This is safe because the underlying C library will copy this string + // c.f. 
https://github.com/awslabs/aws-c-auth/blob/main/source/credentials_provider_profile.c#L220 + config.ProfileNameOverride = Aws::Crt::ByteCursorFromCString(profile.c_str()); + return Aws::Crt::Auth::CredentialsProvider::CreateCredentialsProviderProfile(config); } -void invalidateAwsCredentials(const std::string & profile) +AwsCredentials AwsCredentialProviderImpl::getCredentialsRaw(const std::string & profile) { - credentialProviderCache.erase(profile); + std::shared_ptr provider; + + credentialProviderCache.try_emplace_and_cvisit( + profile, + nullptr, + [&](auto & kv) { provider = kv.second = createProviderForProfile(profile); }, + [&](const auto & kv) { provider = kv.second; }); + + if (!provider) { + credentialProviderCache.erase_if(profile, [](const auto & kv) { + [[maybe_unused]] auto [_, provider] = kv; + return !provider; + }); + + throw AwsAuthError( + "Failed to create AWS credentials provider for %s", + profile.empty() ? "default profile" : fmt("profile '%s'", profile)); + } + + return getCredentialsFromProvider(provider); } -void clearAwsCredentialsCache() +ref makeAwsCredentialsProvider() { - credentialProviderCache.clear(); + return make_ref(); } -AwsCredentials preResolveAwsCredentials(const ParsedS3URL & s3Url) +ref getAwsCredentialsProvider() { - std::string profile = s3Url.profile.value_or(""); - - // Get credentials (automatically cached) - return getAwsCredentials(profile); + static auto instance = makeAwsCredentialsProvider(); + return instance; } } // namespace nix diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index badfb4b1484..848669ae84f 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -8,7 +8,7 @@ #include "nix/util/sync.hh" #include "nix/store/remote-fs-accessor.hh" #include "nix/store/nar-info-disk-cache.hh" -#include "nix/store/nar-accessor.hh" +#include "nix/util/nar-accessor.hh" #include "nix/util/thread-pool.hh" #include "nix/util/callback.hh" #include 
"nix/util/signals.hh" @@ -76,9 +76,11 @@ std::optional BinaryCacheStore::getNixCacheInfo() return getFile(cacheInfoFile); } -void BinaryCacheStore::upsertFile(const std::string & path, std::string && data, const std::string & mimeType) +void BinaryCacheStore::upsertFile( + const std::string & path, std::string && data, const std::string & mimeType, uint64_t sizeHint) { - upsertFile(path, std::make_shared(std::move(data)), mimeType); + StringSource source{data}; + upsertFile(path, source, mimeType, sizeHint); } void BinaryCacheStore::getFile(const std::string & path, Callback> callback) noexcept @@ -125,8 +127,7 @@ void BinaryCacheStore::writeNarInfo(ref narInfo) upsertFile(narInfoFile, narInfo->to_string(*this), "text/x-nix-narinfo"); - pathInfoCache->lock()->upsert( - std::string(narInfo->path.to_string()), PathInfoCacheValue{.value = std::shared_ptr(narInfo)}); + pathInfoCache->lock()->upsert(narInfo->path, PathInfoCacheValue{.value = std::shared_ptr(narInfo)}); if (diskCache) diskCache->upsertNarInfo( @@ -138,9 +139,7 @@ void BinaryCacheStore::writeNarInfo(ref narInfo) ref BinaryCacheStore::addToStoreCommon( Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs, std::function mkInfo) { - auto [fdTemp, fnTemp] = createTempFile(); - - AutoDelete autoDelete(fnTemp); + auto fdTemp = createAnonymousTempFile(); auto now1 = std::chrono::steady_clock::now(); @@ -206,7 +205,7 @@ ref BinaryCacheStore::addToStoreCommon( if (config.writeNARListing) { nlohmann::json j = { {"version", 1}, - {"root", listNar(ref(narAccessor), CanonPath::root, true)}, + {"root", listNarDeep(*narAccessor, CanonPath::root)}, }; upsertFile(std::string(info.path.hashPart()) + ".ls", j.dump(), "application/json"); @@ -270,11 +269,10 @@ ref BinaryCacheStore::addToStoreCommon( /* Atomically write the NAR file. */ if (repair || !fileExists(narInfo->url)) { + FdSource source{fdTemp.get()}; + source.restart(); /* Seek back to the start of the file. 
*/ stats.narWrite++; - upsertFile( - narInfo->url, - std::make_shared(fnTemp, std::ios_base::in | std::ios_base::binary), - "application/x-nix-nar"); + upsertFile(narInfo->url, source, "application/x-nix-nar", narInfo->fileSize); } else stats.narWriteAverted++; @@ -408,10 +406,20 @@ void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink) { auto info = queryPathInfo(storePath).cast(); - LengthSink narSize; - TeeSink tee{sink, narSize}; + uint64_t narSize = 0; + + LambdaSink uncompressedSink{ + [&](std::string_view data) { + narSize += data.size(); + sink(data); + }, + [&]() { + stats.narRead++; + // stats.narReadCompressedBytes += nar->size(); // FIXME + stats.narReadBytes += narSize; + }}; - auto decompressor = makeDecompressionSink(info->compression, tee); + auto decompressor = makeDecompressionSink(info->compression, uncompressedSink); try { getFile(info->url, *decompressor); @@ -421,9 +429,7 @@ void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink) decompressor->finish(); - stats.narRead++; - // stats.narReadCompressedBytes += nar->size(); // FIXME - stats.narReadBytes += narSize.length; + // Note: don't do anything here because it's never reached if we're called as a coroutine. 
} void BinaryCacheStore::queryPathInfoUncached( @@ -502,10 +508,15 @@ StorePath BinaryCacheStore::addToStore( ->path; } +std::string BinaryCacheStore::makeRealisationPath(const DrvOutput & id) +{ + return realisationsPrefix + "/" + id.to_string() + ".doi"; +} + void BinaryCacheStore::queryRealisationUncached( - const DrvOutput & id, Callback> callback) noexcept + const DrvOutput & id, Callback> callback) noexcept { - auto outputInfoFilePath = realisationsPrefix + "/" + id.to_string() + ".doi"; + auto outputInfoFilePath = makeRealisationPath(id); auto callbackPtr = std::make_shared(std::move(callback)); @@ -515,11 +526,12 @@ void BinaryCacheStore::queryRealisationUncached( if (!data) return (*callbackPtr)({}); - std::shared_ptr realisation; + std::shared_ptr realisation; try { - realisation = std::make_shared(nlohmann::json::parse(*data)); + realisation = std::make_shared(nlohmann::json::parse(*data)); } catch (Error & e) { - e.addTrace({}, "while parsing file '%s' as a realisation", outputInfoFilePath); + e.addTrace( + {}, "while parsing file '%s' as a realisation for key '%s'", outputInfoFilePath, id.to_string()); throw; } return (*callbackPtr)(std::move(realisation)); @@ -535,8 +547,7 @@ void BinaryCacheStore::registerDrvOutput(const Realisation & info) { if (diskCache) diskCache->upsertRealisation(config.getReference().render(/*FIXME withParams=*/false), info); - auto filePath = realisationsPrefix + "/" + info.id.to_string() + ".doi"; - upsertFile(filePath, static_cast(info).dump(), "application/json"); + upsertFile(makeRealisationPath(info.id), static_cast(info).dump(), "application/json"); } ref BinaryCacheStore::getRemoteFSAccessor(bool requireValidPath) diff --git a/src/libstore/build-result.cc b/src/libstore/build-result.cc index 3e96d585235..c9ac7d60b7e 100644 --- a/src/libstore/build-result.cc +++ b/src/libstore/build-result.cc @@ -1,4 +1,6 @@ #include "nix/store/build-result.hh" +#include "nix/util/json-utils.hh" +#include #include @@ -13,100 +15,161 @@ 
std::strong_ordering BuildResult::Success::operator<=>(const BuildResult::Succes bool BuildResult::Failure::operator==(const BuildResult::Failure &) const noexcept = default; std::strong_ordering BuildResult::Failure::operator<=>(const BuildResult::Failure &) const noexcept = default; +static constexpr std::array, 4> successStatusStrings{{ +#define ENUM_ENTRY(e) {BuildResult::Success::e, #e} + ENUM_ENTRY(Built), + ENUM_ENTRY(Substituted), + ENUM_ENTRY(AlreadyValid), + ENUM_ENTRY(ResolvesToAlreadyValid), +#undef ENUM_ENTRY +}}; + std::string_view BuildResult::Success::statusToString(BuildResult::Success::Status status) { - switch (status) { - case BuildResult::Success::Built: - return "Built"; - case BuildResult::Success::Substituted: - return "Substituted"; - case BuildResult::Success::AlreadyValid: - return "AlreadyValid"; - case BuildResult::Success::ResolvesToAlreadyValid: - return "ResolvesToAlreadyValid"; - default: - unreachable(); + for (const auto & [enumVal, str] : successStatusStrings) { + if (enumVal == status) + return str; + } + throw Error("unknown success status: %d", static_cast(status)); +} + +static BuildResult::Success::Status successStatusFromString(std::string_view str) +{ + for (const auto & [enumVal, enumStr] : successStatusStrings) { + if (enumStr == str) + return enumVal; } + throw Error("unknown built result success status '%s'", str); } +static constexpr std::array, 13> failureStatusStrings{{ +#define ENUM_ENTRY(e) {BuildResult::Failure::e, #e} + ENUM_ENTRY(PermanentFailure), + ENUM_ENTRY(InputRejected), + ENUM_ENTRY(OutputRejected), + ENUM_ENTRY(TransientFailure), + ENUM_ENTRY(CachedFailure), + ENUM_ENTRY(TimedOut), + ENUM_ENTRY(MiscFailure), + ENUM_ENTRY(DependencyFailed), + ENUM_ENTRY(LogLimitExceeded), + ENUM_ENTRY(NotDeterministic), + ENUM_ENTRY(NoSubstituters), + ENUM_ENTRY(HashMismatch), + ENUM_ENTRY(Cancelled), +#undef ENUM_ENTRY +}}; + std::string_view BuildResult::Failure::statusToString(BuildResult::Failure::Status status) { - 
switch (status) { - case BuildResult::Failure::PermanentFailure: - return "PermanentFailure"; - case BuildResult::Failure::InputRejected: - return "InputRejected"; - case BuildResult::Failure::OutputRejected: - return "OutputRejected"; - case BuildResult::Failure::TransientFailure: - return "TransientFailure"; - case BuildResult::Failure::CachedFailure: - return "CachedFailure"; - case BuildResult::Failure::TimedOut: - return "TimedOut"; - case BuildResult::Failure::MiscFailure: - return "MiscFailure"; - case BuildResult::Failure::DependencyFailed: - return "DependencyFailed"; - case BuildResult::Failure::LogLimitExceeded: - return "LogLimitExceeded"; - case BuildResult::Failure::NotDeterministic: - return "NotDeterministic"; - case BuildResult::Failure::NoSubstituters: - return "NoSubstituters"; - case BuildResult::Failure::HashMismatch: - return "HashMismatch"; - case BuildResult::Failure::Cancelled: - return "Cancelled"; - default: - unreachable(); + for (const auto & [enumVal, str] : failureStatusStrings) { + if (enumVal == status) + return str; } + throw Error("unknown failure status: %d", static_cast(status)); } -void to_json(nlohmann::json & json, const BuildResult & buildResult) +static BuildResult::Failure::Status failureStatusFromString(std::string_view str) { - json = nlohmann::json::object(); - // FIXME: change this to have `success` and `failure` objects. 
- if (auto success = buildResult.tryGetSuccess()) { - json["status"] = BuildResult::Success::statusToString(success->status); - } else if (auto failure = buildResult.tryGetFailure()) { - json["status"] = BuildResult::Failure::statusToString(failure->status); - if (failure->errorMsg != "") - json["errorMsg"] = failure->errorMsg; - if (failure->isNonDeterministic) - json["isNonDeterministic"] = failure->isNonDeterministic; + for (const auto & [enumVal, enumStr] : failureStatusStrings) { + if (enumStr == str) + return enumVal; } - if (buildResult.timesBuilt) - json["timesBuilt"] = buildResult.timesBuilt; - if (buildResult.startTime) - json["startTime"] = buildResult.startTime; - if (buildResult.stopTime) - json["stopTime"] = buildResult.stopTime; + throw Error("unknown built result failure status '%s'", str); } -void to_json(nlohmann::json & json, const KeyedBuildResult & buildResult) +} // namespace nix + +namespace nlohmann { + +using namespace nix; + +void adl_serializer::to_json(json & res, const BuildResult & br) { - to_json(json, (const BuildResult &) buildResult); - auto path = nlohmann::json::object(); + res = json::object(); + + // Common fields + res["timesBuilt"] = br.timesBuilt; + res["startTime"] = br.startTime; + res["stopTime"] = br.stopTime; + + if (br.cpuUser.has_value()) { + res["cpuUser"] = br.cpuUser->count(); + } + if (br.cpuSystem.has_value()) { + res["cpuSystem"] = br.cpuSystem->count(); + } + + // Handle success or failure variant std::visit( overloaded{ - [&](const DerivedPathOpaque & opaque) { path["opaque"] = opaque.path.to_string(); }, - [&](const DerivedPathBuilt & drv) { - path["drvPath"] = drv.drvPath->getBaseStorePath().to_string(); - path["outputs"] = drv.outputs; - auto outputs = nlohmann::json::object(); - if (auto success = buildResult.tryGetSuccess()) { - for (auto & [name, output] : success->builtOutputs) - outputs[name] = { - {"path", output.outPath.to_string()}, - {"signatures", output.signatures}, - }; - json["builtOutputs"] = 
std::move(outputs); - } + [&](const BuildResult::Success & success) { + res["success"] = true; + res["status"] = BuildResult::Success::statusToString(success.status); + res["builtOutputs"] = success.builtOutputs; + }, + [&](const BuildResult::Failure & failure) { + res["success"] = false; + res["status"] = BuildResult::Failure::statusToString(failure.status); + res["errorMsg"] = failure.errorMsg; + res["isNonDeterministic"] = failure.isNonDeterministic; }, }, - buildResult.path.raw()); - json["path"] = std::move(path); + br.inner); } -} // namespace nix +BuildResult adl_serializer::from_json(const json & _json) +{ + auto & json = getObject(_json); + + BuildResult br; + + // Common fields + br.timesBuilt = getUnsigned(valueAt(json, "timesBuilt")); + br.startTime = getUnsigned(valueAt(json, "startTime")); + br.stopTime = getUnsigned(valueAt(json, "stopTime")); + + if (auto cpuUser = optionalValueAt(json, "cpuUser")) { + br.cpuUser = std::chrono::microseconds(getUnsigned(*cpuUser)); + } + if (auto cpuSystem = optionalValueAt(json, "cpuSystem")) { + br.cpuSystem = std::chrono::microseconds(getUnsigned(*cpuSystem)); + } + + // Determine success or failure based on success field + bool success = getBoolean(valueAt(json, "success")); + std::string statusStr = getString(valueAt(json, "status")); + + if (success) { + BuildResult::Success s; + s.status = successStatusFromString(statusStr); + s.builtOutputs = valueAt(json, "builtOutputs"); + br.inner = std::move(s); + } else { + BuildResult::Failure f; + f.status = failureStatusFromString(statusStr); + f.errorMsg = getString(valueAt(json, "errorMsg")); + f.isNonDeterministic = getBoolean(valueAt(json, "isNonDeterministic")); + br.inner = std::move(f); + } + + return br; +} + +KeyedBuildResult adl_serializer::from_json(const json & json0) +{ + auto json = getObject(json0); + + return KeyedBuildResult{ + adl_serializer::from_json(json0), + valueAt(json, "path"), + }; +} + +void adl_serializer::to_json(json & json, const 
KeyedBuildResult & kbr) +{ + adl_serializer::to_json(json, kbr); + json["path"] = kbr.path; +} + +} // namespace nlohmann diff --git a/src/libstore/build/derivation-builder.cc b/src/libstore/build/derivation-builder.cc new file mode 100644 index 00000000000..39ac40175f7 --- /dev/null +++ b/src/libstore/build/derivation-builder.cc @@ -0,0 +1,27 @@ +#include "nix/util/json-utils.hh" +#include "nix/store/build/derivation-builder.hh" + +namespace nlohmann { + +using namespace nix; + +ExternalBuilder adl_serializer::from_json(const json & json) +{ + auto obj = getObject(json); + return { + .systems = valueAt(obj, "systems"), + .program = valueAt(obj, "program"), + .args = valueAt(obj, "args"), + }; +} + +void adl_serializer::to_json(json & json, const ExternalBuilder & eb) +{ + json = { + {"systems", eb.systems}, + {"program", eb.program}, + {"args", eb.args}, + }; +} + +} // namespace nlohmann diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index cf21f80387f..10ba0e78b77 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -1,6 +1,5 @@ #include "nix/store/build/derivation-building-goal.hh" #include "nix/store/build/derivation-env-desugar.hh" -#include "nix/store/build/derivation-trampoline-goal.hh" #ifndef _WIN32 // TODO enable build hook on Windows # include "nix/store/build/hook-instance.hh" # include "nix/store/build/derivation-builder.hh" @@ -27,22 +26,13 @@ namespace nix { DerivationBuildingGoal::DerivationBuildingGoal( - const StorePath & drvPath, const Derivation & drv_, Worker & worker, BuildMode buildMode) - : Goal(worker, gaveUpOnSubstitution()) + const StorePath & drvPath, const Derivation & drv, Worker & worker, BuildMode buildMode, bool storeDerivation) + : Goal(worker, gaveUpOnSubstitution(storeDerivation)) , drvPath(drvPath) + , drv{std::make_unique(drv)} , buildMode(buildMode) { - drv = std::make_unique(drv_); - - try { - 
drvOptions = - std::make_unique(DerivationOptions::fromStructuredAttrs(drv->env, drv->structuredAttrs)); - } catch (Error & e) { - e.addTrace({}, "while parsing derivation '%s'", worker.store.printStorePath(drvPath)); - throw; - } - - name = fmt("building of '%s' from in-memory derivation", worker.store.printStorePath(drvPath)); + name = fmt("building derivation '%s'", worker.store.printStorePath(drvPath)); trace("created"); /* Prevent the .chroot directory from being @@ -67,11 +57,7 @@ DerivationBuildingGoal::~DerivationBuildingGoal() std::string DerivationBuildingGoal::key() { - /* Ensure that derivations get built in order of their name, - i.e. a derivation named "aardvark" always comes before - "baboon". And substitution goals always happen before - derivation goals (due to "bd$"). */ - return "bd$" + std::string(drvPath.name()) + "$" + worker.store.printStorePath(drvPath); + return "dd$" + std::string(drvPath.name()) + "$" + worker.store.printStorePath(drvPath); } void DerivationBuildingGoal::killChild() @@ -93,18 +79,6 @@ void DerivationBuildingGoal::timedOut(Error && ex) [[maybe_unused]] Done _ = doneFailure({BuildResult::Failure::TimedOut, std::move(ex)}); } -/** - * Used for `inputGoals` local variable below - */ -struct value_comparison -{ - template - bool operator()(const ref & lhs, const ref & rhs) const - { - return *lhs < *rhs; - } -}; - std::string showKnownOutputs(const StoreDirConfig & store, const Derivation & drv) { std::string msg; @@ -125,50 +99,10 @@ static void runPostBuildHook( /* At least one of the output paths could not be produced using a substitute. So we have to build instead. 
*/ -Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() +Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution(bool storeDerivation) { Goals waitees; - std::map, GoalPtr, value_comparison> inputGoals; - - { - std::function, const DerivedPathMap::ChildNode &)> - addWaiteeDerivedPath; - - addWaiteeDerivedPath = [&](ref inputDrv, - const DerivedPathMap::ChildNode & inputNode) { - if (!inputNode.value.empty()) { - auto g = worker.makeGoal( - DerivedPath::Built{ - .drvPath = inputDrv, - .outputs = inputNode.value, - }, - buildMode == bmRepair ? bmRepair : bmNormal); - inputGoals.insert_or_assign(inputDrv, g); - waitees.insert(std::move(g)); - } - for (const auto & [outputName, childNode] : inputNode.childMap) - addWaiteeDerivedPath( - make_ref(SingleDerivedPath::Built{inputDrv, outputName}), childNode); - }; - - for (const auto & [inputDrvPath, inputNode] : drv->inputDrvs.map) { - /* Ensure that pure, non-fixed-output derivations don't - depend on impure derivations. */ - if (experimentalFeatureSettings.isEnabled(Xp::ImpureDerivations) && !drv->type().isImpure() - && !drv->type().isFixed()) { - auto inputDrv = worker.evalStore.readDerivation(inputDrvPath); - if (inputDrv.type().isImpure()) - throw Error( - "pure derivation '%s' depends on impure derivation '%s'", - worker.store.printStorePath(drvPath), - worker.store.printStorePath(inputDrvPath)); - } - - addWaiteeDerivedPath(makeConstantStorePathRef(inputDrvPath), inputNode); - } - } - /* Copy the input sources from the eval store to the build store. @@ -213,177 +147,17 @@ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() /* Determine the full set of input paths. */ - /* First, the input derivations. */ - { - auto & fullDrv = *drv; - - auto drvType = fullDrv.type(); - bool resolveDrv = - std::visit( - overloaded{ - [&](const DerivationType::InputAddressed & ia) { - /* must resolve if deferred. 
*/ - return ia.deferred; - }, - [&](const DerivationType::ContentAddressed & ca) { - return !fullDrv.inputDrvs.map.empty() - && (ca.fixed - /* Can optionally resolve if fixed, which is good - for avoiding unnecessary rebuilds. */ - ? experimentalFeatureSettings.isEnabled(Xp::CaDerivations) - /* Must resolve if floating and there are any inputs - drvs. */ - : true); - }, - [&](const DerivationType::Impure &) { return true; }}, - drvType.raw) - /* no inputs are outputs of dynamic derivations */ - || std::ranges::any_of(fullDrv.inputDrvs.map.begin(), fullDrv.inputDrvs.map.end(), [](auto & pair) { - return !pair.second.childMap.empty(); - }); - - if (resolveDrv && !fullDrv.inputDrvs.map.empty()) { - experimentalFeatureSettings.require(Xp::CaDerivations); - - /* We are be able to resolve this derivation based on the - now-known results of dependencies. If so, we become a - stub goal aliasing that resolved derivation goal. */ - std::optional attempt = fullDrv.tryResolve( - worker.store, - [&](ref drvPath, const std::string & outputName) -> std::optional { - auto mEntry = get(inputGoals, drvPath); - if (!mEntry) - return std::nullopt; - - auto & buildResult = (*mEntry)->buildResult; - return std::visit( - overloaded{ - [](const BuildResult::Failure &) -> std::optional { return std::nullopt; }, - [&](const BuildResult::Success & success) -> std::optional { - auto i = get(success.builtOutputs, outputName); - if (!i) - return std::nullopt; - - return i->outPath; - }, - }, - buildResult.inner); - }); - if (!attempt) { - /* TODO (impure derivations-induced tech debt) (see below): - The above attempt should have found it, but because we manage - inputDrvOutputs statefully, sometimes it gets out of sync with - the real source of truth (store). So we query the store - directly if there's a problem. 
*/ - attempt = fullDrv.tryResolve(worker.store, &worker.evalStore); - } - assert(attempt); - Derivation drvResolved{std::move(*attempt)}; - - auto pathResolved = writeDerivation(worker.store, drvResolved); - - auto msg = - fmt("resolved derivation: '%s' -> '%s'", - worker.store.printStorePath(drvPath), - worker.store.printStorePath(pathResolved)); - act = std::make_unique( - *logger, - lvlInfo, - actBuildWaiting, - msg, - Logger::Fields{ - worker.store.printStorePath(drvPath), - worker.store.printStorePath(pathResolved), - }); - - /* TODO https://github.com/NixOS/nix/issues/13247 we should - let the calling goal do this, so it has a change to pass - just the output(s) it cares about. */ - auto resolvedDrvGoal = - worker.makeDerivationTrampolineGoal(pathResolved, OutputsSpec::All{}, drvResolved, buildMode); - { - Goals waitees{resolvedDrvGoal}; - co_await await(std::move(waitees)); - } - - trace("resolved derivation finished"); - - auto resolvedResult = resolvedDrvGoal->buildResult; - - // No `std::visit` for coroutines yet - if (auto * successP = resolvedResult.tryGetSuccess()) { - auto & success = *successP; - SingleDrvOutputs builtOutputs; - - auto outputHashes = staticOutputHashes(worker.evalStore, *drv); - auto resolvedHashes = staticOutputHashes(worker.store, drvResolved); - - StorePathSet outputPaths; - - for (auto & outputName : drvResolved.outputNames()) { - auto outputHash = get(outputHashes, outputName); - auto resolvedHash = get(resolvedHashes, outputName); - if ((!outputHash) || (!resolvedHash)) - throw Error( - "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolve)", - worker.store.printStorePath(drvPath), - outputName); - - auto realisation = [&] { - auto take1 = get(success.builtOutputs, outputName); - if (take1) - return *take1; - - /* The above `get` should work. But stateful tracking of - outputs in resolvedResult, this can get out of sync with the - store, which is our actual source of truth. 
For now we just - check the store directly if it fails. */ - auto take2 = worker.evalStore.queryRealisation(DrvOutput{*resolvedHash, outputName}); - if (take2) - return *take2; - - throw Error( - "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/realisation)", - worker.store.printStorePath(pathResolved), - outputName); - }(); - - if (!drv->type().isImpure()) { - auto newRealisation = realisation; - newRealisation.id = DrvOutput{*outputHash, outputName}; - newRealisation.signatures.clear(); - if (!drv->type().isFixed()) { - auto & drvStore = worker.evalStore.isValidPath(drvPath) ? worker.evalStore : worker.store; - newRealisation.dependentRealisations = - drvOutputReferences(worker.store, *drv, realisation.outPath, &drvStore); - } - worker.store.signRealisation(newRealisation); - worker.store.registerDrvOutput(newRealisation); - } - outputPaths.insert(realisation.outPath); - builtOutputs.emplace(outputName, realisation); - } - - runPostBuildHook(worker.store, *logger, drvPath, outputPaths); - - auto status = success.status; - if (status == BuildResult::Success::AlreadyValid) - status = BuildResult::Success::ResolvesToAlreadyValid; - - co_return doneSuccess(success.status, std::move(builtOutputs)); - } else if (resolvedResult.tryGetFailure()) { - co_return doneFailure({ - BuildResult::Failure::DependencyFailed, - "build of resolved derivation '%s' failed", - worker.store.printStorePath(pathResolved), - }); - } else - assert(false); - } + if (storeDerivation) { + assert(drv->inputDrvs.map.empty()); + /* Store the resolved derivation, as part of the record of + what we're actually building */ + writeDerivation(worker.store, *drv); + } + { /* If we get this far, we know no dynamic drvs inputs */ - for (auto & [depDrvPath, depNode] : fullDrv.inputDrvs.map) { + for (auto & [depDrvPath, depNode] : drv->inputDrvs.map) { for (auto & outputName : depNode.value) { /* Don't need to worry about `inputGoals`, because impure derivations are always resolved 
above. Can @@ -424,6 +198,38 @@ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() Goal::Co DerivationBuildingGoal::tryToBuild() { + auto drvOptions = [&] { + DerivationOptions temp; + try { + temp = + derivationOptionsFromStructuredAttrs(worker.store, drv->inputDrvs, drv->env, get(drv->structuredAttrs)); + } catch (Error & e) { + e.addTrace({}, "while parsing derivation '%s'", worker.store.printStorePath(drvPath)); + throw; + } + + auto res = tryResolve( + temp, + [&](ref drvPath, const std::string & outputName) -> std::optional { + try { + return resolveDerivedPath( + worker.store, SingleDerivedPath::Built{drvPath, outputName}, &worker.evalStore); + } catch (Error &) { + return std::nullopt; + } + }); + + /* The derivation must have all of its inputs gotten this point, + so the resolution will surely succeed. + + (Actually, we shouldn't even enter this goal until we have a + resolved derivation, or derivation with only input addressed + transitive inputs, so this should be a no-opt anyways.) + */ + assert(res); + return *res; + }(); + std::map initialOutputs; /* Recheck at this point. In particular, whereas before we were @@ -457,7 +263,6 @@ Goal::Co DerivationBuildingGoal::tryToBuild() : buildMode == bmCheck ? "checking outputs of '%s'" : "building '%s'", worker.store.printStorePath(drvPath)); - fmt("building '%s'", worker.store.printStorePath(drvPath)); #ifndef _WIN32 // TODO enable build hook on Windows if (hook) msg += fmt(" on '%s'", hook->machineName); @@ -491,6 +296,8 @@ Goal::Co DerivationBuildingGoal::tryToBuild() bool useHook; + const ExternalBuilder * externalBuilder = nullptr; + while (true) { trace("trying to build"); @@ -500,10 +307,10 @@ Goal::Co DerivationBuildingGoal::tryToBuild() crashes. If we can't acquire the lock, then continue; hopefully some other goal can start a build, and if not, the main loop will sleep a few seconds and then retry this goal. 
*/ - PathSet lockFiles; + std::set lockFiles; /* FIXME: Should lock something like the drv itself so we don't build same CA drv concurrently */ - if (dynamic_cast(&worker.store)) { + if (auto * localStore = dynamic_cast(&worker.store)) { /* If we aren't a local store, we might need to use the local store as a build remote, but that would cause a deadlock. */ /* FIXME: Make it so we can use ourselves as a build remote even if we @@ -513,9 +320,9 @@ Goal::Co DerivationBuildingGoal::tryToBuild() */ for (auto & i : drv->outputsAndOptPaths(worker.store)) { if (i.second.second) - lockFiles.insert(worker.store.Store::toRealPath(*i.second.second)); + lockFiles.insert(localStore->toRealPath(*i.second.second)); else - lockFiles.insert(worker.store.Store::toRealPath(drvPath) + "." + i.first); + lockFiles.insert(localStore->toRealPath(drvPath) + "." + i.first); } } @@ -548,24 +355,26 @@ Goal::Co DerivationBuildingGoal::tryToBuild() /* If any of the outputs already exist but are not valid, delete them. */ - for (auto & [_, status] : initialOutputs) { - if (!status.known || status.known->isValid()) - continue; - auto storePath = status.known->path; - debug("removing invalid path '%s'", worker.store.printStorePath(status.known->path)); - deletePath(worker.store.Store::toRealPath(storePath)); + if (auto * localStore = dynamic_cast(&worker.store)) { + for (auto & [_, status] : initialOutputs) { + if (!status.known || status.known->isValid()) + continue; + auto storePath = status.known->path; + debug("removing invalid path '%s'", worker.store.printStorePath(status.known->path)); + deletePath(localStore->toRealPath(storePath)); + } } /* Don't do a remote build if the derivation has the attribute `preferLocalBuild' set. Also, check and repair modes are only supported for local builds. 
*/ - bool buildLocally = (buildMode != bmNormal || drvOptions->willBuildLocally(worker.store, *drv)) + bool buildLocally = (buildMode != bmNormal || drvOptions.willBuildLocally(worker.store, *drv)) && settings.maxBuildJobs.get() != 0; if (buildLocally) { useHook = false; } else { - switch (tryBuildHook(initialOutputs)) { + switch (tryBuildHook(initialOutputs, drvOptions)) { case rpAccept: /* Yes, it has started doing so. Wait until we get EOF from the hook. */ @@ -584,7 +393,44 @@ Goal::Co DerivationBuildingGoal::tryToBuild() co_await waitForAWhile(); continue; case rpDecline: - /* We should do it ourselves. */ + /* We should do it ourselves. + + Now that we've decided we can't / won't do a remote build, check + that we can in fact build locally. First see if there is an + external builder for a "semi-local build". If there is, prefer to + use that. If there is not, then check if we can do a "true" local + build. */ + + externalBuilder = settings.findExternalDerivationBuilderIfSupported(*drv); + + if (!externalBuilder && !drvOptions.canBuildLocally(worker.store, *drv)) { + auto msg = + fmt("Cannot build '%s'.\n" + "Reason: " ANSI_RED "required system or feature not available" ANSI_NORMAL + "\n" + "Required system: '%s' with features {%s}\n" + "Current system: '%s' with features {%s}", + Magenta(worker.store.printStorePath(drvPath)), + Magenta(drv->platform), + concatStringsSep(", ", drvOptions.getRequiredSystemFeatures(*drv)), + Magenta(settings.thisSystem), + concatStringsSep(", ", worker.store.Store::config.systemFeatures)); + + // since aarch64-darwin has Rosetta 2, this user can actually run x86_64-darwin on their hardware - + // we should tell them to run the command to install Darwin 2 + if (drv->platform == "x86_64-darwin" && settings.thisSystem == "aarch64-darwin") + msg += fmt( + "\nNote: run `%s` to run programs for x86_64-darwin", + Magenta( + "/usr/sbin/softwareupdate --install-rosetta && launchctl stop org.nixos.nix-daemon")); + +#ifndef _WIN32 // TODO 
enable `DerivationBuilder` on Windows + builder.reset(); +#endif + outputLocks.unlock(); + worker.permanentFailure = true; + co_return doneFailure({BuildResult::Failure::InputRejected, std::move(msg)}); + } useHook = false; break; } @@ -764,44 +610,43 @@ Goal::Co DerivationBuildingGoal::tryToBuild() } try { - desugaredEnv = DesugaredEnv::create(worker.store, *drv, *drvOptions, inputPaths); + desugaredEnv = DesugaredEnv::create(worker.store, *drv, drvOptions, inputPaths); } catch (BuildError & e) { outputLocks.unlock(); worker.permanentFailure = true; co_return doneFailure(std::move(e)); } + DerivationBuilderParams params{ + .drvPath = drvPath, + .buildResult = buildResult, + .drv = *drv, + .drvOptions = drvOptions, + .inputPaths = inputPaths, + .initialOutputs = initialOutputs, + .buildMode = buildMode, + .defaultPathsInChroot = std::move(defaultPathsInChroot), + .systemFeatures = worker.store.config.systemFeatures.get(), + .desugaredEnv = std::move(desugaredEnv), + .act = act, + }; + /* If we have to wait and retry (see below), then `builder` will already be created, so we don't need to create it again. */ - builder = makeDerivationBuilder( - *localStoreP, - std::make_unique(*this, builder), - DerivationBuilderParams{ - .drvPath = drvPath, - .buildResult = buildResult, - .drv = *drv, - .drvOptions = *drvOptions, - .inputPaths = inputPaths, - .initialOutputs = initialOutputs, - .buildMode = buildMode, - .defaultPathsInChroot = std::move(defaultPathsInChroot), - .systemFeatures = worker.store.config.systemFeatures.get(), - .desugaredEnv = std::move(desugaredEnv), - .act = act, - }); + builder = externalBuilder ? makeExternalDerivationBuilder( + *localStoreP, + std::make_unique(*this, builder), + std::move(params), + *externalBuilder) + : makeDerivationBuilder( + *localStoreP, + std::make_unique(*this, builder), + std::move(params)); } - std::optional builderOutOpt; - try { - /* Okay, we have to build. 
*/ - builderOutOpt = builder->startBuild(); - } catch (BuildError & e) { - builder.reset(); - outputLocks.unlock(); - worker.permanentFailure = true; - co_return doneFailure(std::move(e)); // InputRejected - } - if (!builderOutOpt) { + if (auto builderOutOpt = builder->startBuild()) { + builderOut = *std::move(builderOutOpt); + } else { if (!actLock) actLock = std::make_unique( *logger, @@ -810,9 +655,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() fmt("waiting for a free build user ID for '%s'", Magenta(worker.store.printStorePath(drvPath)))); co_await waitForAWhile(); continue; - } else { - builderOut = *std::move(builderOutOpt); - }; + } break; } @@ -985,7 +828,8 @@ BuildError DerivationBuildingGoal::fixupBuilderFailureErrorMessage(BuilderFailur return BuildError{e.status, msg}; } -HookReply DerivationBuildingGoal::tryBuildHook(const std::map & initialOutputs) +HookReply DerivationBuildingGoal::tryBuildHook( + const std::map & initialOutputs, const DerivationOptions & drvOptions) { #ifdef _WIN32 // TODO enable build hook on Windows return rpDecline; @@ -1002,7 +846,7 @@ HookReply DerivationBuildingGoal::tryBuildHook(const std::mapsink << "try" << (worker.getNrLocalBuilds() < settings.maxBuildJobs ? 1 : 0) << drv->platform - << worker.store.printStorePath(drvPath) << drvOptions->getRequiredSystemFeatures(*drv); + << worker.store.printStorePath(drvPath) << drvOptions.getRequiredSystemFeatures(*drv); worker.hook->sink.flush(); /* Read the first line of input, which should be a word indicating @@ -1302,13 +1146,22 @@ DerivationBuildingGoal::checkPathValidity(std::map & // without the `ca-derivations` experimental flag). 
worker.store.registerDrvOutput( Realisation{ + { + .outPath = info.known->path, + }, drvOutput, - info.known->path, }); } } if (info.known && info.known->isValid()) - validOutputs.emplace(i.first, Realisation{drvOutput, info.known->path}); + validOutputs.emplace( + i.first, + Realisation{ + { + .outPath = info.known->path, + }, + drvOutput, + }); } bool allValid = true; diff --git a/src/libstore/build/derivation-check.cc b/src/libstore/build/derivation-check.cc index 64e824368bd..677546e878b 100644 --- a/src/libstore/build/derivation-check.cc +++ b/src/libstore/build/derivation-check.cc @@ -11,7 +11,7 @@ void checkOutputs( Store & store, const StorePath & drvPath, const decltype(Derivation::outputs) & drvOutputs, - const decltype(DerivationOptions::outputChecks) & outputChecks, + const decltype(DerivationOptions::outputChecks) & outputChecks, const std::map & outputs, Activity & act) { @@ -93,7 +93,7 @@ void checkOutputs( return std::make_pair(std::move(pathsDone), closureSize); }; - auto applyChecks = [&](const DerivationOptions::OutputChecks & checks) { + auto applyChecks = [&](const DerivationOptions::OutputChecks & checks) { if (checks.maxSize && info.narSize > *checks.maxSize) throw BuildError( BuildResult::Failure::OutputRejected, @@ -113,28 +113,33 @@ void checkOutputs( *checks.maxClosureSize); } - auto checkRefs = [&](const StringSet & value, bool allowed, bool recursive) { + auto checkRefs = [&](const std::set> & value, bool allowed, bool recursive) { /* Parse a list of reference specifiers. Each element must either be a store path, or the symbolic name of the output of the derivation (such as `out'). 
*/ StorePathSet spec; for (auto & i : value) { - if (store.isStorePath(i)) - spec.insert(store.parseStorePath(i)); - else if (auto output = get(outputs, i)) - spec.insert(output->path); - else { - std::string outputsListing = - concatMapStringsSep(", ", outputs, [](auto & o) { return o.first; }); - throw BuildError( - BuildResult::Failure::OutputRejected, - "derivation '%s' output check for '%s' contains an illegal reference specifier '%s'," - " expected store path or output name (one of [%s])", - store.printStorePath(drvPath), - outputName, - i, - outputsListing); - } + std::visit( + overloaded{ + [&](const StorePath & path) { spec.insert(path); }, + [&](const OutputName & refOutputName) { + if (auto output = get(outputs, refOutputName)) + spec.insert(output->path); + else { + std::string outputsListing = + concatMapStringsSep(", ", outputs, [](auto & o) { return o.first; }); + throw BuildError( + BuildResult::Failure::OutputRejected, + "derivation '%s' output check for '%s' contains output name '%s'," + " but this is not a valid output of this derivation." + " (Valid outputs are [%s].)", + store.printStorePath(drvPath), + outputName, + refOutputName, + outputsListing); + } + }}, + i); } auto used = recursive ? 
getClosure(info.path).first : info.references; @@ -188,8 +193,8 @@ void checkOutputs( std::visit( overloaded{ - [&](const DerivationOptions::OutputChecks & checks) { applyChecks(checks); }, - [&](const std::map & checksPerOutput) { + [&](const DerivationOptions::OutputChecks & checks) { applyChecks(checks); }, + [&](const std::map::OutputChecks> & checksPerOutput) { if (auto outputChecks = get(checksPerOutput, outputName)) applyChecks(*outputChecks); diff --git a/src/libstore/build/derivation-check.hh b/src/libstore/build/derivation-check.hh index d4808f9a2fc..ee2d0122952 100644 --- a/src/libstore/build/derivation-check.hh +++ b/src/libstore/build/derivation-check.hh @@ -21,7 +21,7 @@ void checkOutputs( Store & store, const StorePath & drvPath, const decltype(Derivation::outputs) & drvOutputs, - const decltype(DerivationOptions::outputChecks) & drvOptions, + const decltype(DerivationOptions::outputChecks) & drvOptions, const std::map & outputs, Activity & act); diff --git a/src/libstore/build/derivation-env-desugar.cc b/src/libstore/build/derivation-env-desugar.cc index d6e002d911e..75b62c11650 100644 --- a/src/libstore/build/derivation-env-desugar.cc +++ b/src/libstore/build/derivation-env-desugar.cc @@ -18,14 +18,17 @@ std::string & DesugaredEnv::atFileEnvPair(std::string_view name, std::string fil } DesugaredEnv DesugaredEnv::create( - Store & store, const Derivation & drv, const DerivationOptions & drvOptions, const StorePathSet & inputPaths) + Store & store, + const Derivation & drv, + const DerivationOptions & drvOptions, + const StorePathSet & inputPaths) { DesugaredEnv res; if (drv.structuredAttrs) { auto json = drv.structuredAttrs->prepareStructuredAttrs(store, drvOptions, inputPaths, drv.outputs); res.atFileEnvPair("NIX_ATTRS_SH_FILE", ".attrs.sh") = StructuredAttrs::writeShell(json); - res.atFileEnvPair("NIX_ATTRS_JSON_FILE", ".attrs.json") = json.dump(); + res.atFileEnvPair("NIX_ATTRS_JSON_FILE", ".attrs.json") = static_cast(std::move(json)).dump(); } 
else { /* In non-structured mode, set all bindings either directory in the environment or via a file, as specified by @@ -46,7 +49,7 @@ DesugaredEnv DesugaredEnv::create( } /* Handle exportReferencesGraph(), if set. */ - for (auto & [fileName, storePaths] : drvOptions.getParsedExportReferencesGraph(store)) { + for (auto & [fileName, storePaths] : drvOptions.exportReferencesGraph) { /* Write closure info to . */ res.extraFiles.insert_or_assign( fileName, store.makeValidityRegistration(store.exportReferences(storePaths, inputPaths), false, false)); diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index fcbb9205737..1908f957313 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -1,5 +1,6 @@ #include "nix/store/build/derivation-goal.hh" #include "nix/store/build/derivation-building-goal.hh" +#include "nix/store/build/derivation-resolution-goal.hh" #ifndef _WIN32 // TODO enable build hook on Windows # include "nix/store/build/hook-instance.hh" # include "nix/store/build/derivation-builder.hh" @@ -29,10 +30,12 @@ DerivationGoal::DerivationGoal( const Derivation & drv, const OutputName & wantedOutput, Worker & worker, - BuildMode buildMode) - : Goal(worker, haveDerivation()) + BuildMode buildMode, + bool storeDerivation) + : Goal(worker, haveDerivation(storeDerivation)) , drvPath(drvPath) , wantedOutput(wantedOutput) + , drv{std::make_unique(drv)} , outputHash{[&] { auto outputHashes = staticOutputHashes(worker.evalStore, drv); if (auto * mOutputHash = get(outputHashes, wantedOutput)) @@ -41,11 +44,8 @@ DerivationGoal::DerivationGoal( }()} , buildMode(buildMode) { - this->drv = std::make_unique(drv); - name = - fmt("building of '%s' from in-memory derivation", - DerivedPath::Built{makeConstantStorePathRef(drvPath), drv.outputNames()}.to_string(worker.store)); + name = fmt("getting output '%s' from derivation '%s'", wantedOutput, worker.store.printStorePath(drvPath)); trace("created"); 
mcExpectedBuilds = std::make_unique>(worker.expectedBuilds); @@ -54,23 +54,20 @@ DerivationGoal::DerivationGoal( std::string DerivationGoal::key() { - /* Ensure that derivations get built in order of their name, - i.e. a derivation named "aardvark" always comes before - "baboon". And substitution goals always happen before - derivation goals (due to "b$"). */ - return "b$" + std::string(drvPath.name()) + "$" + SingleDerivedPath::Built{ + return "db$" + std::string(drvPath.name()) + "$" + SingleDerivedPath::Built{ .drvPath = makeConstantStorePathRef(drvPath), .output = wantedOutput, }.to_string(worker.store); } -Goal::Co DerivationGoal::haveDerivation() +Goal::Co DerivationGoal::haveDerivation(bool storeDerivation) { trace("have derivation"); - auto drvOptions = [&]() -> DerivationOptions { + auto drvOptions = [&]() -> DerivationOptions { try { - return DerivationOptions::fromStructuredAttrs(drv->env, drv->structuredAttrs); + return derivationOptionsFromStructuredAttrs( + worker.store, drv->inputDrvs, drv->env, get(drv->structuredAttrs)); } catch (Error & e) { e.addTrace({}, "while parsing derivation '%s'", worker.store.printStorePath(drvPath)); throw; @@ -104,8 +101,7 @@ Goal::Co DerivationGoal::haveDerivation() them. */ if (settings.useSubstitutes && drvOptions.substitutesAllowed()) { if (!checkResult) - waitees.insert(upcast_goal(worker.makeDrvOutputSubstitutionGoal( - DrvOutput{outputHash, wantedOutput}, buildMode == bmRepair ? 
Repair : NoRepair))); + waitees.insert(upcast_goal(worker.makeDrvOutputSubstitutionGoal(DrvOutput{outputHash, wantedOutput}))); else { auto * cap = getDerivationCA(*drv); waitees.insert(upcast_goal(worker.makePathSubstitutionGoal( @@ -146,9 +142,101 @@ Goal::Co DerivationGoal::haveDerivation() worker.store.printStorePath(drvPath)); } + auto resolutionGoal = worker.makeDerivationResolutionGoal(drvPath, *drv, buildMode); + { + Goals waitees{resolutionGoal}; + co_await await(std::move(waitees)); + } + if (nrFailed != 0) { + co_return doneFailure({BuildResult::Failure::DependencyFailed, "Build failed due to failed dependency"}); + } + + if (resolutionGoal->resolvedDrv) { + auto & [pathResolved, drvResolved] = *resolutionGoal->resolvedDrv; + + auto resolvedDrvGoal = + worker.makeDerivationGoal(pathResolved, drvResolved, wantedOutput, buildMode, /*storeDerivation=*/true); + { + Goals waitees{resolvedDrvGoal}; + co_await await(std::move(waitees)); + } + + trace("resolved derivation finished"); + + auto resolvedResult = resolvedDrvGoal->buildResult; + + // No `std::visit` for coroutines yet + if (auto * successP = resolvedResult.tryGetSuccess()) { + auto & success = *successP; + auto outputHashes = staticOutputHashes(worker.evalStore, *drv); + auto resolvedHashes = staticOutputHashes(worker.store, drvResolved); + + auto outputHash = get(outputHashes, wantedOutput); + auto resolvedHash = get(resolvedHashes, wantedOutput); + if ((!outputHash) || (!resolvedHash)) + throw Error( + "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolve)", + worker.store.printStorePath(drvPath), + wantedOutput); + + auto realisation = [&] { + auto take1 = get(success.builtOutputs, wantedOutput); + if (take1) + return static_cast(*take1); + + /* The above `get` should work. But stateful tracking of + outputs in resolvedResult, this can get out of sync with the + store, which is our actual source of truth. For now we just + check the store directly if it fails. 
*/ + auto take2 = worker.evalStore.queryRealisation( + DrvOutput{ + .drvHash = *resolvedHash, + .outputName = wantedOutput, + }); + if (take2) + return *take2; + + throw Error( + "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/realisation)", + worker.store.printStorePath(pathResolved), + wantedOutput); + }(); + + if (!drv->type().isImpure()) { + Realisation newRealisation{ + realisation, + { + .drvHash = *outputHash, + .outputName = wantedOutput, + }}; + newRealisation.signatures.clear(); + if (!drv->type().isFixed()) { + auto & drvStore = worker.evalStore.isValidPath(drvPath) ? worker.evalStore : worker.store; + newRealisation.dependentRealisations = + drvOutputReferences(worker.store, *drv, realisation.outPath, &drvStore); + } + worker.store.signRealisation(newRealisation); + worker.store.registerDrvOutput(newRealisation); + } + + auto status = success.status; + if (status == BuildResult::Success::AlreadyValid) + status = BuildResult::Success::ResolvesToAlreadyValid; + + co_return doneSuccess(status, std::move(realisation)); + } else if (resolvedResult.tryGetFailure()) { + co_return doneFailure({ + BuildResult::Failure::DependencyFailed, + "build of resolved derivation '%s' failed", + worker.store.printStorePath(pathResolved), + }); + } else + assert(false); + } + /* Give up on substitution for the output we want, actually build this derivation */ - auto g = worker.makeDerivationBuildingGoal(drvPath, *drv, buildMode); + auto g = worker.makeDerivationBuildingGoal(drvPath, *drv, buildMode, storeDerivation); /* We will finish with it ourselves, as if we were the derivational goal. */ g->preserveException = true; @@ -169,7 +257,16 @@ Goal::Co DerivationGoal::haveDerivation() /* In checking mode, the builder will not register any outputs. So we want to make sure the ones that we wanted to check are properly there. 
*/ - success.builtOutputs = {{wantedOutput, assertPathValidity()}}; + success.builtOutputs = {{ + wantedOutput, + { + assertPathValidity(), + { + .drvHash = outputHash, + .outputName = wantedOutput, + }, + }, + }}; } else { /* Otherwise the builder will give us info for out output, but also for other outputs. Filter down to just our output so as @@ -188,12 +285,16 @@ Goal::Co DerivationGoal::haveDerivation() if (success.builtOutputs.count(wantedOutput) == 0) { debug( "BUG! wanted output '%s' not in builtOutputs, working around by adding it manually", wantedOutput); - auto realisation = assertPathValidity(); - realisation.id = DrvOutput{ - .drvHash = outputHash, - .outputName = wantedOutput, - }; - success.builtOutputs.emplace(wantedOutput, std::move(realisation)); + success.builtOutputs = {{ + wantedOutput, + { + assertPathValidity(), + { + .drvHash = outputHash, + .outputName = wantedOutput, + }, + }, + }}; } } } @@ -201,18 +302,6 @@ Goal::Co DerivationGoal::haveDerivation() co_return amDone(g->exitCode, g->ex); } -/** - * Used for `inputGoals` local variable below - */ -struct value_comparison -{ - template - bool operator()(const ref & lhs, const ref & rhs) const - { - return *lhs < *rhs; - } -}; - Goal::Co DerivationGoal::repairClosure() { assert(!drv->type().isImpure()); @@ -297,18 +386,20 @@ Goal::Co DerivationGoal::repairClosure() co_return doneSuccess(BuildResult::Success::AlreadyValid, assertPathValidity()); } -std::optional> DerivationGoal::checkPathValidity() +std::optional> DerivationGoal::checkPathValidity() { if (drv->type().isImpure()) return std::nullopt; auto drvOutput = DrvOutput{outputHash, wantedOutput}; - std::optional mRealisation; + std::optional mRealisation; if (auto * mOutput = get(drv->outputs, wantedOutput)) { if (auto mPath = mOutput->path(worker.store, drv->name, wantedOutput)) { - mRealisation = Realisation{drvOutput, std::move(*mPath)}; + mRealisation = UnkeyedRealisation{ + .outPath = std::move(*mPath), + }; } } else { throw Error( 
@@ -336,7 +427,14 @@ std::optional> DerivationGoal::checkPathValid // derivation, and the output path is valid, but we don't have // its realisation stored (probably because it has been built // without the `ca-derivations` experimental flag). - worker.store.registerDrvOutput(*mRealisation); + worker.store.registerDrvOutput( + Realisation{ + *mRealisation, + { + .drvHash = outputHash, + .outputName = wantedOutput, + }, + }); } return {{*mRealisation, status}}; @@ -344,7 +442,7 @@ std::optional> DerivationGoal::checkPathValid return std::nullopt; } -Realisation DerivationGoal::assertPathValidity() +UnkeyedRealisation DerivationGoal::assertPathValidity() { auto checkResult = checkPathValidity(); if (!(checkResult && checkResult->second == PathStatus::Valid)) @@ -352,11 +450,20 @@ Realisation DerivationGoal::assertPathValidity() return checkResult->first; } -Goal::Done DerivationGoal::doneSuccess(BuildResult::Success::Status status, Realisation builtOutput) +Goal::Done DerivationGoal::doneSuccess(BuildResult::Success::Status status, UnkeyedRealisation builtOutput) { buildResult.inner = BuildResult::Success{ .status = status, - .builtOutputs = {{wantedOutput, std::move(builtOutput)}}, + .builtOutputs = {{ + wantedOutput, + { + std::move(builtOutput), + DrvOutput{ + .drvHash = outputHash, + .outputName = wantedOutput, + }, + }, + }}, }; logger->result( diff --git a/src/libstore/build/derivation-resolution-goal.cc b/src/libstore/build/derivation-resolution-goal.cc new file mode 100644 index 00000000000..6cb9702f4f6 --- /dev/null +++ b/src/libstore/build/derivation-resolution-goal.cc @@ -0,0 +1,191 @@ +#include "nix/store/build/derivation-resolution-goal.hh" +#include "nix/store/build/worker.hh" +#include "nix/util/util.hh" + +#include + +namespace nix { + +DerivationResolutionGoal::DerivationResolutionGoal( + const StorePath & drvPath, const Derivation & drv, Worker & worker, BuildMode buildMode) + : Goal(worker, resolveDerivation()) + , drvPath(drvPath) + , 
drv{std::make_unique(drv)} + , buildMode{buildMode} +{ + name = fmt("resolving derivation '%s'", worker.store.printStorePath(drvPath)); + trace("created"); +} + +std::string DerivationResolutionGoal::key() +{ + return "dc$" + std::string(drvPath.name()) + "$" + worker.store.printStorePath(drvPath); +} + +/** + * Used for `inputGoals` local variable below + */ +struct value_comparison +{ + template + bool operator()(const ref & lhs, const ref & rhs) const + { + return *lhs < *rhs; + } +}; + +Goal::Co DerivationResolutionGoal::resolveDerivation() +{ + Goals waitees; + + std::map, GoalPtr, value_comparison> inputGoals; + + { + std::function, const DerivedPathMap::ChildNode &)> + addWaiteeDerivedPath; + + addWaiteeDerivedPath = [&](ref inputDrv, + const DerivedPathMap::ChildNode & inputNode) { + if (!inputNode.value.empty()) { + auto g = worker.makeGoal( + DerivedPath::Built{ + .drvPath = inputDrv, + .outputs = inputNode.value, + }, + buildMode == bmRepair ? bmRepair : bmNormal); + inputGoals.insert_or_assign(inputDrv, g); + waitees.insert(std::move(g)); + } + for (const auto & [outputName, childNode] : inputNode.childMap) + addWaiteeDerivedPath( + make_ref(SingleDerivedPath::Built{inputDrv, outputName}), childNode); + }; + + for (const auto & [inputDrvPath, inputNode] : drv->inputDrvs.map) { + /* Ensure that pure, non-fixed-output derivations don't + depend on impure derivations. 
*/ + if (experimentalFeatureSettings.isEnabled(Xp::ImpureDerivations) && !drv->type().isImpure() + && !drv->type().isFixed()) { + auto inputDrv = worker.evalStore.readDerivation(inputDrvPath); + if (inputDrv.type().isImpure()) + throw Error( + "pure derivation '%s' depends on impure derivation '%s'", + worker.store.printStorePath(drvPath), + worker.store.printStorePath(inputDrvPath)); + } + + addWaiteeDerivedPath(makeConstantStorePathRef(inputDrvPath), inputNode); + } + } + + co_await await(std::move(waitees)); + + trace("all inputs realised"); + + if (nrFailed != 0) { + auto msg = + fmt("Cannot build '%s'.\n" + "Reason: " ANSI_RED "%d %s failed" ANSI_NORMAL ".", + Magenta(worker.store.printStorePath(drvPath)), + nrFailed, + nrFailed == 1 ? "dependency" : "dependencies"); + msg += showKnownOutputs(worker.store, *drv); + co_return amDone(ecFailed, {BuildError(BuildResult::Failure::DependencyFailed, msg)}); + } + + /* Gather information necessary for computing the closure and/or + running the build hook. */ + + /* Determine the full set of input paths. */ + + /* First, the input derivations. */ + { + auto & fullDrv = *drv; + + auto drvType = fullDrv.type(); + bool resolveDrv = + std::visit( + overloaded{ + [&](const DerivationType::InputAddressed & ia) { + /* must resolve if deferred. */ + return ia.deferred; + }, + [&](const DerivationType::ContentAddressed & ca) { + return !fullDrv.inputDrvs.map.empty() + && (ca.fixed + /* Can optionally resolve if fixed, which is good + for avoiding unnecessary rebuilds. */ + ? experimentalFeatureSettings.isEnabled(Xp::CaDerivations) + /* Must resolve if floating and there are any inputs + drvs. 
*/ + : true); + }, + [&](const DerivationType::Impure &) { return true; }}, + drvType.raw) + /* no inputs are outputs of dynamic derivations */ + || std::ranges::any_of(fullDrv.inputDrvs.map.begin(), fullDrv.inputDrvs.map.end(), [](auto & pair) { + return !pair.second.childMap.empty(); + }); + + if (resolveDrv && !fullDrv.inputDrvs.map.empty()) { + experimentalFeatureSettings.require(Xp::CaDerivations); + + /* We are be able to resolve this derivation based on the + now-known results of dependencies. If so, we become a + stub goal aliasing that resolved derivation goal. */ + std::optional attempt = fullDrv.tryResolve( + worker.store, + [&](ref drvPath, const std::string & outputName) -> std::optional { + auto mEntry = get(inputGoals, drvPath); + if (!mEntry) + return std::nullopt; + + auto & buildResult = (*mEntry)->buildResult; + return std::visit( + overloaded{ + [](const BuildResult::Failure &) -> std::optional { return std::nullopt; }, + [&](const BuildResult::Success & success) -> std::optional { + auto i = get(success.builtOutputs, outputName); + if (!i) + return std::nullopt; + + return i->outPath; + }, + }, + buildResult.inner); + }); + if (!attempt) { + /* TODO (impure derivations-induced tech debt) (see below): + The above attempt should have found it, but because we manage + inputDrvOutputs statefully, sometimes it gets out of sync with + the real source of truth (store). So we query the store + directly if there's a problem. 
*/ + attempt = fullDrv.tryResolve(worker.store, &worker.evalStore); + } + assert(attempt); + + auto pathResolved = writeDerivation(worker.store, *attempt, NoRepair, /*readOnly =*/true); + + auto msg = + fmt("resolved derivation: '%s' -> '%s'", + worker.store.printStorePath(drvPath), + worker.store.printStorePath(pathResolved)); + act = std::make_unique( + *logger, + lvlInfo, + actBuildWaiting, + msg, + Logger::Fields{ + worker.store.printStorePath(drvPath), + worker.store.printStorePath(pathResolved), + }); + + resolvedDrv = + std::make_unique>(std::move(pathResolved), *std::move(attempt)); + } + } + + co_return amDone(ecSuccess, std::nullopt); +} + +} // namespace nix diff --git a/src/libstore/build/derivation-trampoline-goal.cc b/src/libstore/build/derivation-trampoline-goal.cc index 205f5c427ee..963156aa584 100644 --- a/src/libstore/build/derivation-trampoline-goal.cc +++ b/src/libstore/build/derivation-trampoline-goal.cc @@ -31,7 +31,7 @@ DerivationTrampolineGoal::DerivationTrampolineGoal( void DerivationTrampolineGoal::commonInit() { name = - fmt("outer obtaining drv from '%s' and then building outputs %s", + fmt("obtaining derivation from '%s' and then building outputs %s", drvReq->to_string(worker.store), std::visit( overloaded{ @@ -58,18 +58,12 @@ static StorePath pathPartOfReq(const SingleDerivedPath & req) std::string DerivationTrampolineGoal::key() { - /* Ensure that derivations get built in order of their name, - i.e. a derivation named "aardvark" always comes before "baboon". And - substitution goals, derivation goals, and derivation building goals always happen before - derivation goals (due to "bt$"). 
*/ - return "bt$" + std::string(pathPartOfReq(*drvReq).name()) + "$" + DerivedPath::Built{ + return "da$" + std::string(pathPartOfReq(*drvReq).name()) + "$" + DerivedPath::Built{ .drvPath = drvReq, .outputs = wantedOutputs, }.to_string(worker.store); } -void DerivationTrampolineGoal::timedOut(Error && ex) {} - Goal::Co DerivationTrampolineGoal::init() { trace("need to load derivation from file"); @@ -151,7 +145,7 @@ Goal::Co DerivationTrampolineGoal::haveDerivation(StorePath drvPath, Derivation /* Build this step! */ for (auto & output : resolvedWantedOutputs) { - auto g = upcast_goal(worker.makeDerivationGoal(drvPath, drv, output, buildMode)); + auto g = upcast_goal(worker.makeDerivationGoal(drvPath, drv, output, buildMode, false)); g->preserveException = true; /* We will finish with it ourselves, as if we were the derivational goal. */ concreteDrvGoals.insert(std::move(g)); diff --git a/src/libstore/build/drv-output-substitution-goal.cc b/src/libstore/build/drv-output-substitution-goal.cc index b6ace47847d..8d0a307beda 100644 --- a/src/libstore/build/drv-output-substitution-goal.cc +++ b/src/libstore/build/drv-output-substitution-goal.cc @@ -8,8 +8,7 @@ namespace nix { -DrvOutputSubstitutionGoal::DrvOutputSubstitutionGoal( - const DrvOutput & id, Worker & worker, RepairFlag repair, std::optional ca) +DrvOutputSubstitutionGoal::DrvOutputSubstitutionGoal(const DrvOutput & id, Worker & worker) : Goal(worker, init()) , id(id) { @@ -43,10 +42,10 @@ Goal::Co DrvOutputSubstitutionGoal::init() outPipe->createAsyncPipe(worker.ioport.get()); #endif - auto promise = std::make_shared>>(); + auto promise = std::make_shared>>(); sub->queryRealisation( - id, {[outPipe(outPipe), promise(promise)](std::future> res) { + id, {[outPipe(outPipe), promise(promise)](std::future> res) { try { Finally updateStats([&]() { outPipe->writeSide.close(); }); promise->set_value(res.get()); @@ -75,7 +74,7 @@ Goal::Co DrvOutputSubstitutionGoal::init() * The realisation corresponding to the given 
output id. * Will be filled once we can get it. */ - std::shared_ptr outputInfo; + std::shared_ptr outputInfo; try { outputInfo = promise->get_future().get(); @@ -113,7 +112,21 @@ Goal::Co DrvOutputSubstitutionGoal::init() if (failed) continue; - co_return realisationFetched(std::move(waitees), outputInfo, sub); + waitees.insert(worker.makePathSubstitutionGoal(outputInfo->outPath)); + + co_await await(std::move(waitees)); + + trace("output path substituted"); + + if (nrFailed > 0) { + debug("The output path of the derivation output '%s' could not be substituted", id.to_string()); + co_return amDone(nrNoSubstituters > 0 ? ecNoSubstituters : ecFailed); + } + + worker.store.registerDrvOutput({*outputInfo, id}); + + trace("finished"); + co_return amDone(ecSuccess); } /* None left. Terminate this goal and let someone else deal @@ -131,30 +144,8 @@ Goal::Co DrvOutputSubstitutionGoal::init() co_return amDone(substituterFailed ? ecFailed : ecNoSubstituters); } -Goal::Co DrvOutputSubstitutionGoal::realisationFetched( - Goals waitees, std::shared_ptr outputInfo, nix::ref sub) -{ - waitees.insert(worker.makePathSubstitutionGoal(outputInfo->outPath)); - - co_await await(std::move(waitees)); - - trace("output path substituted"); - - if (nrFailed > 0) { - debug("The output path of the derivation output '%s' could not be substituted", id.to_string()); - co_return amDone(nrNoSubstituters > 0 ? ecNoSubstituters : ecFailed); - } - - worker.store.registerDrvOutput(*outputInfo); - - trace("finished"); - co_return amDone(ecSuccess); -} - std::string DrvOutputSubstitutionGoal::key() { - /* "a$" ensures substitution goals happen before derivation - goals. 
*/ return "a$" + std::string(id.to_string()); } diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc index a4d3cbb555e..b2e321c7238 100644 --- a/src/libstore/build/substitution-goal.cc +++ b/src/libstore/build/substitution-goal.cc @@ -282,16 +282,18 @@ Goal::Co PathSubstitutionGoal::tryToRun( try { promise.get_future().get(); } catch (std::exception & e) { - printError(e.what()); - /* Cause the parent build to fail unless --fallback is given, or the substitute has disappeared. The latter case behaves the same as the substitute never having existed in the first place. */ try { throw; - } catch (SubstituteGone &) { + } catch (SubstituteGone & sg) { + /* Missing NARs are expected when they've been garbage collected. + This is not a failure, so log as a warning instead of an error. */ + logWarning({.msg = sg.info().msg}); } catch (...) { + printError(e.what()); substituterFailed = true; } diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc index 3e6e0bef01f..3663a2c919f 100644 --- a/src/libstore/build/worker.cc +++ b/src/libstore/build/worker.cc @@ -4,6 +4,7 @@ #include "nix/store/build/substitution-goal.hh" #include "nix/store/build/drv-output-substitution-goal.hh" #include "nix/store/build/derivation-goal.hh" +#include "nix/store/build/derivation-resolution-goal.hh" #include "nix/store/build/derivation-building-goal.hh" #include "nix/store/build/derivation-trampoline-goal.hh" #ifndef _WIN32 // TODO Enable building on Windows @@ -75,15 +76,26 @@ std::shared_ptr Worker::makeDerivationTrampolineGoal( } std::shared_ptr Worker::makeDerivationGoal( - const StorePath & drvPath, const Derivation & drv, const OutputName & wantedOutput, BuildMode buildMode) + const StorePath & drvPath, + const Derivation & drv, + const OutputName & wantedOutput, + BuildMode buildMode, + bool storeDerivation) { - return initGoalIfNeeded(derivationGoals[drvPath][wantedOutput], drvPath, drv, wantedOutput, *this, buildMode); + return 
initGoalIfNeeded( + derivationGoals[drvPath][wantedOutput], drvPath, drv, wantedOutput, *this, buildMode, storeDerivation); +} + +std::shared_ptr +Worker::makeDerivationResolutionGoal(const StorePath & drvPath, const Derivation & drv, BuildMode buildMode) +{ + return initGoalIfNeeded(derivationResolutionGoals[drvPath], drvPath, drv, *this, buildMode); } -std::shared_ptr -Worker::makeDerivationBuildingGoal(const StorePath & drvPath, const Derivation & drv, BuildMode buildMode) +std::shared_ptr Worker::makeDerivationBuildingGoal( + const StorePath & drvPath, const Derivation & drv, BuildMode buildMode, bool storeDerivation) { - return initGoalIfNeeded(derivationBuildingGoals[drvPath], drvPath, drv, *this, buildMode); + return initGoalIfNeeded(derivationBuildingGoals[drvPath], drvPath, drv, *this, buildMode, storeDerivation); } std::shared_ptr @@ -92,10 +104,9 @@ Worker::makePathSubstitutionGoal(const StorePath & path, RepairFlag repair, std: return initGoalIfNeeded(substitutionGoals[path], path, *this, repair, ca); } -std::shared_ptr -Worker::makeDrvOutputSubstitutionGoal(const DrvOutput & id, RepairFlag repair, std::optional ca) +std::shared_ptr Worker::makeDrvOutputSubstitutionGoal(const DrvOutput & id) { - return initGoalIfNeeded(drvOutputSubstitutionGoals[id], id, *this, repair, ca); + return initGoalIfNeeded(drvOutputSubstitutionGoals[id], id, *this); } GoalPtr Worker::makeGoal(const DerivedPath & req, BuildMode buildMode) @@ -158,6 +169,8 @@ void Worker::removeGoal(GoalPtr goal) nix::removeGoal(drvGoal, derivationTrampolineGoals.map); else if (auto drvGoal = std::dynamic_pointer_cast(goal)) nix::removeGoal(drvGoal, derivationGoals); + else if (auto drvResolutionGoal = std::dynamic_pointer_cast(goal)) + nix::removeGoal(drvResolutionGoal, derivationResolutionGoals); else if (auto drvBuildingGoal = std::dynamic_pointer_cast(goal)) nix::removeGoal(drvBuildingGoal, derivationBuildingGoals); else if (auto subGoal = std::dynamic_pointer_cast(goal)) @@ -515,15 +528,9 
@@ bool Worker::pathContentsGood(const StorePath & path) return i->second; printInfo("checking path '%s'...", store.printStorePath(path)); auto info = store.queryPathInfo(path); - bool res; - if (!pathExists(store.printStorePath(path))) - res = false; - else { - auto current = hashPath( - {store.getFSAccessor(), CanonPath(path.to_string())}, - FileIngestionMethod::NixArchive, - info->narHash.algo) - .first; + bool res = false; + if (auto accessor = store.getFSAccessor(path, /*requireValidPath=*/false)) { + auto current = hashPath({ref{accessor}}, FileIngestionMethod::NixArchive, info->narHash.algo).first; Hash nullHash(HashAlgorithm::SHA256); res = info->narHash == nullHash || info->narHash == current; } diff --git a/src/libstore/builtins/buildenv.cc b/src/libstore/builtins/buildenv.cc index 22ed8d8076e..4db37d43a79 100644 --- a/src/libstore/builtins/buildenv.cc +++ b/src/libstore/builtins/buildenv.cc @@ -10,6 +10,12 @@ namespace nix { +RegisterBuiltinBuilder::BuiltinBuilders & RegisterBuiltinBuilder::builtinBuilders() +{ + static RegisterBuiltinBuilder::BuiltinBuilders builders; + return builders; +} + namespace { struct State diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index df056954e27..126fb922eba 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -33,6 +33,7 @@ static void builtinFetchurl(const BuiltinBuilderContext & ctx) /* Note: have to use a fresh fileTransfer here because we're in a forked process. 
*/ + debug("[pid=%d] builtin:fetchurl creating fresh FileTransfer instance", getpid()); auto fileTransfer = makeFileTransfer(); auto fetch = [&](const std::string & url) { @@ -40,6 +41,18 @@ static void builtinFetchurl(const BuiltinBuilderContext & ctx) FileTransferRequest request(VerbatimURL{url}); request.decompress = false; +#if NIX_WITH_AWS_AUTH + // Use pre-resolved credentials if available + if (ctx.awsCredentials && request.uri.scheme() == "s3") { + debug("[pid=%d] Using pre-resolved AWS credentials from parent process", getpid()); + request.usernameAuth = UsernameAuth{ + .username = ctx.awsCredentials->accessKeyId, + .password = ctx.awsCredentials->secretAccessKey, + }; + request.preResolvedAwsSessionToken = ctx.awsCredentials->sessionToken; + } +#endif + auto decompressor = makeDecompressionSink(unpack && hasSuffix(mainUrl, ".xz") ? "xz" : "none", sink); fileTransfer->download(std::move(request), *decompressor); decompressor->finish(); diff --git a/src/libstore/content-address.cc b/src/libstore/content-address.cc index 9a57e3aa618..497c2c5b47c 100644 --- a/src/libstore/content-address.cc +++ b/src/libstore/content-address.cc @@ -1,6 +1,7 @@ #include "nix/util/args.hh" #include "nix/store/content-address.hh" #include "nix/util/split.hh" +#include "nix/util/json-utils.hh" namespace nix { @@ -300,3 +301,36 @@ Hash ContentAddressWithReferences::getHash() const } } // namespace nix + +namespace nlohmann { + +using namespace nix; + +ContentAddressMethod adl_serializer::from_json(const json & json) +{ + return ContentAddressMethod::parse(getString(json)); +} + +void adl_serializer::to_json(json & json, const ContentAddressMethod & m) +{ + json = m.render(); +} + +ContentAddress adl_serializer::from_json(const json & json) +{ + auto obj = getObject(json); + return { + .method = adl_serializer::from_json(valueAt(obj, "method")), + .hash = valueAt(obj, "hash"), + }; +} + +void adl_serializer::to_json(json & json, const ContentAddress & ca) +{ + json = { + {"method", 
ca.method}, + {"hash", ca.hash}, + }; +} + +} // namespace nlohmann diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 768dc93994c..f71f66db5d8 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -313,7 +313,7 @@ static void performOp( switch (op) { case WorkerProto::Op::IsValidPath: { - auto path = store->parseStorePath(readString(conn.from)); + auto path = WorkerProto::Serialise::read(*store, rconn); logger->startWork(); bool result = store->isValidPath(path); logger->stopWork(); @@ -340,7 +340,7 @@ static void performOp( } case WorkerProto::Op::HasSubstitutes: { - auto path = store->parseStorePath(readString(conn.from)); + auto path = WorkerProto::Serialise::read(*store, rconn); logger->startWork(); StorePathSet paths; // FIXME paths.insert(path); @@ -360,7 +360,7 @@ static void performOp( } case WorkerProto::Op::QueryPathHash: { - auto path = store->parseStorePath(readString(conn.from)); + auto path = WorkerProto::Serialise::read(*store, rconn); logger->startWork(); auto hash = store->queryPathInfo(path)->narHash; logger->stopWork(); @@ -372,7 +372,7 @@ static void performOp( case WorkerProto::Op::QueryReferrers: case WorkerProto::Op::QueryValidDerivers: case WorkerProto::Op::QueryDerivationOutputs: { - auto path = store->parseStorePath(readString(conn.from)); + auto path = WorkerProto::Serialise::read(*store, rconn); logger->startWork(); StorePathSet paths; if (op == WorkerProto::Op::QueryReferences) @@ -390,7 +390,7 @@ static void performOp( } case WorkerProto::Op::QueryDerivationOutputNames: { - auto path = store->parseStorePath(readString(conn.from)); + auto path = WorkerProto::Serialise::read(*store, rconn); logger->startWork(); auto names = store->readDerivation(path).outputNames(); logger->stopWork(); @@ -399,7 +399,7 @@ static void performOp( } case WorkerProto::Op::QueryDerivationOutputMap: { - auto path = store->parseStorePath(readString(conn.from)); + auto path = WorkerProto::Serialise::read(*store, rconn); 
logger->startWork(); auto outputs = store->queryPartialDerivationOutputMap(path); logger->stopWork(); @@ -408,11 +408,11 @@ static void performOp( } case WorkerProto::Op::QueryDeriver: { - auto path = store->parseStorePath(readString(conn.from)); + auto path = WorkerProto::Serialise::read(*store, rconn); logger->startWork(); auto info = store->queryPathInfo(path); logger->stopWork(); - conn.to << (info->deriver ? store->printStorePath(*info->deriver) : ""); + WorkerProto::write(*store, conn, info->deriver); break; } @@ -421,7 +421,7 @@ static void performOp( logger->startWork(); auto path = store->queryPathFromHashPart(hashPart); logger->stopWork(); - conn.to << (path ? store->printStorePath(*path) : ""); + WorkerProto::write(*store, conn, path); break; } @@ -506,7 +506,7 @@ static void performOp( store->addToStoreFromDump(*dumpSource, baseName, FileSerialisationMethod::NixArchive, method, hashAlgo); logger->stopWork(); - conn.to << store->printStorePath(path); + WorkerProto::write(*store, wconn, path); } break; } @@ -543,7 +543,7 @@ static void performOp( NoRepair); }); logger->stopWork(); - conn.to << store->printStorePath(path); + WorkerProto::write(*store, wconn, path); break; } @@ -592,7 +592,7 @@ static void performOp( } case WorkerProto::Op::BuildDerivation: { - auto drvPath = store->parseStorePath(readString(conn.from)); + auto drvPath = WorkerProto::Serialise::read(*store, rconn); BasicDerivation drv; /* * Note: unlike wopEnsurePath, this operation reads a @@ -669,7 +669,7 @@ static void performOp( } case WorkerProto::Op::EnsurePath: { - auto path = store->parseStorePath(readString(conn.from)); + auto path = WorkerProto::Serialise::read(*store, rconn); logger->startWork(); store->ensurePath(path); logger->stopWork(); @@ -678,7 +678,7 @@ static void performOp( } case WorkerProto::Op::AddTempRoot: { - auto path = store->parseStorePath(readString(conn.from)); + auto path = WorkerProto::Serialise::read(*store, rconn); logger->startWork(); 
store->addTempRoot(path); logger->stopWork(); @@ -734,15 +734,17 @@ static void performOp( conn.to << size; for (auto & [target, links] : roots) - for (auto & link : links) - conn.to << link << store->printStorePath(target); + for (auto & link : links) { + conn.to << link; + WorkerProto::write(*store, wconn, target); + } break; } case WorkerProto::Op::CollectGarbage: { GCOptions options; - options.action = (GCOptions::GCAction) readInt(conn.from); + options.action = WorkerProto::Serialise::read(*store, rconn); options.pathsToDelete = WorkerProto::Serialise::read(*store, rconn); conn.from >> options.ignoreLiveness >> options.maxFreed; options.censor = !trusted; @@ -801,7 +803,7 @@ static void performOp( } case WorkerProto::Op::QuerySubstitutablePathInfo: { - auto path = store->parseStorePath(readString(conn.from)); + auto path = WorkerProto::Serialise::read(*store, rconn); logger->startWork(); SubstitutablePathInfos infos; store->querySubstitutablePathInfos({{path, std::nullopt}}, infos); @@ -810,7 +812,8 @@ static void performOp( if (i == infos.end()) conn.to << 0; else { - conn.to << 1 << (i->second.deriver ? store->printStorePath(*i->second.deriver) : ""); + conn.to << 1; + WorkerProto::write(*store, wconn, i->second.deriver); WorkerProto::write(*store, wconn, i->second.references); conn.to << i->second.downloadSize << i->second.narSize; } @@ -831,8 +834,8 @@ static void performOp( logger->stopWork(); conn.to << infos.size(); for (auto & i : infos) { - conn.to << store->printStorePath(i.first) - << (i.second.deriver ? 
store->printStorePath(*i.second.deriver) : ""); + WorkerProto::write(*store, wconn, i.first); + WorkerProto::write(*store, wconn, i.second.deriver); WorkerProto::write(*store, wconn, i.second.references); conn.to << i.second.downloadSize << i.second.narSize; } @@ -848,7 +851,7 @@ static void performOp( } case WorkerProto::Op::QueryPathInfo: { - auto path = store->parseStorePath(readString(conn.from)); + auto path = WorkerProto::Serialise::read(*store, rconn); std::shared_ptr info; logger->startWork(); info = store->queryPathInfo(path); @@ -882,7 +885,7 @@ static void performOp( } case WorkerProto::Op::AddSignatures: { - auto path = store->parseStorePath(readString(conn.from)); + auto path = WorkerProto::Serialise::read(*store, rconn); StringSet sigs = readStrings(conn.from); logger->startWork(); store->addSignatures(path, sigs); @@ -892,21 +895,20 @@ static void performOp( } case WorkerProto::Op::NarFromPath: { - auto path = store->parseStorePath(readString(conn.from)); + auto path = WorkerProto::Serialise::read(*store, rconn); logger->startWork(); logger->stopWork(); - dumpPath(store->toRealPath(path), conn.to); + store->narFromPath(path, conn.to); break; } case WorkerProto::Op::AddToStoreNar: { bool repair, dontCheckSigs; - auto path = store->parseStorePath(readString(conn.from)); - auto deriver = readString(conn.from); + auto path = WorkerProto::Serialise::read(*store, rconn); + auto deriver = WorkerProto::Serialise>::read(*store, rconn); auto narHash = Hash::parseAny(readString(conn.from), HashAlgorithm::SHA256); - ValidPathInfo info{path, narHash}; - if (deriver != "") - info.deriver = store->parseStorePath(deriver); + ValidPathInfo info{path, {*store, narHash}}; + info.deriver = std::move(deriver); info.references = WorkerProto::Serialise::read(*store, rconn); conn.from >> info.registrationTime >> info.narSize >> info.ultimate; info.sigs = readStrings(conn.from); @@ -964,9 +966,9 @@ static void performOp( case WorkerProto::Op::RegisterDrvOutput: { 
logger->startWork(); if (GET_PROTOCOL_MINOR(conn.protoVersion) < 31) { - auto outputId = DrvOutput::parse(readString(conn.from)); + auto outputId = WorkerProto::Serialise::read(*store, rconn); auto outputPath = StorePath(readString(conn.from)); - store->registerDrvOutput(Realisation{.id = outputId, .outPath = outputPath}); + store->registerDrvOutput(Realisation{{.outPath = outputPath}, outputId}); } else { auto realisation = WorkerProto::Serialise::read(*store, rconn); store->registerDrvOutput(realisation); @@ -977,7 +979,7 @@ static void performOp( case WorkerProto::Op::QueryRealisation: { logger->startWork(); - auto outputId = DrvOutput::parse(readString(conn.from)); + auto outputId = WorkerProto::Serialise::read(*store, rconn); auto info = store->queryRealisation(outputId); logger->stopWork(); if (GET_PROTOCOL_MINOR(conn.protoVersion) < 31) { @@ -988,7 +990,7 @@ static void performOp( } else { std::set realisations; if (info) - realisations.insert(*info); + realisations.insert({*info, outputId}); WorkerProto::write(*store, wconn, realisations); } break; @@ -1034,6 +1036,7 @@ void processConnection(ref store, FdSource && from, FdSink && to, Trusted #ifndef _WIN32 // TODO need graceful async exit support on Windows? auto monitor = !recursive ? std::make_unique(from.fd) : nullptr; (void) monitor; // suppress warning + ReceiveInterrupts receiveInterrupts; #endif /* Exchange the greeting. 
*/ diff --git a/src/libstore/derivation-options.cc b/src/libstore/derivation-options.cc index 698485c0df4..2ead0c444c9 100644 --- a/src/libstore/derivation-options.cc +++ b/src/libstore/derivation-options.cc @@ -2,15 +2,18 @@ #include "nix/util/json-utils.hh" #include "nix/store/parsed-derivations.hh" #include "nix/store/derivations.hh" +#include "nix/store/derived-path.hh" #include "nix/store/store-api.hh" #include "nix/util/types.hh" #include "nix/util/util.hh" #include "nix/store/globals.hh" +#include "nix/util/variant-wrapper.hh" #include #include #include #include +#include namespace nix { @@ -22,9 +25,9 @@ getStringAttr(const StringMap & env, const StructuredAttrs * parsed, const std:: if (i == parsed->structuredAttrs.end()) return {}; else { - if (!i->is_string()) + if (!i->second.is_string()) throw Error("attribute '%s' of must be a string", name); - return i->get(); + return i->second.get(); } } else { auto i = env.find(name); @@ -42,9 +45,9 @@ static bool getBoolAttr(const StringMap & env, const StructuredAttrs * parsed, c if (i == parsed->structuredAttrs.end()) return def; else { - if (!i->is_boolean()) + if (!i->second.is_boolean()) throw Error("attribute '%s' must be a Boolean", name); - return i->get(); + return i->second.get(); } } else { auto i = env.find(name); @@ -63,10 +66,11 @@ getStringsAttr(const StringMap & env, const StructuredAttrs * parsed, const std: if (i == parsed->structuredAttrs.end()) return {}; else { - if (!i->is_array()) + if (!i->second.is_array()) throw Error("attribute '%s' must be a list of strings", name); + auto & a = getArray(i->second); Strings res; - for (auto j = i->begin(); j != i->end(); ++j) { + for (auto j = a.begin(); j != a.end(); ++j) { if (!j->is_string()) throw Error("attribute '%s' must be a list of strings", name); res.push_back(j->get()); @@ -89,14 +93,38 @@ getStringSetAttr(const StringMap & env, const StructuredAttrs * parsed, const st return ss ? 
(std::optional{StringSet{ss->begin(), ss->end()}}) : (std::optional{}); } -using OutputChecks = DerivationOptions::OutputChecks; +template +using OutputChecks = DerivationOptions::OutputChecks; -using OutputChecksVariant = std::variant>; +template +using OutputChecksVariant = std::variant, std::map>>; -DerivationOptions DerivationOptions::fromStructuredAttrs( - const StringMap & env, const std::optional & parsed, bool shouldWarn) +DerivationOptions derivationOptionsFromStructuredAttrs( + const StoreDirConfig & store, + const StringMap & env, + const StructuredAttrs * parsed, + bool shouldWarn, + const ExperimentalFeatureSettings & mockXpSettings) { - return fromStructuredAttrs(env, parsed ? &*parsed : nullptr); + /* Use the SingleDerivedPath version with empty inputDrvs, then + resolve. */ + DerivedPathMap emptyInputDrvs{}; + auto singleDerivedPathOptions = + derivationOptionsFromStructuredAttrs(store, emptyInputDrvs, env, parsed, shouldWarn, mockXpSettings); + + /* "Resolve" all SingleDerivedPath inputs to StorePath. */ + auto resolved = tryResolve( + singleDerivedPathOptions, + [&](ref drvPath, const std::string & outputName) -> std::optional { + // there should be nothing to resolve + assert(false); + }); + + /* Since we should never need to call the call back, there should be + no way it fails. 
*/ + assert(resolved); + + return *resolved; } static void flatten(const nlohmann::json & value, StringSet & res) @@ -110,45 +138,104 @@ static void flatten(const nlohmann::json & value, StringSet & res) throw Error("'exportReferencesGraph' value is not an array or a string"); } -DerivationOptions -DerivationOptions::fromStructuredAttrs(const StringMap & env, const StructuredAttrs * parsed, bool shouldWarn) +DerivationOptions derivationOptionsFromStructuredAttrs( + const StoreDirConfig & store, + const DerivedPathMap & inputDrvs, + const StringMap & env, + const StructuredAttrs * parsed, + bool shouldWarn, + const ExperimentalFeatureSettings & mockXpSettings) { - DerivationOptions defaults = {}; + DerivationOptions defaults = {}; + + std::map placeholders; + if (mockXpSettings.isEnabled(Xp::CaDerivations)) { + /* Initialize placeholder map from inputDrvs */ + auto initPlaceholders = [&](this const auto & initPlaceholders, + ref basePath, + const DerivedPathMap::ChildNode & node) -> void { + for (const auto & outputName : node.value) { + auto built = SingleDerivedPath::Built{ + .drvPath = basePath, + .output = outputName, + }; + placeholders.insert_or_assign( + DownstreamPlaceholder::fromSingleDerivedPathBuilt(built, mockXpSettings).render(), + std::move(built)); + } + + for (const auto & [outputName, childNode] : node.childMap) { + initPlaceholders( + make_ref(SingleDerivedPath::Built{ + .drvPath = basePath, + .output = outputName, + }), + childNode); + } + }; + + for (const auto & [drvPath, outputs] : inputDrvs.map) { + auto basePath = make_ref(SingleDerivedPath::Opaque{drvPath}); + initPlaceholders(basePath, outputs); + } + } + + auto parseSingleDerivedPath = [&](const std::string & pathS) -> SingleDerivedPath { + if (auto it = placeholders.find(pathS); it != placeholders.end()) + return it->second; + else + return SingleDerivedPath::Opaque{store.toStorePath(pathS).first}; + }; + + auto parseRef = [&](const std::string & pathS) -> DrvRef { + if (auto it = 
placeholders.find(pathS); it != placeholders.end()) + return it->second; + if (store.isStorePath(pathS)) + return SingleDerivedPath::Opaque{store.toStorePath(pathS).first}; + else + return pathS; + }; if (shouldWarn && parsed) { - if (get(parsed->structuredAttrs, "allowedReferences")) { + auto & structuredAttrs = parsed->structuredAttrs; + + if (get(structuredAttrs, "allowedReferences")) { warn( "'structuredAttrs' disables the effect of the top-level attribute 'allowedReferences'; use 'outputChecks' instead"); } - if (get(parsed->structuredAttrs, "allowedRequisites")) { + if (get(structuredAttrs, "allowedRequisites")) { warn( "'structuredAttrs' disables the effect of the top-level attribute 'allowedRequisites'; use 'outputChecks' instead"); } - if (get(parsed->structuredAttrs, "disallowedRequisites")) { + if (get(structuredAttrs, "disallowedRequisites")) { warn( "'structuredAttrs' disables the effect of the top-level attribute 'disallowedRequisites'; use 'outputChecks' instead"); } - if (get(parsed->structuredAttrs, "disallowedReferences")) { + if (get(structuredAttrs, "disallowedReferences")) { warn( "'structuredAttrs' disables the effect of the top-level attribute 'disallowedReferences'; use 'outputChecks' instead"); } - if (get(parsed->structuredAttrs, "maxSize")) { + if (get(structuredAttrs, "maxSize")) { warn( "'structuredAttrs' disables the effect of the top-level attribute 'maxSize'; use 'outputChecks' instead"); } - if (get(parsed->structuredAttrs, "maxClosureSize")) { + if (get(structuredAttrs, "maxClosureSize")) { warn( "'structuredAttrs' disables the effect of the top-level attribute 'maxClosureSize'; use 'outputChecks' instead"); } } return { - .outputChecks = [&]() -> OutputChecksVariant { + .outputChecks = [&]() -> OutputChecksVariant { if (parsed) { - std::map res; - if (auto outputChecks = get(parsed->structuredAttrs, "outputChecks")) { - for (auto & [outputName, output] : getObject(*outputChecks)) { - OutputChecks checks; + auto & structuredAttrs = 
parsed->structuredAttrs; + + std::map> res; + if (auto * outputChecks = get(structuredAttrs, "outputChecks")) { + for (auto & [outputName, output_] : getObject(*outputChecks)) { + OutputChecks checks; + + auto & output = getObject(output_); if (auto maxSize = get(output, "maxSize")) checks.maxSize = maxSize->get(); @@ -156,37 +243,62 @@ DerivationOptions::fromStructuredAttrs(const StringMap & env, const StructuredAt if (auto maxClosureSize = get(output, "maxClosureSize")) checks.maxClosureSize = maxClosureSize->get(); - auto get_ = [&output = output](const std::string & name) -> std::optional { + auto get_ = + [&](const std::string & name) -> std::optional>> { if (auto i = get(output, name)) { - StringSet res; + std::set> res; for (auto j = i->begin(); j != i->end(); ++j) { if (!j->is_string()) throw Error("attribute '%s' must be a list of strings", name); - res.insert(j->get()); + res.insert(parseRef(j->get())); } return res; } return {}; }; - checks.allowedReferences = get_("allowedReferences"); - checks.allowedRequisites = get_("allowedRequisites"); - checks.disallowedReferences = get_("disallowedReferences").value_or(StringSet{}); - checks.disallowedRequisites = get_("disallowedRequisites").value_or(StringSet{}); - ; - - res.insert_or_assign(outputName, std::move(checks)); + res.insert_or_assign( + outputName, + OutputChecks{ + .maxSize = [&]() -> std::optional { + if (auto maxSize = get(output, "maxSize")) + return maxSize->get(); + else + return std::nullopt; + }(), + .maxClosureSize = [&]() -> std::optional { + if (auto maxClosureSize = get(output, "maxClosureSize")) + return maxClosureSize->get(); + else + return std::nullopt; + }(), + .allowedReferences = get_("allowedReferences"), + .disallowedReferences = + get_("disallowedReferences").value_or(std::set>{}), + .allowedRequisites = get_("allowedRequisites"), + .disallowedRequisites = + get_("disallowedRequisites").value_or(std::set>{}), + }); } } return res; } else { - return OutputChecks{ + auto 
parseRefSet = [&](const std::optional optionalStringSet) + -> std::optional>> { + if (!optionalStringSet) + return std::nullopt; + auto range = *optionalStringSet | std::views::transform(parseRef); + return std::set>(range.begin(), range.end()); + }; + return OutputChecks{ // legacy non-structured-attributes case .ignoreSelfRefs = true, - .allowedReferences = getStringSetAttr(env, parsed, "allowedReferences"), - .disallowedReferences = getStringSetAttr(env, parsed, "disallowedReferences").value_or(StringSet{}), - .allowedRequisites = getStringSetAttr(env, parsed, "allowedRequisites"), - .disallowedRequisites = getStringSetAttr(env, parsed, "disallowedRequisites").value_or(StringSet{}), + .allowedReferences = parseRefSet(getStringSetAttr(env, parsed, "allowedReferences")), + .disallowedReferences = parseRefSet(getStringSetAttr(env, parsed, "disallowedReferences")) + .value_or(std::set>{}), + .allowedRequisites = parseRefSet(getStringSetAttr(env, parsed, "allowedRequisites")), + .disallowedRequisites = parseRefSet(getStringSetAttr(env, parsed, "disallowedRequisites")) + .value_or(std::set>{}), }; } }(), @@ -195,7 +307,9 @@ DerivationOptions::fromStructuredAttrs(const StringMap & env, const StructuredAt std::map res; if (parsed) { - if (auto udr = get(parsed->structuredAttrs, "unsafeDiscardReferences")) { + auto & structuredAttrs = parsed->structuredAttrs; + + if (auto * udr = get(structuredAttrs, "unsafeDiscardReferences")) { for (auto & [outputName, output] : getObject(*udr)) { if (!output.is_boolean()) throw Error("attribute 'unsafeDiscardReferences.\"%s\"' must be a Boolean", outputName); @@ -223,16 +337,19 @@ DerivationOptions::fromStructuredAttrs(const StringMap & env, const StructuredAt }(), .exportReferencesGraph = [&] { - std::map ret; + std::map> ret; if (parsed) { - auto e = optionalValueAt(parsed->structuredAttrs, "exportReferencesGraph"); + auto * e = optionalValueAt(parsed->structuredAttrs, "exportReferencesGraph"); if (!e || !e->is_object()) return ret; 
- for (auto & [key, value] : getObject(*e)) { + for (auto & [key, storePathsJson] : getObject(*e)) { StringSet ss; - flatten(value, ss); - ret.insert_or_assign(key, std::move(ss)); + flatten(storePathsJson, ss); + std::set storePaths; + for (auto & s : ss) + storePaths.insert(parseSingleDerivedPath(s)); + ret.insert_or_assign(key, std::move(storePaths)); } } else { auto s = getOr(env, "exportReferencesGraph", ""); @@ -246,7 +363,7 @@ DerivationOptions::fromStructuredAttrs(const StringMap & env, const StructuredAt throw Error("invalid file name '%s' in 'exportReferencesGraph'", fileName); auto & storePathS = *i++; - ret.insert_or_assign(std::move(fileName), StringSet{storePathS}); + ret.insert_or_assign(std::move(fileName), std::set{parseSingleDerivedPath(storePathS)}); } } return ret; @@ -264,28 +381,8 @@ DerivationOptions::fromStructuredAttrs(const StringMap & env, const StructuredAt }; } -std::map -DerivationOptions::getParsedExportReferencesGraph(const StoreDirConfig & store) const -{ - std::map res; - - for (auto & [fileName, ss] : exportReferencesGraph) { - StorePathSet storePaths; - for (auto & storePathS : ss) { - if (!store.isInStore(storePathS)) - throw BuildError( - BuildResult::Failure::InputRejected, - "'exportReferencesGraph' contains a non-store path '%1%'", - storePathS); - storePaths.insert(store.toStorePath(storePathS).first); - } - res.insert_or_assign(fileName, storePaths); - } - - return res; -} - -StringSet DerivationOptions::getRequiredSystemFeatures(const BasicDerivation & drv) const +template +StringSet DerivationOptions::getRequiredSystemFeatures(const BasicDerivation & drv) const { // FIXME: cache this? 
StringSet res; @@ -296,7 +393,8 @@ StringSet DerivationOptions::getRequiredSystemFeatures(const BasicDerivation & d return res; } -bool DerivationOptions::canBuildLocally(Store & localStore, const BasicDerivation & drv) const +template +bool DerivationOptions::canBuildLocally(Store & localStore, const BasicDerivation & drv) const { if (drv.platform != settings.thisSystem.get() && !settings.extraPlatforms.get().count(drv.platform) && !drv.isBuiltin()) @@ -312,40 +410,194 @@ bool DerivationOptions::canBuildLocally(Store & localStore, const BasicDerivatio return true; } -bool DerivationOptions::willBuildLocally(Store & localStore, const BasicDerivation & drv) const +template +bool DerivationOptions::willBuildLocally(Store & localStore, const BasicDerivation & drv) const { return preferLocalBuild && canBuildLocally(localStore, drv); } -bool DerivationOptions::substitutesAllowed() const +template +bool DerivationOptions::substitutesAllowed() const { return settings.alwaysAllowSubstitutes ? true : allowSubstitutes; } -bool DerivationOptions::useUidRange(const BasicDerivation & drv) const +template +bool DerivationOptions::useUidRange(const BasicDerivation & drv) const { return getRequiredSystemFeatures(drv).count("uid-range"); } +std::optional> tryResolve( + const DerivationOptions & drvOptions, + std::function(ref drvPath, const std::string & outputName)> + queryResolutionChain) +{ + auto tryResolvePath = [&](const SingleDerivedPath & input) -> std::optional { + return std::visit( + overloaded{ + [](const SingleDerivedPath::Opaque & p) -> std::optional { return p.path; }, + [&](const SingleDerivedPath::Built & p) -> std::optional { + return queryResolutionChain(p.drvPath, p.output); + }}, + input.raw()); + }; + + auto tryResolveRef = [&](const DrvRef & ref) -> std::optional> { + return std::visit( + overloaded{ + [](const OutputName & outputName) -> std::optional> { return outputName; }, + [&](const SingleDerivedPath & input) -> std::optional> { + return 
tryResolvePath(input); + }}, + ref); + }; + + auto tryResolveRefSet = + [&](const std::set> & refSet) -> std::optional>> { + std::set> resolvedSet; + for (const auto & ref : refSet) { + auto resolvedRef = tryResolveRef(ref); + if (!resolvedRef) + return std::nullopt; + resolvedSet.insert(*resolvedRef); + } + return resolvedSet; + }; + + // Helper function to try resolving OutputChecks using functional style + auto tryResolveOutputChecks = [&](const DerivationOptions::OutputChecks & checks) + -> std::optional::OutputChecks> { + std::optional>> resolvedAllowedReferences; + if (checks.allowedReferences) { + resolvedAllowedReferences = tryResolveRefSet(*checks.allowedReferences); + if (!resolvedAllowedReferences) + return std::nullopt; + } + + std::optional>> resolvedAllowedRequisites; + if (checks.allowedRequisites) { + resolvedAllowedRequisites = tryResolveRefSet(*checks.allowedRequisites); + if (!resolvedAllowedRequisites) + return std::nullopt; + } + + auto resolvedDisallowedReferences = tryResolveRefSet(checks.disallowedReferences); + if (!resolvedDisallowedReferences) + return std::nullopt; + + auto resolvedDisallowedRequisites = tryResolveRefSet(checks.disallowedRequisites); + if (!resolvedDisallowedRequisites) + return std::nullopt; + + return DerivationOptions::OutputChecks{ + .ignoreSelfRefs = checks.ignoreSelfRefs, + .maxSize = checks.maxSize, + .maxClosureSize = checks.maxClosureSize, + .allowedReferences = resolvedAllowedReferences, + .disallowedReferences = *resolvedDisallowedReferences, + .allowedRequisites = resolvedAllowedRequisites, + .disallowedRequisites = *resolvedDisallowedRequisites, + }; + }; + + // Helper function to resolve exportReferencesGraph using functional style + auto tryResolveExportReferencesGraph = [&](const std::map> & exportGraph) + -> std::optional>> { + std::map> resolved; + for (const auto & [name, inputPaths] : exportGraph) { + std::set resolvedPaths; + for (const auto & inputPath : inputPaths) { + auto resolvedPath = 
tryResolvePath(inputPath); + if (!resolvedPath) + return std::nullopt; + resolvedPaths.insert(*resolvedPath); + } + resolved.emplace(name, std::move(resolvedPaths)); + } + return resolved; + }; + + // Resolve outputChecks using functional style with std::visit + auto resolvedOutputChecks = std::visit( + overloaded{ + [&](const DerivationOptions::OutputChecks & checks) + -> std::optional::OutputChecks, + std::map::OutputChecks>>> { + auto resolved = tryResolveOutputChecks(checks); + if (!resolved) + return std::nullopt; + return std::variant< + DerivationOptions::OutputChecks, + std::map::OutputChecks>>(*resolved); + }, + [&](const std::map::OutputChecks> & checksMap) + -> std::optional::OutputChecks, + std::map::OutputChecks>>> { + std::map::OutputChecks> resolvedMap; + for (const auto & [outputName, checks] : checksMap) { + auto resolved = tryResolveOutputChecks(checks); + if (!resolved) + return std::nullopt; + resolvedMap.emplace(outputName, *resolved); + } + return std::variant< + DerivationOptions::OutputChecks, + std::map::OutputChecks>>(resolvedMap); + }}, + drvOptions.outputChecks); + + if (!resolvedOutputChecks) + return std::nullopt; + + // Resolve exportReferencesGraph + auto resolvedExportGraph = tryResolveExportReferencesGraph(drvOptions.exportReferencesGraph); + if (!resolvedExportGraph) + return std::nullopt; + + // Return resolved DerivationOptions using designated initializers + return DerivationOptions{ + .outputChecks = *resolvedOutputChecks, + .unsafeDiscardReferences = drvOptions.unsafeDiscardReferences, + .passAsFile = drvOptions.passAsFile, + .exportReferencesGraph = *resolvedExportGraph, + .additionalSandboxProfile = drvOptions.additionalSandboxProfile, + .noChroot = drvOptions.noChroot, + .impureHostDeps = drvOptions.impureHostDeps, + .impureEnvVars = drvOptions.impureEnvVars, + .allowLocalNetworking = drvOptions.allowLocalNetworking, + .requiredSystemFeatures = drvOptions.requiredSystemFeatures, + .preferLocalBuild = 
drvOptions.preferLocalBuild, + .allowSubstitutes = drvOptions.allowSubstitutes, + }; +} + +template struct DerivationOptions; +template struct DerivationOptions; + } // namespace nix namespace nlohmann { using namespace nix; -DerivationOptions adl_serializer::from_json(const json & json) +DerivationOptions adl_serializer>::from_json(const json & json_) { + auto & json = getObject(json_); + return { - .outputChecks = [&]() -> OutputChecksVariant { + .outputChecks = [&]() -> OutputChecksVariant { auto outputChecks = getObject(valueAt(json, "outputChecks")); auto forAllOutputsOpt = optionalValueAt(outputChecks, "forAllOutputs"); auto perOutputOpt = optionalValueAt(outputChecks, "perOutput"); if (forAllOutputsOpt && !perOutputOpt) { - return static_cast(*forAllOutputsOpt); + return static_cast>(*forAllOutputsOpt); } else if (perOutputOpt && !forAllOutputsOpt) { - return static_cast>(*perOutputOpt); + return static_cast>>(*perOutputOpt); } else { throw Error("Exactly one of 'perOutput' or 'forAllOutputs' is required"); } @@ -353,6 +605,7 @@ DerivationOptions adl_serializer::from_json(const json & json .unsafeDiscardReferences = valueAt(json, "unsafeDiscardReferences"), .passAsFile = getStringSet(valueAt(json, "passAsFile")), + .exportReferencesGraph = valueAt(json, "exportReferencesGraph"), .additionalSandboxProfile = getString(valueAt(json, "additionalSandboxProfile")), .noChroot = getBoolean(valueAt(json, "noChroot")), @@ -366,16 +619,17 @@ DerivationOptions adl_serializer::from_json(const json & json }; } -void adl_serializer::to_json(json & json, const DerivationOptions & o) +void adl_serializer>::to_json( + json & json, const DerivationOptions & o) { json["outputChecks"] = std::visit( overloaded{ - [&](const OutputChecks & checks) { + [&](const OutputChecks & checks) { nlohmann::json outputChecks; outputChecks["forAllOutputs"] = checks; return outputChecks; }, - [&](const std::map & checksPerOutput) { + [&](const std::map> & checksPerOutput) { nlohmann::json 
outputChecks; outputChecks["perOutput"] = checksPerOutput; return outputChecks; @@ -385,6 +639,7 @@ void adl_serializer::to_json(json & json, const DerivationOpt json["unsafeDiscardReferences"] = o.unsafeDiscardReferences; json["passAsFile"] = o.passAsFile; + json["exportReferencesGraph"] = o.exportReferencesGraph; json["additionalSandboxProfile"] = o.additionalSandboxProfile; json["noChroot"] = o.noChroot; @@ -397,20 +652,28 @@ void adl_serializer::to_json(json & json, const DerivationOpt json["allowSubstitutes"] = o.allowSubstitutes; } -DerivationOptions::OutputChecks adl_serializer::from_json(const json & json) +OutputChecks adl_serializer>::from_json(const json & json_) { + auto & json = getObject(json_); + return { .ignoreSelfRefs = getBoolean(valueAt(json, "ignoreSelfRefs")), - .allowedReferences = nullableValueAt(json, "allowedReferences"), - .disallowedReferences = getStringSet(valueAt(json, "disallowedReferences")), - .allowedRequisites = nullableValueAt(json, "allowedRequisites"), - .disallowedRequisites = getStringSet(valueAt(json, "disallowedRequisites")), + .maxSize = ptrToOwned(getNullable(valueAt(json, "maxSize"))), + .maxClosureSize = ptrToOwned(getNullable(valueAt(json, "maxClosureSize"))), + .allowedReferences = + ptrToOwned>>(getNullable(valueAt(json, "allowedReferences"))), + .disallowedReferences = valueAt(json, "disallowedReferences"), + .allowedRequisites = + ptrToOwned>>(getNullable(valueAt(json, "allowedRequisites"))), + .disallowedRequisites = valueAt(json, "disallowedRequisites"), }; } -void adl_serializer::to_json(json & json, const DerivationOptions::OutputChecks & c) +void adl_serializer>::to_json(json & json, const OutputChecks & c) { json["ignoreSelfRefs"] = c.ignoreSelfRefs; + json["maxSize"] = c.maxSize; + json["maxClosureSize"] = c.maxClosureSize; json["allowedReferences"] = c.allowedReferences; json["disallowedReferences"] = c.disallowedReferences; json["allowedRequisites"] = c.allowedRequisites; diff --git 
a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 85581534c3f..1a5d683c865 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -14,6 +14,7 @@ #include #include #include +#include namespace nix { @@ -106,7 +107,7 @@ bool BasicDerivation::isBuiltin() const return builder.substr(0, 8) == "builtin:"; } -StorePath writeDerivation(Store & store, const Derivation & drv, RepairFlag repair, bool readOnly) +static auto infoForDerivation(Store & store, const Derivation & drv) { auto references = drv.inputSrcs; for (auto & i : drv.inputDrvs.map) @@ -116,23 +117,44 @@ StorePath writeDerivation(Store & store, const Derivation & drv, RepairFlag repa held during a garbage collection). */ auto suffix = std::string(drv.name) + drvExtension; auto contents = drv.unparse(store, false); - return readOnly || settings.readOnlyMode ? store.makeFixedOutputPathFromCA( - suffix, - TextInfo{ - .hash = hashString(HashAlgorithm::SHA256, contents), - .references = std::move(references), - }) - : ({ - StringSource s{contents}; - store.addToStoreFromDump( - s, - suffix, - FileSerialisationMethod::Flat, - ContentAddressMethod::Raw::Text, - HashAlgorithm::SHA256, - references, - repair); - }); + auto hash = hashString(HashAlgorithm::SHA256, contents); + auto ca = TextInfo{.hash = hash, .references = references}; + return std::tuple{ + suffix, + contents, + references, + store.makeFixedOutputPathFromCA(suffix, ca), + }; +} + +StorePath writeDerivation(Store & store, const Derivation & drv, RepairFlag repair, bool readOnly) +{ + if (readOnly || settings.readOnlyMode) { + auto [_x, _y, _z, path] = infoForDerivation(store, drv); + return path; + } else + return store.writeDerivation(drv, repair); +} + +StorePath Store::writeDerivation(const Derivation & drv, RepairFlag repair) +{ + auto [suffix, contents, references, path] = infoForDerivation(*this, drv); + + if (isValidPath(path) && !repair) + return path; + + StringSource s{contents}; + auto path2 = 
addToStoreFromDump( + s, + suffix, + FileSerialisationMethod::Flat, + ContentAddressMethod::Raw::Text, + HashAlgorithm::SHA256, + references, + repair); + assert(path2 == path); + + return path; } StorePath writeDerivation( @@ -303,7 +325,7 @@ static DerivationOutput parseDerivationOutput( if (!hashAlgoStr.empty()) { ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgoStr); if (method == ContentAddressMethod::Raw::Text) - xpSettings.require(Xp::DynamicDerivations); + xpSettings.require(Xp::DynamicDerivations, "text-hashed derivation output"); const auto hashAlgo = parseHashAlgo(hashAlgoStr); if (hashS == "impure"sv) { xpSettings.require(Xp::ImpureDerivations); @@ -441,7 +463,9 @@ Derivation parseDerivation( if (*versionS == "xp-dyn-drv"sv) { // Only version we have so far version = DerivationATermVersion::DynamicDerivations; - xpSettings.require(Xp::DynamicDerivations); + xpSettings.require(Xp::DynamicDerivations, [&] { + return fmt("derivation '%s', ATerm format version 'xp-dyn-drv'", name); + }); } else { throw FormatError("Unknown derivation ATerm format version '%s'", *versionS); } @@ -783,71 +807,63 @@ std::string outputPathName(std::string_view drvName, OutputNameView outputName) DerivationType BasicDerivation::type() const { - std::set inputAddressedOutputs, fixedCAOutputs, floatingCAOutputs, deferredIAOutputs, - impureOutputs; std::optional floatingHashAlgo; + std::optional ty; + + auto decide = [&](DerivationType newTy) { + if (!ty) + ty = newTy; + else if (ty.value() != newTy) + throw Error("can't mix derivation output types"); + else if (ty.value() == DerivationType::ContentAddressed{.sandboxed = false, .fixed = true}) + // FIXME: Experimental feature? 
+ throw Error("only one fixed output is allowed for now"); + }; for (auto & i : outputs) { std::visit( overloaded{ - [&](const DerivationOutput::InputAddressed &) { inputAddressedOutputs.insert(i.first); }, - [&](const DerivationOutput::CAFixed &) { fixedCAOutputs.insert(i.first); }, + [&](const DerivationOutput::InputAddressed &) { + decide( + DerivationType::InputAddressed{ + .deferred = false, + }); + }, + [&](const DerivationOutput::CAFixed &) { + decide( + DerivationType::ContentAddressed{ + .sandboxed = false, + .fixed = true, + }); + if (i.first != "out"sv) + throw Error("single fixed output must be named \"out\""); + }, [&](const DerivationOutput::CAFloating & dof) { - floatingCAOutputs.insert(i.first); - if (!floatingHashAlgo) { + decide( + DerivationType::ContentAddressed{ + .sandboxed = true, + .fixed = false, + }); + if (!floatingHashAlgo) floatingHashAlgo = dof.hashAlgo; - } else { - if (*floatingHashAlgo != dof.hashAlgo) - throw Error("all floating outputs must use the same hash algorithm"); - } + else if (*floatingHashAlgo != dof.hashAlgo) + throw Error("all floating outputs must use the same hash algorithm"); }, - [&](const DerivationOutput::Deferred &) { deferredIAOutputs.insert(i.first); }, - [&](const DerivationOutput::Impure &) { impureOutputs.insert(i.first); }, + [&](const DerivationOutput::Deferred &) { + decide( + DerivationType::InputAddressed{ + .deferred = true, + }); + }, + [&](const DerivationOutput::Impure &) { decide(DerivationType::Impure{}); }, }, i.second.raw); } - if (inputAddressedOutputs.empty() && fixedCAOutputs.empty() && floatingCAOutputs.empty() - && deferredIAOutputs.empty() && impureOutputs.empty()) + if (!ty) throw Error("must have at least one output"); - if (!inputAddressedOutputs.empty() && fixedCAOutputs.empty() && floatingCAOutputs.empty() - && deferredIAOutputs.empty() && impureOutputs.empty()) - return DerivationType::InputAddressed{ - .deferred = false, - }; - - if (inputAddressedOutputs.empty() && 
!fixedCAOutputs.empty() && floatingCAOutputs.empty() - && deferredIAOutputs.empty() && impureOutputs.empty()) { - if (fixedCAOutputs.size() > 1) - // FIXME: Experimental feature? - throw Error("only one fixed output is allowed for now"); - if (*fixedCAOutputs.begin() != "out"sv) - throw Error("single fixed output must be named \"out\""); - return DerivationType::ContentAddressed{ - .sandboxed = false, - .fixed = true, - }; - } - - if (inputAddressedOutputs.empty() && fixedCAOutputs.empty() && !floatingCAOutputs.empty() - && deferredIAOutputs.empty() && impureOutputs.empty()) - return DerivationType::ContentAddressed{ - .sandboxed = true, - .fixed = false, - }; - - if (inputAddressedOutputs.empty() && fixedCAOutputs.empty() && floatingCAOutputs.empty() - && !deferredIAOutputs.empty() && impureOutputs.empty()) - return DerivationType::InputAddressed{ - .deferred = true, - }; - - if (inputAddressedOutputs.empty() && fixedCAOutputs.empty() && floatingCAOutputs.empty() - && deferredIAOutputs.empty() && !impureOutputs.empty()) - return DerivationType::Impure{}; - - throw Error("can't mix derivation output types"); + return ty.value(); } DrvHashes drvHashes; @@ -1202,6 +1218,145 @@ std::optional Derivation::tryResolve( return resolved; } +/** + * Process `InputAddressed`, `Deferred`, and `CAFixed` outputs. + * + * For `InputAddressed` outputs or `Deferred` outputs: + * + * - with `Regular` hash kind, validate `InputAddressed` outputs have + * the correct path (throws if mismatch). For `Deferred` outputs: + * - if `fillIn` is true, fill in the output path to make `InputAddressed` + * - if `fillIn` is false, throw an error + * Then validate or fill in the environment variable with the path. + * + * - with `Deferred` hash kind, validate that the output is either + * `InputAddressed` (error) or `Deferred` (correct). + * + * For `CAFixed` outputs, validate or fill in the environment variable + * with the computed path. 
+ * + * @tparam fillIn If true, fill in missing output paths and environment + * variables. If false, validate that all paths are correct (throws on + * mismatch). + */ +template +static void processDerivationOutputPaths(Store & store, auto && drv, std::string_view drvName) +{ + std::optional hashesModulo; + + for (auto & [outputName, output] : drv.outputs) { + auto envHasRightPath = [&](const StorePath & actual, bool isDeferred = false) { + if constexpr (fillIn) { + auto j = drv.env.find(outputName); + /* Fill in mode: fill in missing or empty environment + variables */ + if (j == drv.env.end()) + drv.env.insert(j, {outputName, store.printStorePath(actual)}); + else if (j->second == "") + j->second = store.printStorePath(actual); + /* We know validation will succeed after fill-in, but + just to be extra sure, validate unconditionally */ + } + auto j = drv.env.find(outputName); + if (j == drv.env.end()) + throw Error( + "derivation has missing environment variable '%s', should be '%s' but is not present", + outputName, + store.printStorePath(actual)); + if (j->second != store.printStorePath(actual)) { + if (isDeferred) + warn( + "derivation has incorrect environment variable '%s', should be '%s' but is actually '%s'\nThis will be an error in future versions of Nix; compatibility of CA derivations will be broken.", + outputName, + store.printStorePath(actual), + j->second); + else + throw Error( + "derivation has incorrect environment variable '%s', should be '%s' but is actually '%s'", + outputName, + store.printStorePath(actual), + j->second); + } + }; + auto hash = [&](const Output & outputVariant) { + if (!hashesModulo) { + // somewhat expensive so we do lazily + hashesModulo = hashDerivationModulo(store, drv, true); + } + switch (hashesModulo->kind) { + case DrvHash::Kind::Regular: { + auto h = get(hashesModulo->hashes, outputName); + if (!h) + throw Error("derivation produced no hash for output '%s'", outputName); + auto outPath = 
store.makeOutputPath(outputName, *h, drvName); + + if constexpr (std::is_same_v) { + if (outputVariant.path == outPath) { + return; // Correct case + } + /* Error case, an explicitly wrong path is + always an error. */ + throw Error( + "derivation has incorrect output '%s', should be '%s'", + store.printStorePath(outputVariant.path), + store.printStorePath(outPath)); + } else if constexpr (std::is_same_v) { + if constexpr (fillIn) + /* Fill in output path for Deferred + outputs */ + output = DerivationOutput::InputAddressed{ + .path = outPath, + }; + else + /* Validation mode: deferred outputs + should have been filled in */ + warn( + "derivation has incorrect deferred output, should be '%s'.\nThis will be an error in future versions of Nix; compatibility of CA derivations will be broken.", + store.printStorePath(outPath)); + } else { + /* Will never happen, based on where + `hash` is called. */ + static_assert(false); + } + envHasRightPath(outPath); + break; + } + case DrvHash::Kind::Deferred: + if constexpr (std::is_same_v) { + /* Error case, an explicitly wrong path is + always an error. */ + throw Error( + "derivation has incorrect output '%s', should be deferred", + store.printStorePath(outputVariant.path)); + } else if constexpr (std::is_same_v) { + /* Correct: Deferred output with Deferred + hash kind. */ + } else { + /* Will never happen, based on where + `hash` is called. */ + static_assert(false); + } + break; + } + }; + std::visit( + overloaded{ + [&](const DerivationOutput::InputAddressed & o) { hash(o); }, + [&](const DerivationOutput::Deferred & o) { hash(o); }, + [&](const DerivationOutput::CAFixed & dof) { envHasRightPath(dof.path(store, drvName, outputName)); }, + [&](const auto &) { + // Nothing to do for other output types + }, + }, + output.raw); + } + + /* Don't need the answer, but do this anyways to assert is proper + combination. The code above is more general and naturally allows + combinations that are currently prohibited. 
*/ + drv.type(); +} + void Derivation::checkInvariants(Store & store, const StorePath & drvPath) const { assert(drvPath.isDerivation()); @@ -1209,85 +1364,65 @@ void Derivation::checkInvariants(Store & store, const StorePath & drvPath) const drvName = drvName.substr(0, drvName.size() - drvExtension.size()); if (drvName != name) { - throw Error("Derivation '%s' has name '%s' which does not match its path", store.printStorePath(drvPath), name); + throw Error("derivation '%s' has name '%s' which does not match its path", store.printStorePath(drvPath), name); } - auto envHasRightPath = [&](const StorePath & actual, const std::string & varName) { - auto j = env.find(varName); - if (j == env.end() || store.parseStorePath(j->second) != actual) - throw Error( - "derivation '%s' has incorrect environment variable '%s', should be '%s'", - store.printStorePath(drvPath), - varName, - store.printStorePath(actual)); - }; + try { + checkInvariants(store); + } catch (Error & e) { + e.addTrace({}, "while checking derivation '%s'", store.printStorePath(drvPath)); + throw; + } +} - // Don't need the answer, but do this anyways to assert is proper - // combination. The code below is more general and naturally allows - // combinations that are currently prohibited. 
- type(); +void Derivation::checkInvariants(Store & store) const +{ + processDerivationOutputPaths(store, *this, name); +} - std::optional hashesModulo; - for (auto & i : outputs) { - std::visit( - overloaded{ - [&](const DerivationOutput::InputAddressed & doia) { - if (!hashesModulo) { - // somewhat expensive so we do lazily - hashesModulo = hashDerivationModulo(store, *this, true); - } - auto currentOutputHash = get(hashesModulo->hashes, i.first); - if (!currentOutputHash) - throw Error( - "derivation '%s' has unexpected output '%s' (local-store / hashesModulo) named '%s'", - store.printStorePath(drvPath), - store.printStorePath(doia.path), - i.first); - StorePath recomputed = store.makeOutputPath(i.first, *currentOutputHash, drvName); - if (doia.path != recomputed) - throw Error( - "derivation '%s' has incorrect output '%s', should be '%s'", - store.printStorePath(drvPath), - store.printStorePath(doia.path), - store.printStorePath(recomputed)); - envHasRightPath(doia.path, i.first); - }, - [&](const DerivationOutput::CAFixed & dof) { - auto path = dof.path(store, drvName, i.first); - envHasRightPath(path, i.first); - }, - [&](const DerivationOutput::CAFloating &) { - /* Nothing to check */ - }, - [&](const DerivationOutput::Deferred &) { - /* Nothing to check */ - }, - [&](const DerivationOutput::Impure &) { - /* Nothing to check */ - }, - }, - i.second.raw); +void Derivation::fillInOutputPaths(Store & store) +{ + processDerivationOutputPaths(store, *this, name); +} + +Derivation Derivation::parseJsonAndValidate(Store & store, const nlohmann::json & json) +{ + auto drv = static_cast(json); + + drv.fillInOutputPaths(store); + + try { + drv.checkInvariants(store); + } catch (Error & e) { + e.addTrace({}, "while checking derivation from JSON with name '%s'", drv.name); + throw; } + + return drv; } const Hash impureOutputHash = hashString(HashAlgorithm::SHA256, "impure"); -nlohmann::json DerivationOutput::toJSON() const +} // namespace nix + +namespace nlohmann { + 
+using namespace nix; + +void adl_serializer::to_json(json & res, const DerivationOutput & o) { - nlohmann::json res = nlohmann::json::object(); + res = nlohmann::json::object(); std::visit( overloaded{ [&](const DerivationOutput::InputAddressed & doi) { res["path"] = doi.path; }, [&](const DerivationOutput::CAFixed & dof) { - /* it would be nice to output the path for user convenience, but - this would require us to know the store dir. */ + res = dof.ca; + // FIXME print refs? + /* it would be nice to output the path for user convenience, but + this would require us to know the store dir. */ #if 0 res["path"] = dof.path(store, drvName, outputName); #endif - res["method"] = std::string{dof.ca.method.render()}; - res["hashAlgo"] = printHashAlgo(dof.ca.hash.algo); - res["hash"] = dof.ca.hash.to_string(HashFormat::Base16, false); - // FIXME print refs? }, [&](const DerivationOutput::CAFloating & dof) { res["method"] = std::string{dof.method.render()}; @@ -1300,12 +1435,11 @@ nlohmann::json DerivationOutput::toJSON() const res["impure"] = true; }, }, - raw); - return res; + o.raw); } DerivationOutput -DerivationOutput::fromJSON(const nlohmann::json & _json, const ExperimentalFeatureSettings & xpSettings) +adl_serializer::from_json(const json & _json, const ExperimentalFeatureSettings & xpSettings) { std::set keys; auto & json = getObject(_json); @@ -1316,7 +1450,7 @@ DerivationOutput::fromJSON(const nlohmann::json & _json, const ExperimentalFeatu auto methodAlgo = [&]() -> std::pair { ContentAddressMethod method = ContentAddressMethod::parse(getString(valueAt(json, "method"))); if (method == ContentAddressMethod::Raw::Text) - xpSettings.require(Xp::DynamicDerivations); + xpSettings.require(Xp::DynamicDerivations, "text-hashed derivation output in JSON"); auto hashAlgo = parseHashAlgo(getString(valueAt(json, "hashAlgo"))); return {std::move(method), std::move(hashAlgo)}; @@ -1328,15 +1462,12 @@ DerivationOutput::fromJSON(const nlohmann::json & _json, const 
ExperimentalFeatu }; } - else if (keys == (std::set{"method", "hashAlgo", "hash"})) { - auto [method, hashAlgo] = methodAlgo(); + else if (keys == (std::set{"method", "hash"})) { auto dof = DerivationOutput::CAFixed{ - .ca = - ContentAddress{ - .method = std::move(method), - .hash = Hash::parseNonSRIUnprefixed(getString(valueAt(json, "hash")), hashAlgo), - }, + .ca = static_cast(_json), }; + if (dof.ca.method == ContentAddressMethod::Raw::Text) + xpSettings.require(Xp::DynamicDerivations, "text-hashed derivation output in JSON"); /* We no longer produce this (denormalized) field (for the reasons described above), so we don't need to check it. */ #if 0 @@ -1373,32 +1504,34 @@ DerivationOutput::fromJSON(const nlohmann::json & _json, const ExperimentalFeatu } } -nlohmann::json Derivation::toJSON() const +void adl_serializer::to_json(json & res, const Derivation & d) { - nlohmann::json res = nlohmann::json::object(); + res = nlohmann::json::object(); - res["name"] = name; + res["name"] = d.name; - res["version"] = 3; + res["version"] = expectedJsonVersionDerivation; { nlohmann::json & outputsObj = res["outputs"]; outputsObj = nlohmann::json::object(); - for (auto & [outputName, output] : outputs) { + for (auto & [outputName, output] : d.outputs) { outputsObj[outputName] = output; } } { - auto & inputsList = res["inputSrcs"]; - inputsList = nlohmann::json ::array(); - for (auto & input : inputSrcs) - inputsList.emplace_back(input); - } + auto & inputsObj = res["inputs"]; + inputsObj = nlohmann::json::object(); - { - std::function::ChildNode &)> doInput; - doInput = [&](const auto & inputNode) { + { + auto & inputsList = inputsObj["srcs"]; + inputsList = nlohmann::json::array(); + for (auto & input : d.inputSrcs) + inputsList.emplace_back(input); + } + + auto doInput = [&](this const auto & doInput, const auto & inputNode) -> nlohmann::json { auto value = nlohmann::json::object(); value["outputs"] = inputNode.value; { @@ -1409,27 +1542,24 @@ nlohmann::json 
Derivation::toJSON() const } return value; }; - { - auto & inputDrvsObj = res["inputDrvs"]; - inputDrvsObj = nlohmann::json::object(); - for (auto & [inputDrv, inputNode] : inputDrvs.map) { - inputDrvsObj[inputDrv.to_string()] = doInput(inputNode); - } + + auto & inputDrvsObj = inputsObj["drvs"]; + inputDrvsObj = nlohmann::json::object(); + for (auto & [inputDrv, inputNode] : d.inputDrvs.map) { + inputDrvsObj[inputDrv.to_string()] = doInput(inputNode); } } - res["system"] = platform; - res["builder"] = builder; - res["args"] = args; - res["env"] = env; - - if (structuredAttrs) - res["structuredAttrs"] = structuredAttrs->structuredAttrs; + res["system"] = d.platform; + res["builder"] = d.builder; + res["args"] = d.args; + res["env"] = d.env; - return res; + if (d.structuredAttrs) + res["structuredAttrs"] = d.structuredAttrs->structuredAttrs; } -Derivation Derivation::fromJSON(const nlohmann::json & _json, const ExperimentalFeatureSettings & xpSettings) +Derivation adl_serializer::from_json(const json & _json, const ExperimentalFeatureSettings & xpSettings) { using nlohmann::detail::value_t; @@ -1439,13 +1569,19 @@ Derivation Derivation::fromJSON(const nlohmann::json & _json, const Experimental res.name = getString(valueAt(json, "name")); - if (valueAt(json, "version") != 3) - throw Error("Only derivation format version 3 is currently supported."); + { + auto version = getUnsigned(valueAt(json, "version")); + if (valueAt(json, "version") != expectedJsonVersionDerivation) + throw Error( + "Unsupported derivation JSON format version %d, only format version %d is currently supported.", + version, + expectedJsonVersionDerivation); + } try { auto outputs = getObject(valueAt(json, "outputs")); for (auto & [outputName, output] : outputs) { - res.outputs.insert_or_assign(outputName, DerivationOutput::fromJSON(output, xpSettings)); + res.outputs.insert_or_assign(outputName, adl_serializer::from_json(output, xpSettings)); } } catch (Error & e) { e.addTrace({}, "while reading 
key 'outputs'"); @@ -1453,32 +1589,39 @@ Derivation Derivation::fromJSON(const nlohmann::json & _json, const Experimental } try { - auto inputSrcs = getArray(valueAt(json, "inputSrcs")); - for (auto & input : inputSrcs) - res.inputSrcs.insert(input); - } catch (Error & e) { - e.addTrace({}, "while reading key 'inputSrcs'"); - throw; - } + auto inputsObj = getObject(valueAt(json, "inputs")); + + try { + auto inputSrcs = getArray(valueAt(inputsObj, "srcs")); + for (auto & input : inputSrcs) + res.inputSrcs.insert(input); + } catch (Error & e) { + e.addTrace({}, "while reading key 'srcs'"); + throw; + } - try { - std::function::ChildNode(const nlohmann::json &)> doInput; - doInput = [&](const auto & _json) { - auto & json = getObject(_json); - DerivedPathMap::ChildNode node; - node.value = getStringSet(valueAt(json, "outputs")); - auto drvs = getObject(valueAt(json, "dynamicOutputs")); - for (auto & [outputId, childNode] : drvs) { - xpSettings.require(Xp::DynamicDerivations); - node.childMap[outputId] = doInput(childNode); - } - return node; - }; - auto drvs = getObject(valueAt(json, "inputDrvs")); - for (auto & [inputDrvPath, inputOutputs] : drvs) - res.inputDrvs.map[StorePath{inputDrvPath}] = doInput(inputOutputs); + try { + auto doInput = [&](this const auto & doInput, const auto & _json) -> DerivedPathMap::ChildNode { + auto & json = getObject(_json); + DerivedPathMap::ChildNode node; + node.value = getStringSet(valueAt(json, "outputs")); + auto drvs = getObject(valueAt(json, "dynamicOutputs")); + for (auto & [outputId, childNode] : drvs) { + xpSettings.require( + Xp::DynamicDerivations, [&] { return fmt("dynamic output '%s' in JSON", outputId); }); + node.childMap[outputId] = doInput(childNode); + } + return node; + }; + auto drvs = getObject(valueAt(inputsObj, "drvs")); + for (auto & [inputDrvPath, inputOutputs] : drvs) + res.inputDrvs.map[StorePath{inputDrvPath}] = doInput(inputOutputs); + } catch (Error & e) { + e.addTrace({}, "while reading key 'drvs'"); + 
throw; + } } catch (Error & e) { - e.addTrace({}, "while reading key 'inputDrvs'"); + e.addTrace({}, "while reading key 'inputs'"); throw; } @@ -1500,30 +1643,4 @@ Derivation Derivation::fromJSON(const nlohmann::json & _json, const Experimental return res; } -} // namespace nix - -namespace nlohmann { - -using namespace nix; - -DerivationOutput adl_serializer::from_json(const json & json) -{ - return DerivationOutput::fromJSON(json); -} - -void adl_serializer::to_json(json & json, const DerivationOutput & c) -{ - json = c.toJSON(); -} - -Derivation adl_serializer::from_json(const json & json) -{ - return Derivation::fromJSON(json); -} - -void adl_serializer::to_json(json & json, const Derivation & c) -{ - json = c.toJSON(); -} - } // namespace nlohmann diff --git a/src/libstore/derived-path.cc b/src/libstore/derived-path.cc index 2cf720b8221..251e112514e 100644 --- a/src/libstore/derived-path.cc +++ b/src/libstore/derived-path.cc @@ -85,7 +85,11 @@ void drvRequireExperiment(const SingleDerivedPath & drv, const ExperimentalFeatu [&](const SingleDerivedPath::Opaque &) { // plain drv path; no experimental features required. 
}, - [&](const SingleDerivedPath::Built &) { xpSettings.require(Xp::DynamicDerivations); }, + [&](const SingleDerivedPath::Built & b) { + xpSettings.require(Xp::DynamicDerivations, [&] { + return fmt("building output '%s' of '%s'", b.output, b.drvPath->getBaseStorePath().to_string()); + }); + }, }, drv.raw()); } @@ -248,20 +252,26 @@ void adl_serializer::to_json(json & json, const DerivedPath: }; } -SingleDerivedPath::Built adl_serializer::from_json(const json & json0) +SingleDerivedPath::Built +adl_serializer::from_json(const json & json0, const ExperimentalFeatureSettings & xpSettings) { auto & json = getObject(json0); + auto drvPath = make_ref(static_cast(valueAt(json, "drvPath"))); + drvRequireExperiment(*drvPath, xpSettings); return { - .drvPath = make_ref(static_cast(valueAt(json, "drvPath"))), + .drvPath = std::move(drvPath), .output = getString(valueAt(json, "output")), }; } -DerivedPath::Built adl_serializer::from_json(const json & json0) +DerivedPath::Built +adl_serializer::from_json(const json & json0, const ExperimentalFeatureSettings & xpSettings) { auto & json = getObject(json0); + auto drvPath = make_ref(static_cast(valueAt(json, "drvPath"))); + drvRequireExperiment(*drvPath, xpSettings); return { - .drvPath = make_ref(static_cast(valueAt(json, "drvPath"))), + .drvPath = std::move(drvPath), .outputs = adl_serializer::from_json(valueAt(json, "outputs")), }; } @@ -276,20 +286,21 @@ void adl_serializer::to_json(json & json, const DerivedPath & sdp) std::visit([&](const auto & buildable) { json = buildable; }, sdp.raw()); } -SingleDerivedPath adl_serializer::from_json(const json & json) +SingleDerivedPath +adl_serializer::from_json(const json & json, const ExperimentalFeatureSettings & xpSettings) { if (json.is_string()) return static_cast(json); else - return static_cast(json); + return adl_serializer::from_json(json, xpSettings); } -DerivedPath adl_serializer::from_json(const json & json) +DerivedPath adl_serializer::from_json(const json & json, const 
ExperimentalFeatureSettings & xpSettings) { if (json.is_string()) return static_cast(json); else - return static_cast(json); + return adl_serializer::from_json(json, xpSettings); } } // namespace nlohmann diff --git a/src/libstore/downstream-placeholder.cc b/src/libstore/downstream-placeholder.cc index b3ac1c8c42c..73ed2b74a7b 100644 --- a/src/libstore/downstream-placeholder.cc +++ b/src/libstore/downstream-placeholder.cc @@ -1,5 +1,6 @@ #include "nix/store/downstream-placeholder.hh" #include "nix/store/derivations.hh" +#include "nix/util/json-utils.hh" namespace nix { @@ -24,7 +25,8 @@ DownstreamPlaceholder DownstreamPlaceholder::unknownDerivation( OutputNameView outputName, const ExperimentalFeatureSettings & xpSettings) { - xpSettings.require(Xp::DynamicDerivations); + xpSettings.require( + Xp::DynamicDerivations, [&] { return fmt("placeholder for unknown derivation output '%s'", outputName); }); auto compressed = compressHash(placeholder.hash, 20); auto clearText = "nix-computed-output:" + compressed.to_string(HashFormat::Nix32, false) + ":" + std::string{outputName}; @@ -48,3 +50,45 @@ DownstreamPlaceholder DownstreamPlaceholder::fromSingleDerivedPathBuilt( } } // namespace nix + +namespace nlohmann { + +using namespace nix; + +template +DrvRef adl_serializer>::from_json(const json & json) +{ + // OutputName case: { "drvPath": "self", "output": } + if (json.type() == nlohmann::json::value_t::object) { + auto & obj = getObject(json); + if (auto * drvPath_ = get(obj, "drvPath")) { + auto & drvPath = *drvPath_; + if (drvPath.type() == nlohmann::json::value_t::string && getString(drvPath) == "self") { + return getString(valueAt(obj, "output")); + } + } + } + + // Input case + return adl_serializer::from_json(json); +} + +template +void adl_serializer>::to_json(json & json, const DrvRef & ref) +{ + std::visit( + overloaded{ + [&](const OutputName & outputName) { + json = nlohmann::json::object(); + json["drvPath"] = "self"; + json["output"] = outputName; + }, + 
[&](const Item & item) { json = item; }, + }, + ref); +} + +template struct adl_serializer>; +template struct adl_serializer>; + +} // namespace nlohmann diff --git a/src/libstore/dummy-store.cc b/src/libstore/dummy-store.cc index c739db6ba54..375ed7b2d24 100644 --- a/src/libstore/dummy-store.cc +++ b/src/libstore/dummy-store.cc @@ -2,7 +2,9 @@ #include "nix/util/archive.hh" #include "nix/util/callback.hh" #include "nix/util/memory-source-accessor.hh" +#include "nix/util/json-utils.hh" #include "nix/store/dummy-store-impl.hh" +#include "nix/store/realisation.hh" #include @@ -15,6 +17,16 @@ std::string DummyStoreConfig::doc() ; } +bool DummyStore::PathInfoAndContents::operator==(const PathInfoAndContents & other) const +{ + return info == other.info && contents->root == other.contents->root; +} + +bool DummyStore::operator==(const DummyStore & other) const +{ + return contents == other.contents && derivations == other.derivations && buildTrace == other.buildTrace; +} + namespace { class WholeStoreViewAccessor : public SourceAccessor @@ -136,12 +148,40 @@ struct DummyStoreImpl : DummyStore void queryPathInfoUncached( const StorePath & path, Callback> callback) noexcept override { - bool visited = contents.cvisit(path, [&](const auto & kv) { - callback(std::make_shared(StorePath{kv.first}, kv.second.info)); - }); + if (path.isDerivation()) { + if (auto accessor_ = getMemoryFSAccessor(path)) { + ref accessor = ref{std::move(accessor_)}; + /* compute path info on demand */ + auto narHash = + hashPath({accessor, CanonPath::root}, FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256); + auto info = std::make_shared(path, UnkeyedValidPathInfo{*this, narHash.hash}); + info->narSize = narHash.numBytesDigested; + info->ca = ContentAddress{ + .method = ContentAddressMethod::Raw::Text, + .hash = hashString( + HashAlgorithm::SHA256, + std::get(accessor->root->raw).contents), + }; + callback(std::move(info)); + return; + } + } else { + if (contents.cvisit(path, [&](const 
auto & kv) { + callback(std::make_shared(StorePath{kv.first}, kv.second.info)); + })) + return; + } - if (!visited) - callback(nullptr); + callback(nullptr); + } + + /** + * Do this to avoid `queryPathInfoUncached` computing `PathInfo` + * that we don't need just to return a `bool`. + */ + bool isValidPathUncached(const StorePath & path) override + { + return path.isDerivation() ? derivations.contains(path) : Store::isValidPathUncached(path); } /** @@ -168,18 +208,25 @@ struct DummyStoreImpl : DummyStore if (checkSigs) throw Error("checking signatures is not supported for '%s' store", config->getHumanReadableURI()); - auto temp = make_ref(); - MemorySink tempSink{*temp}; + auto accessor = make_ref(); + MemorySink tempSink{*accessor}; parseDump(tempSink, source); auto path = info.path; - auto accessor = make_ref(std::move(*temp)); - contents.insert( - {path, - PathInfoAndContents{ - std::move(info), - accessor, - }}); + if (info.path.isDerivation()) { + warn("back compat supporting `addToStore` for inserting derivations in dummy store"); + writeDerivation( + parseDerivation(*this, accessor->readFile(CanonPath::root), Derivation::nameFromPath(info.path))); + return; + } + + contents.insert({ + path, + PathInfoAndContents{ + std::move(info), + accessor, + }, + }); wholeStoreView->addObject(path.to_string(), accessor); } @@ -192,6 +239,9 @@ struct DummyStoreImpl : DummyStore const StorePathSet & references = StorePathSet(), RepairFlag repair = NoRepair) override { + if (isDerivation(name)) + throw Error("Do not insert derivation into dummy store with `addToStoreFromDump`"); + if (config->readOnly) unsupported("addToStoreFromDump"); @@ -238,45 +288,89 @@ struct DummyStoreImpl : DummyStore auto path = info.path; auto accessor = make_ref(std::move(*temp)); - contents.insert( - {path, - PathInfoAndContents{ - std::move(info), - accessor, - }}); + contents.insert({ + path, + PathInfoAndContents{ + std::move(info), + accessor, + }, + }); 
wholeStoreView->addObject(path.to_string(), accessor); return path; } + StorePath writeDerivation(const Derivation & drv, RepairFlag repair = NoRepair) override + { + auto drvPath = ::nix::writeDerivation(*this, drv, repair, /*readonly=*/true); + + if (!derivations.contains(drvPath) || repair) { + if (config->readOnly) + unsupported("writeDerivation"); + derivations.insert({drvPath, drv}); + } + + return drvPath; + } + + Derivation readDerivation(const StorePath & drvPath) override + { + if (std::optional res = getConcurrent(derivations, drvPath)) + return *res; + else + throw Error("derivation '%s' is not valid", printStorePath(drvPath)); + } + + /** + * No such thing as an "invalid derivation" with the dummy store + */ + Derivation readInvalidDerivation(const StorePath & drvPath) override + { + return readDerivation(drvPath); + } + void registerDrvOutput(const Realisation & output) override { - unsupported("registerDrvOutput"); + buildTrace.insert_or_visit({output.id.drvHash, {{output.id.outputName, output}}}, [&](auto & kv) { + kv.second.insert_or_assign(output.id.outputName, output); + }); } - void narFromPath(const StorePath & path, Sink & sink) override + void queryRealisationUncached( + const DrvOutput & drvOutput, Callback> callback) noexcept override { - bool visited = contents.cvisit(path, [&](const auto & kv) { - const auto & [info, accessor] = kv.second; - SourcePath sourcePath(accessor); - dumpPath(sourcePath, sink, FileSerialisationMethod::NixArchive); + bool visited = false; + buildTrace.cvisit(drvOutput.drvHash, [&](const auto & kv) { + if (auto it = kv.second.find(drvOutput.outputName); it != kv.second.end()) { + visited = true; + callback(std::make_shared(it->second)); + } }); if (!visited) - throw Error("path '%s' is not valid", printStorePath(path)); + callback(nullptr); } - void - queryRealisationUncached(const DrvOutput &, Callback> callback) noexcept override + std::shared_ptr getMemoryFSAccessor(const StorePath & path, bool requireValidPath 
= true) { - callback(nullptr); + std::shared_ptr res; + if (path.isDerivation()) + derivations.cvisit(path, [&](const auto & kv) { + /* compute path info on demand */ + auto res2 = make_ref(); + res2->root = MemorySourceAccessor::File::Regular{ + .contents = kv.second.unparse(*this, false), + }; + res = std::move(res2).get_ptr(); + }); + else + contents.cvisit(path, [&](const auto & kv) { res = kv.second.contents.get_ptr(); }); + return res; } - std::shared_ptr getFSAccessor(const StorePath & path, bool requireValidPath) override + std::shared_ptr getFSAccessor(const StorePath & path, bool requireValidPath = true) override { - std::shared_ptr res; - contents.cvisit(path, [&](const auto & kv) { res = kv.second.contents.get_ptr(); }); - return res; + return getMemoryFSAccessor(path, requireValidPath); } ref getFSAccessor(bool requireValidPath) override @@ -293,3 +387,100 @@ ref DummyStore::Config::openDummyStore() const static RegisterStoreImplementation regDummyStore; } // namespace nix + +namespace nlohmann { + +using namespace nix; + +DummyStore::PathInfoAndContents adl_serializer::from_json(const json & json) +{ + auto & obj = getObject(json); + return DummyStore::PathInfoAndContents{ + .info = valueAt(obj, "info"), + .contents = make_ref(valueAt(obj, "contents")), + }; +} + +void adl_serializer::to_json(json & json, const DummyStore::PathInfoAndContents & val) +{ + json = { + {"info", val.info}, + {"contents", *val.contents}, + }; +} + +ref adl_serializer>::from_json(const json & json) +{ + auto & obj = getObject(json); + auto cfg = make_ref(DummyStore::Config::Params{}); + const_cast(cfg->storeDir_).set(getString(valueAt(obj, "store"))); + cfg->readOnly = true; + return cfg; +} + +void adl_serializer::to_json(json & json, const DummyStoreConfig & val) +{ + json = { + {"store", val.storeDir}, + }; +} + +ref adl_serializer>::from_json(const json & json) +{ + auto & obj = getObject(json); + ref res = adl_serializer>::from_json(valueAt(obj, 
"config"))->openDummyStore(); + for (auto & [k, v] : getObject(valueAt(obj, "contents"))) + res->contents.insert({StorePath{k}, v}); + for (auto & [k, v] : getObject(valueAt(obj, "derivations"))) + res->derivations.insert({StorePath{k}, v}); + for (auto & [k0, v] : getObject(valueAt(obj, "buildTrace"))) { + for (auto & [k1, v2] : getObject(v)) { + UnkeyedRealisation realisation = v2; + res->buildTrace.insert_or_visit( + { + Hash::parseExplicitFormatUnprefixed(k0, HashAlgorithm::SHA256, HashFormat::Base64), + {{k1, realisation}}, + }, + [&](auto & kv) { kv.second.insert_or_assign(k1, realisation); }); + } + } + return res; +} + +void adl_serializer::to_json(json & json, const DummyStore & val) +{ + json = { + {"config", *val.config}, + {"contents", + [&] { + auto obj = json::object(); + val.contents.cvisit_all([&](const auto & kv) { + auto & [k, v] = kv; + obj[k.to_string()] = v; + }); + return obj; + }()}, + {"derivations", + [&] { + auto obj = json::object(); + val.derivations.cvisit_all([&](const auto & kv) { + auto & [k, v] = kv; + obj[k.to_string()] = v; + }); + return obj; + }()}, + {"buildTrace", + [&] { + auto obj = json::object(); + val.buildTrace.cvisit_all([&](const auto & kv) { + auto & [k, v] = kv; + auto & obj2 = obj[k.to_string(HashFormat::Base64, false)] = json::object(); + for (auto & [k2, v2] : kv.second) + obj2[k2] = v2; + }); + return obj; + }()}, + }; +} + +} // namespace nlohmann diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index 17491055f2d..7b6193c657d 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -113,7 +113,7 @@ StorePaths importPaths(Store & store, Source & source, CheckSigsFlag checkSigs) if (!store.isValidPath(path)) { auto narHash = hashString(HashAlgorithm::SHA256, saved.s); - ValidPathInfo info{path, narHash}; + ValidPathInfo info{path, {store, narHash}}; if (deriver != "") info.deriver = store.parseStorePath(deriver); info.references = references; diff --git 
a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index 805213f0abe..7be3389e073 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -2,15 +2,16 @@ #include "nix/store/globals.hh" #include "nix/util/config-global.hh" #include "nix/store/store-api.hh" -#include "nix/store/s3.hh" #include "nix/util/compression.hh" #include "nix/util/finally.hh" #include "nix/util/callback.hh" #include "nix/util/signals.hh" #include "store-config-private.hh" -#if NIX_WITH_S3_SUPPORT -# include +#include "nix/store/s3-url.hh" +#include +#if NIX_WITH_AWS_AUTH +# include "nix/store/aws-creds.hh" #endif #ifdef __linux__ @@ -47,7 +48,7 @@ struct curlFileTransfer : public FileTransfer std::random_device rd; std::mt19937 mt19937; - struct TransferItem : public std::enable_shared_from_this + struct TransferItem : public std::enable_shared_from_this, public FileTransfer::Item { curlFileTransfer & fileTransfer; FileTransferRequest request; @@ -59,6 +60,7 @@ struct curlFileTransfer : public FileTransfer // buffer to accompany the `req` above char errbuf[CURL_ERROR_SIZE]; bool active = false; // whether the handle has been added to the multi object + bool paused = false; // whether the request has been paused previously std::string statusMsg; unsigned int attempt = 0; @@ -99,7 +101,7 @@ struct curlFileTransfer : public FileTransfer , act(*logger, lvlTalkative, actFileTransfer, - fmt("%sing '%s'", request.verb(), request.uri), + fmt("%s '%s'", request.verb(/*continuous=*/true), request.uri), {request.uri.to_string()}, request.parentAct) , callback(std::move(callback)) @@ -115,7 +117,13 @@ struct curlFileTransfer : public FileTransfer successful response. */ if (successfulStatuses.count(httpStatus)) { writtenToSink += data.size(); - this->request.dataCallback(data); + PauseTransfer needsPause = this->request.dataCallback(data); + if (needsPause == PauseTransfer::Yes) { + /* Smuggle the boolean flag into writeCallback. 
Note that + the finalSink might get called multiple times if there's + decompression going on. */ + paused = true; + } } } else this->result.data.append(data); @@ -144,21 +152,30 @@ struct curlFileTransfer : public FileTransfer curl_slist_free_all(requestHeaders); try { if (!done) - fail(FileTransferError(Interrupted, {}, "download of '%s' was interrupted", request.uri)); + fail(FileTransferError( + Interrupted, {}, "%s of '%s' was interrupted", Uncolored(request.noun()), request.uri)); } catch (...) { ignoreExceptionInDestructor(); } } - void failEx(std::exception_ptr ex) + void failEx(std::exception_ptr ex) noexcept { assert(!done); done = true; + try { + std::rethrow_exception(ex); + } catch (nix::Error & e) { + /* Add more context to the error message. */ + e.addTrace({}, "during %s of '%s'", Uncolored(request.noun()), request.uri.to_string()); + } catch (...) { + /* Can't add more context to the error. */ + } callback.rethrow(ex); } template - void fail(T && e) + void fail(T && e) noexcept { failEx(std::make_exception_ptr(std::forward(e))); } @@ -167,32 +184,38 @@ struct curlFileTransfer : public FileTransfer std::shared_ptr decompressionSink; std::optional errorSink; - std::exception_ptr writeException; + std::exception_ptr callbackException; - size_t writeCallback(void * contents, size_t size, size_t nmemb) - { - try { - size_t realSize = size * nmemb; - result.bodySize += realSize; - - if (!decompressionSink) { - decompressionSink = makeDecompressionSink(encoding, finalSink); - if (!successfulStatuses.count(getHTTPStatus())) { - // In this case we want to construct a TeeSink, to keep - // the response around (which we figure won't be big - // like an actual download should be) to improve error - // messages. 
- errorSink = StringSink{}; - } + size_t writeCallback(void * contents, size_t size, size_t nmemb) noexcept + try { + size_t realSize = size * nmemb; + result.bodySize += realSize; + + if (!decompressionSink) { + decompressionSink = makeDecompressionSink(encoding, finalSink); + if (!successfulStatuses.count(getHTTPStatus())) { + // In this case we want to construct a TeeSink, to keep + // the response around (which we figure won't be big + // like an actual download should be) to improve error + // messages. + errorSink = StringSink{}; } + } - (*decompressionSink)({(char *) contents, realSize}); - - return realSize; - } catch (...) { - writeException = std::current_exception(); - return 0; + (*decompressionSink)({(char *) contents, realSize}); + if (paused) { + /* The callback has signaled that the transfer needs to be + paused. Already consumed data won't be returned twice unlike + when returning CURL_WRITEFUNC_PAUSE. + https://curl-library.cool.haxx.narkive.com/larE1cRA/curl-easy-pause-documentation-question + */ + curl_easy_pause(req, CURLPAUSE_RECV); } + + return realSize; + } catch (...) { + callbackException = std::current_exception(); + return 0; } static size_t writeCallbackWrapper(void * contents, size_t size, size_t nmemb, void * userp) @@ -208,8 +231,8 @@ struct curlFileTransfer : public FileTransfer result.urls.push_back(effectiveUriCStr); } - size_t headerCallback(void * contents, size_t size, size_t nmemb) - { + size_t headerCallback(void * contents, size_t size, size_t nmemb) noexcept + try { size_t realSize = size * nmemb; std::string line((char *) contents, realSize); printMsg(lvlVomit, "got header for '%s': %s", request.uri, trim(line)); @@ -262,6 +285,15 @@ struct curlFileTransfer : public FileTransfer } } return realSize; + } catch (...) { +#if LIBCURL_VERSION_NUM >= 0x075700 + /* https://curl.se/libcurl/c/CURLOPT_HEADERFUNCTION.html: + You can also abort the transfer by returning CURL_WRITEFUNC_ERROR. 
*/ + callbackException = std::current_exception(); + return CURL_WRITEFUNC_ERROR; +#else + return realSize; +#endif } static size_t headerCallbackWrapper(void * contents, size_t size, size_t nmemb, void * userp) @@ -269,14 +301,17 @@ struct curlFileTransfer : public FileTransfer return ((TransferItem *) userp)->headerCallback(contents, size, nmemb); } - int progressCallback(curl_off_t dltotal, curl_off_t dlnow) - { - try { - act.progress(dlnow, dltotal); - } catch (nix::Interrupted &) { - assert(getInterrupted()); - } + int progressCallback(curl_off_t dltotal, curl_off_t dlnow) noexcept + try { + act.progress(dlnow, dltotal); return getInterrupted(); + } catch (nix::Interrupted &) { + assert(getInterrupted()); + return 1; + } catch (...) { + /* Something unexpected has happened like logger throwing an exception. */ + callbackException = std::current_exception(); + return 1; } static int progressCallbackWrapper( @@ -287,27 +322,28 @@ struct curlFileTransfer : public FileTransfer return item.progressCallback(isUpload ? ultotal : dltotal, isUpload ? ulnow : dlnow); } - static int debugCallback(CURL * handle, curl_infotype type, char * data, size_t size, void * userptr) - { + static int debugCallback(CURL * handle, curl_infotype type, char * data, size_t size, void * userptr) noexcept + try { if (type == CURLINFO_TEXT) vomit("curl: %s", chomp(std::string(data, size))); return 0; + } catch (...) { + /* Swallow the exception. Nothing left to do. 
*/ + return 0; } - size_t readOffset = 0; - - size_t readCallback(char * buffer, size_t size, size_t nitems) - { - if (readOffset == request.data->length()) - return 0; - auto count = std::min(size * nitems, request.data->length() - readOffset); - assert(count); - memcpy(buffer, request.data->data() + readOffset, count); - readOffset += count; - return count; + size_t readCallback(char * buffer, size_t size, size_t nitems) noexcept + try { + auto data = request.data; + return data->source->read(buffer, nitems * size); + } catch (EndOfFile &) { + return 0; + } catch (...) { + callbackException = std::current_exception(); + return CURL_READFUNC_ABORT; } - static size_t readCallbackWrapper(char * buffer, size_t size, size_t nitems, void * userp) + static size_t readCallbackWrapper(char * buffer, size_t size, size_t nitems, void * userp) noexcept { return ((TransferItem *) userp)->readCallback(buffer, size, nitems); } @@ -321,23 +357,38 @@ struct curlFileTransfer : public FileTransfer } #endif - size_t seekCallback(curl_off_t offset, int origin) - { + size_t seekCallback(curl_off_t offset, int origin) noexcept + try { + auto source = request.data->source; if (origin == SEEK_SET) { - readOffset = offset; + source->restart(); + source->skip(offset); } else if (origin == SEEK_CUR) { - readOffset += offset; + source->skip(offset); } else if (origin == SEEK_END) { - readOffset = request.data->length() + offset; + NullSink sink{}; + source->drainInto(sink); } return CURL_SEEKFUNC_OK; + } catch (...) { + callbackException = std::current_exception(); + return CURL_SEEKFUNC_FAIL; } - static size_t seekCallbackWrapper(void * clientp, curl_off_t offset, int origin) + static size_t seekCallbackWrapper(void * clientp, curl_off_t offset, int origin) noexcept { return ((TransferItem *) clientp)->seekCallback(offset, origin); } + void unpause() + { + /* Unpausing an already unpaused transfer is a no-op. 
*/ + if (paused) { + curl_easy_pause(req, CURLPAUSE_CONT); + paused = false; + } + } + void init() { if (!req) @@ -383,28 +434,30 @@ struct curlFileTransfer : public FileTransfer if (settings.downloadSpeed.get() > 0) curl_easy_setopt(req, CURLOPT_MAX_RECV_SPEED_LARGE, (curl_off_t) (settings.downloadSpeed.get() * 1024)); - if (request.head) + if (request.method == HttpMethod::Head) curl_easy_setopt(req, CURLOPT_NOBODY, 1); + if (request.method == HttpMethod::Delete) + curl_easy_setopt(req, CURLOPT_CUSTOMREQUEST, "DELETE"); + if (request.data) { - if (request.post) + if (request.method == HttpMethod::Post) { curl_easy_setopt(req, CURLOPT_POST, 1L); - else + curl_easy_setopt(req, CURLOPT_POSTFIELDSIZE_LARGE, (curl_off_t) request.data->sizeHint); + } else if (request.method == HttpMethod::Put) { curl_easy_setopt(req, CURLOPT_UPLOAD, 1L); + curl_easy_setopt(req, CURLOPT_INFILESIZE_LARGE, (curl_off_t) request.data->sizeHint); + } else { + unreachable(); + } curl_easy_setopt(req, CURLOPT_READFUNCTION, readCallbackWrapper); curl_easy_setopt(req, CURLOPT_READDATA, this); - curl_easy_setopt(req, CURLOPT_INFILESIZE_LARGE, (curl_off_t) request.data->length()); curl_easy_setopt(req, CURLOPT_SEEKFUNCTION, seekCallbackWrapper); curl_easy_setopt(req, CURLOPT_SEEKDATA, this); } - if (request.verifyTLS) { - if (settings.caFile != "") - curl_easy_setopt(req, CURLOPT_CAINFO, settings.caFile.get().c_str()); - } else { - curl_easy_setopt(req, CURLOPT_SSL_VERIFYPEER, 0); - curl_easy_setopt(req, CURLOPT_SSL_VERIFYHOST, 0); - } + if (settings.caFile != "") + curl_easy_setopt(req, CURLOPT_CAINFO, settings.caFile.get().c_str()); #if !defined(_WIN32) && LIBCURL_VERSION_NUM >= 0x071000 curl_easy_setopt(req, CURLOPT_SOCKOPTFUNCTION, cloexec_callback); @@ -426,6 +479,24 @@ struct curlFileTransfer : public FileTransfer curl_easy_setopt(req, CURLOPT_ERRORBUFFER, errbuf); errbuf[0] = 0; + // Set up username/password authentication if provided + if (request.usernameAuth) { + curl_easy_setopt(req, 
CURLOPT_USERNAME, request.usernameAuth->username.c_str()); + if (request.usernameAuth->password) { + curl_easy_setopt(req, CURLOPT_PASSWORD, request.usernameAuth->password->c_str()); + } + } + +#if NIX_WITH_AWS_AUTH + // Set up AWS SigV4 signing if this is an S3 request + // Note: AWS SigV4 support guaranteed available (curl >= 7.75.0 checked at build time) + // The username/password (access key ID and secret key) are set via the general + // usernameAuth mechanism above. + if (request.awsSigV4Provider) { + curl_easy_setopt(req, CURLOPT_AWS_SIGV4, request.awsSigV4Provider->c_str()); + } +#endif + result.data.clear(); result.bodySize = 0; } @@ -440,7 +511,7 @@ struct curlFileTransfer : public FileTransfer debug( "finished %s of '%s'; curl status = %d, HTTP status = %d, body = %d bytes, duration = %.2f s", - request.verb(), + request.noun(), request.uri, code, httpStatus, @@ -453,7 +524,7 @@ struct curlFileTransfer : public FileTransfer try { decompressionSink->finish(); } catch (...) { - writeException = std::current_exception(); + callbackException = std::current_exception(); } } @@ -462,8 +533,8 @@ struct curlFileTransfer : public FileTransfer httpStatus = 304; } - if (writeException) - failEx(writeException); + if (callbackException) + failEx(callbackException); else if (code == CURLE_OK && successfulStatuses.count(httpStatus)) { result.cached = httpStatus == 304; @@ -540,7 +611,7 @@ struct curlFileTransfer : public FileTransfer Interrupted, std::move(response), "%s of '%s' was interrupted", - request.verb(), + request.noun(), request.uri) : httpStatus != 0 ? 
FileTransferError( @@ -577,7 +648,14 @@ struct curlFileTransfer : public FileTransfer decompressionSink.reset(); errorSink.reset(); embargo = std::chrono::steady_clock::now() + std::chrono::milliseconds(ms); - fileTransfer.enqueueItem(shared_from_this()); + try { + fileTransfer.enqueueItem(ref{shared_from_this()}); + } catch (const nix::Error & e) { + // If enqueue fails (e.g., during shutdown), fail the transfer properly + // instead of letting the exception propagate, which would leave done=false + // and cause the destructor to attempt a second callback invocation + fail(std::move(exc)); + } } else fail(std::move(exc)); } @@ -588,24 +666,24 @@ struct curlFileTransfer : public FileTransfer { struct EmbargoComparator { - bool operator()(const std::shared_ptr & i1, const std::shared_ptr & i2) + bool operator()(const ref & i1, const ref & i2) { return i1->embargo > i2->embargo; } }; - std:: - priority_queue, std::vector>, EmbargoComparator> - incoming; + std::priority_queue, std::vector>, EmbargoComparator> incoming; + std::vector> unpause; private: bool quitting = false; public: void quit() { quitting = true; - /* We wil not be processing any more incoming requests */ + /* We will not be processing any more incoming requests */ while (!incoming.empty()) incoming.pop(); + unpause.clear(); } bool isQuitting() @@ -768,12 +846,23 @@ struct curlFileTransfer : public FileTransfer } for (auto & item : incoming) { - debug("starting %s of %s", item->request.verb(), item->request.uri); + debug("starting %s of %s", item->request.noun(), item->request.uri); item->init(); curl_multi_add_handle(curlm, item->req); item->active = true; items[item->req] = item; } + + /* NOTE: Unpausing may invoke callbacks to flush all buffers. 
*/ + auto unpause = [&]() { + auto state(state_.lock()); + auto res = state->unpause; + state->unpause.clear(); + return res; + }(); + + for (auto & item : unpause) + item->unpause(); } debug("download thread shutting down"); @@ -798,9 +887,10 @@ struct curlFileTransfer : public FileTransfer } } - void enqueueItem(std::shared_ptr item) + ItemHandle enqueueItem(ref item) { - if (item->request.data && item->request.uri.scheme() != "http" && item->request.uri.scheme() != "https") + if (item->request.data && item->request.uri.scheme() != "http" && item->request.uri.scheme() != "https" + && item->request.uri.scheme() != "s3") throw nix::Error("uploading to '%s' is not supported", item->request.uri.to_string()); { @@ -812,43 +902,34 @@ struct curlFileTransfer : public FileTransfer #ifndef _WIN32 // TODO need graceful async exit support on Windows? writeFull(wakeupPipe.writeSide.get(), " "); #endif + + return ItemHandle(static_cast(*item)); } - void enqueueFileTransfer(const FileTransferRequest & request, Callback callback) override + ItemHandle enqueueFileTransfer(const FileTransferRequest & request, Callback callback) override { - /* Ugly hack to support s3:// URIs. 
*/ + /* Handle s3:// URIs by converting to HTTPS and optionally adding auth */ if (request.uri.scheme() == "s3") { - // FIXME: do this on a worker thread - try { -#if NIX_WITH_S3_SUPPORT - auto parsed = ParsedS3URL::parse(request.uri.parsed()); - - std::string profile = parsed.profile.value_or(""); - std::string region = parsed.region.value_or(Aws::Region::US_EAST_1); - std::string scheme = parsed.scheme.value_or(""); - std::string endpoint = parsed.getEncodedEndpoint().value_or(""); - - S3Helper s3Helper(profile, region, scheme, endpoint); - - // FIXME: implement ETag - auto s3Res = s3Helper.getObject(parsed.bucket, encodeUrlPath(parsed.key)); - FileTransferResult res; - if (!s3Res.data) - throw FileTransferError(NotFound, {}, "S3 object '%s' does not exist", request.uri); - res.data = std::move(*s3Res.data); - res.urls.push_back(request.uri.to_string()); - callback(std::move(res)); -#else - throw nix::Error( - "cannot download '%s' because Nix is not built with S3 support", request.uri.to_string()); -#endif - } catch (...) { - callback.rethrow(); - } - return; + auto modifiedRequest = request; + modifiedRequest.setupForS3(); + return enqueueItem(make_ref(*this, std::move(modifiedRequest), std::move(callback))); } - enqueueItem(std::make_shared(*this, request, std::move(callback))); + return enqueueItem(make_ref(*this, request, std::move(callback))); + } + + void unpauseTransfer(ref item) + { + auto state(state_.lock()); + state->unpause.push_back(std::move(item)); +#ifndef _WIN32 // TODO need graceful async exit support on Windows? 
+ writeFull(wakeupPipe.writeSide.get(), " "); +#endif + } + + void unpauseTransfer(ItemHandle handle) override + { + unpauseTransfer(ref{static_cast(handle.item.get()).shared_from_this()}); } }; @@ -877,6 +958,36 @@ std::shared_ptr resetFileTransfer() return prev; } +void FileTransferRequest::setupForS3() +{ + auto parsedS3 = ParsedS3URL::parse(uri.parsed()); + // Update the request URI to use HTTPS (works without AWS SDK) + uri = parsedS3.toHttpsUrl(); + +#if NIX_WITH_AWS_AUTH + // Auth-specific code only compiled when AWS support is available + awsSigV4Provider = "aws:amz:" + parsedS3.region.value_or("us-east-1") + ":s3"; + + // check if the request already has pre-resolved credentials + std::optional sessionToken; + if (usernameAuth) { + debug("Using pre-resolved AWS credentials from parent process"); + sessionToken = preResolvedAwsSessionToken; + } else if (auto creds = getAwsCredentialsProvider()->maybeGetCredentials(parsedS3)) { + usernameAuth = UsernameAuth{ + .username = creds->accessKeyId, + .password = creds->secretAccessKey, + }; + sessionToken = creds->sessionToken; + } + if (sessionToken) + headers.emplace_back("x-amz-security-token", *sessionToken); +#else + // When built without AWS support, just try as public bucket + debug("S3 request without authentication (built without AWS support)"); +#endif +} + std::future FileTransfer::enqueueFileTransfer(const FileTransferRequest & request) { auto promise = std::make_shared>(); @@ -901,6 +1012,11 @@ FileTransferResult FileTransfer::upload(const FileTransferRequest & request) return enqueueFileTransfer(request).get(); } +FileTransferResult FileTransfer::deleteResource(const FileTransferRequest & request) +{ + return enqueueFileTransfer(request).get(); +} + void FileTransfer::download( FileTransferRequest && request, Sink & sink, std::function resultCallback) { @@ -915,6 +1031,7 @@ void FileTransfer::download( struct State { bool quit = false; + bool paused = false; std::exception_ptr exc; std::string data; 
std::condition_variable avail, request; @@ -930,31 +1047,38 @@ void FileTransfer::download( state->request.notify_one(); }); - request.dataCallback = [_state](std::string_view data) { + request.dataCallback = [_state, uri = request.uri.to_string()](std::string_view data) -> PauseTransfer { auto state(_state->lock()); if (state->quit) - return; - - /* If the buffer is full, then go to sleep until the calling - thread wakes us up (i.e. when it has removed data from the - buffer). We don't wait forever to prevent stalling the - download thread. (Hopefully sleeping will throttle the - sender.) */ - if (state->data.size() > fileTransferSettings.downloadBufferSize) { - debug("download buffer is full; going to sleep"); - static bool haveWarned = false; - warnOnce(haveWarned, "download buffer is full; consider increasing the 'download-buffer-size' setting"); - state.wait_for(state->request, std::chrono::seconds(10)); - } + return PauseTransfer::No; /* Append data to the buffer and wake up the calling thread. */ state->data.append(data); state->avail.notify_one(); + + if (state->data.size() <= fileTransferSettings.downloadBufferSize) + return PauseTransfer::No; + + /* dataCallback gets called multiple times by an intermediate sink. Only + issue the debug message the first time around. */ + if (!state->paused) + debug( + "pausing transfer for '%s': download buffer is full (%d > %d)", + uri, + state->data.size(), + fileTransferSettings.downloadBufferSize); + + state->paused = true; + + /* Technically the buffer might become larger than + downloadBufferSize, but with sinks there's no way to avoid + consuming data. 
*/ + return PauseTransfer::Yes; }; - enqueueFileTransfer( + auto handle = enqueueFileTransfer( request, {[_state, resultCallback{std::move(resultCallback)}](std::future fut) { auto state(_state->lock()); state->quit = true; @@ -987,6 +1111,10 @@ void FileTransfer::download( return; } + if (state->paused) { + unpauseTransfer(handle); + state->paused = false; + } state.wait(state->avail); if (state->data.empty()) diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index f200926e842..37f148cbc43 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -5,6 +5,7 @@ #include "nix/util/finally.hh" #include "nix/util/unix-domain-socket.hh" #include "nix/util/signals.hh" +#include "nix/util/util.hh" #include "nix/store/posix-fs-canonicalise.hh" #include "store-config-private.hh" @@ -911,9 +912,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) #endif ; - printInfo( - "note: currently hard linking saves %.2f MiB", - ((unsharedSize - actualSize - overhead) / (1024.0 * 1024.0))); + printInfo("note: hard linking is currently saving %s", renderSize(unsharedSize - actualSize - overhead)); } /* While we're at it, vacuum the database. */ diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index e2873a87bec..72fea31775e 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -116,7 +116,7 @@ void loadConfFile(AbstractConfig & config) } }; - applyConfigFile(settings.nixConfDir + "/nix.conf"); + applyConfigFile((settings.nixConfDir / "nix.conf").string()); /* We only want to send overrides to the daemon, i.e. stuff from ~/.nix/nix.conf or the command line. 
*/ @@ -145,7 +145,7 @@ std::vector getUserConfigFiles() std::vector files; auto dirs = getConfigDirs(); for (auto & dir : dirs) { - files.insert(files.end(), dir + "/nix.conf"); + files.insert(files.end(), (dir / "nix.conf").string()); } return files; } @@ -258,6 +258,15 @@ Path Settings::getDefaultSSLCertFile() return ""; } +const ExternalBuilder * Settings::findExternalDerivationBuilderIfSupported(const Derivation & drv) +{ + if (auto it = std::ranges::find_if( + externalBuilders.get(), [&](const auto & handler) { return handler.systems.contains(drv.platform); }); + it != externalBuilders.get().end()) + return &*it; + return nullptr; +} + std::string nixVersion = PACKAGE_VERSION; const std::string determinateNixVersion = DETERMINATE_NIX_VERSION; @@ -381,8 +390,6 @@ unsigned int MaxBuildJobsSetting::parse(const std::string & str) const } } -NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE(Settings::ExternalBuilder, systems, program, args); - template<> Settings::ExternalBuilders BaseSetting::parse(const std::string & str) const { @@ -481,7 +488,7 @@ void initLibStore(bool loadConfig) /* On macOS, don't use the per-session TMPDIR (as set e.g. by sshd). This breaks build users because they don't have access to the TMPDIR, in particular in ‘nix-store --serve’. 
*/ - if (hasPrefix(defaultTempDir(), "/var/folders/")) + if (hasPrefix(defaultTempDir().string(), "/var/folders/")) unsetenv("TMPDIR"); #endif diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index 5d4fba16331..ef6ae92a44d 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -4,7 +4,6 @@ #include "nix/store/nar-info-disk-cache.hh" #include "nix/util/callback.hh" #include "nix/store/store-registration.hh" -#include "nix/util/compression.hh" namespace nix { @@ -51,212 +50,221 @@ std::string HttpBinaryCacheStoreConfig::doc() ; } -class HttpBinaryCacheStore : public virtual BinaryCacheStore +HttpBinaryCacheStore::HttpBinaryCacheStore(ref config) + : Store{*config} // TODO it will actually mutate the configuration + , BinaryCacheStore{*config} + , config{config} { - struct State - { - bool enabled = true; - std::chrono::steady_clock::time_point disabledUntil; - }; - - Sync _state; + diskCache = getNarInfoDiskCache(); +} -public: +void HttpBinaryCacheStore::init() +{ + // FIXME: do this lazily? 
+ // For consistent cache key handling, use the reference without parameters + // This matches what's used in Store::queryPathInfo() lookups + auto cacheKey = config->getReference().render(/*withParams=*/false); + + if (auto cacheInfo = diskCache->upToDateCacheExists(cacheKey)) { + config->wantMassQuery.setDefault(cacheInfo->wantMassQuery); + config->priority.setDefault(cacheInfo->priority); + } else { + try { + BinaryCacheStore::init(); + } catch (UploadToHTTP &) { + throw Error("'%s' does not appear to be a binary cache", config->cacheUri.to_string()); + } + diskCache->createCache(cacheKey, config->storeDir, config->wantMassQuery, config->priority); + } +} - using Config = HttpBinaryCacheStoreConfig; +std::optional HttpBinaryCacheStore::getCompressionMethod(const std::string & path) +{ + if (hasSuffix(path, ".narinfo") && !config->narinfoCompression.get().empty()) + return config->narinfoCompression; + else if (hasSuffix(path, ".ls") && !config->lsCompression.get().empty()) + return config->lsCompression; + else if (hasPrefix(path, "log/") && !config->logCompression.get().empty()) + return config->logCompression; + else + return std::nullopt; +} - ref config; +void HttpBinaryCacheStore::maybeDisable() +{ + auto state(_state.lock()); + if (state->enabled && settings.tryFallback) { + int t = 60; + printError("disabling binary cache '%s' for %s seconds", config->getHumanReadableURI(), t); + state->enabled = false; + state->disabledUntil = std::chrono::steady_clock::now() + std::chrono::seconds(t); + } +} - HttpBinaryCacheStore(ref config) - : Store{*config} // TODO it will actually mutate the configuration - , BinaryCacheStore{*config} - , config{config} - { - diskCache = getNarInfoDiskCache(); +void HttpBinaryCacheStore::checkEnabled() +{ + auto state(_state.lock()); + if (state->enabled) + return; + if (std::chrono::steady_clock::now() > state->disabledUntil) { + state->enabled = true; + debug("re-enabling binary cache '%s'", config->getHumanReadableURI()); + 
return; } + throw SubstituterDisabled("substituter '%s' is disabled", config->getHumanReadableURI()); +} - void init() override - { - // FIXME: do this lazily? - if (auto cacheInfo = diskCache->upToDateCacheExists(config->cacheUri.to_string())) { - config->wantMassQuery.setDefault(cacheInfo->wantMassQuery); - config->priority.setDefault(cacheInfo->priority); - } else { - try { - BinaryCacheStore::init(); - } catch (UploadToHTTP &) { - throw Error("'%s' does not appear to be a binary cache", config->cacheUri.to_string()); - } - diskCache->createCache( - config->cacheUri.to_string(), config->storeDir, config->wantMassQuery, config->priority); - } +bool HttpBinaryCacheStore::fileExists(const std::string & path) +{ + checkEnabled(); + + try { + FileTransferRequest request(makeRequest(path)); + request.method = HttpMethod::Head; + getFileTransfer()->download(request); + return true; + } catch (FileTransferError & e) { + /* S3 buckets return 403 if a file doesn't exist and the + bucket is unlistable, so treat 403 as 404. 
*/ + if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden) + return false; + maybeDisable(); + throw; } +} -protected: +void HttpBinaryCacheStore::upload( + std::string_view path, + RestartableSource & source, + uint64_t sizeHint, + std::string_view mimeType, + std::optional headers) +{ + auto req = makeRequest(path); + req.method = HttpMethod::Put; - void maybeDisable() - { - auto state(_state.lock()); - if (state->enabled && settings.tryFallback) { - int t = 60; - printError("disabling binary cache '%s' for %s seconds", config->getHumanReadableURI(), t); - state->enabled = false; - state->disabledUntil = std::chrono::steady_clock::now() + std::chrono::seconds(t); - } + if (headers) { + req.headers.reserve(req.headers.size() + headers->size()); + std::ranges::move(std::move(*headers), std::back_inserter(req.headers)); } - void checkEnabled() - { - auto state(_state.lock()); - if (state->enabled) - return; - if (std::chrono::steady_clock::now() > state->disabledUntil) { - state->enabled = true; - debug("re-enabling binary cache '%s'", config->getHumanReadableURI()); - return; - } - throw SubstituterDisabled("substituter '%s' is disabled", config->getHumanReadableURI()); - } + req.data = {sizeHint, source}; + req.mimeType = mimeType; - bool fileExists(const std::string & path) override - { - checkEnabled(); + getFileTransfer()->upload(req); +} - try { - FileTransferRequest request(makeRequest(path)); - request.head = true; - getFileTransfer()->download(request); - return true; - } catch (FileTransferError & e) { - /* S3 buckets return 403 if a file doesn't exist and the - bucket is unlistable, so treat 403 as 404. 
*/ - if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden) - return false; - maybeDisable(); - throw; +void HttpBinaryCacheStore::upsertFile( + const std::string & path, RestartableSource & source, const std::string & mimeType, uint64_t sizeHint) +{ + try { + if (auto compressionMethod = getCompressionMethod(path)) { + CompressedSource compressed(source, *compressionMethod); + Headers headers = {{"Content-Encoding", *compressionMethod}}; + upload(path, compressed, compressed.size(), mimeType, std::move(headers)); + } else { + upload(path, source, sizeHint, mimeType, std::nullopt); } + } catch (FileTransferError & e) { + UploadToHTTP err(e.message()); + err.addTrace({}, "while uploading to HTTP binary cache at '%s'", config->cacheUri.to_string()); + throw err; } +} - void upsertFile( - const std::string & path, - std::shared_ptr> istream, - const std::string & mimeType) override - { - auto req = makeRequest(path); - - auto data = StreamToSourceAdapter(istream).drain(); - - // Determine compression method based on file type - std::string compressionMethod; - if (hasSuffix(path, ".narinfo")) - compressionMethod = config->narinfoCompression; - else if (hasSuffix(path, ".ls")) - compressionMethod = config->lsCompression; - else if (hasPrefix(path, "log/")) - compressionMethod = config->logCompression; - - // Apply compression if configured - if (!compressionMethod.empty()) { - data = compress(compressionMethod, data); - req.headers.emplace_back("Content-Encoding", compressionMethod); - } +FileTransferRequest HttpBinaryCacheStore::makeRequest(std::string_view path) +{ + /* Otherwise the last path fragment will get discarded. */ + auto cacheUriWithTrailingSlash = config->cacheUri; + if (!cacheUriWithTrailingSlash.path.empty()) + cacheUriWithTrailingSlash.path.push_back(""); + + /* path is not a path, but a full relative or absolute + URL, e.g. 
we've seen in the wild NARINFO files have a URL + field which is + `nar/15f99rdaf26k39knmzry4xd0d97wp6yfpnfk1z9avakis7ipb9yg.nar?hash=zphkqn2wg8mnvbkixnl2aadkbn0rcnfj` + (note the query param) and that gets passed here. */ + auto result = parseURLRelative(path, cacheUriWithTrailingSlash); + + /* For S3 URLs, preserve query parameters from the base URL when the + relative path doesn't have its own query parameters. This is needed + to preserve S3-specific parameters like endpoint and region. */ + if (config->cacheUri.scheme == "s3" && result.query.empty()) { + result.query = config->cacheUri.query; + } - req.data = std::move(data); - req.mimeType = mimeType; + return FileTransferRequest(result); +} - try { - getFileTransfer()->upload(req); - } catch (FileTransferError & e) { - throw UploadToHTTP( - "while uploading to HTTP binary cache at '%s': %s", config->cacheUri.to_string(), e.msg()); - } +void HttpBinaryCacheStore::getFile(const std::string & path, Sink & sink) +{ + checkEnabled(); + auto request(makeRequest(path)); + try { + getFileTransfer()->download(std::move(request), sink); + } catch (FileTransferError & e) { + if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden) + throw NoSuchBinaryCacheFile( + "file '%s' does not exist in binary cache '%s'", path, config->getHumanReadableURI()); + maybeDisable(); + throw; } +} - FileTransferRequest makeRequest(const std::string & path) - { - /* Otherwise the last path fragment will get discarded. */ - auto cacheUriWithTrailingSlash = config->cacheUri; - if (!cacheUriWithTrailingSlash.path.empty()) - cacheUriWithTrailingSlash.path.push_back(""); - - /* path is not a path, but a full relative or absolute - URL, e.g. we've seen in the wild NARINFO files have a URL - field which is - `nar/15f99rdaf26k39knmzry4xd0d97wp6yfpnfk1z9avakis7ipb9yg.nar?hash=zphkqn2wg8mnvbkixnl2aadkbn0rcnfj` - (note the query param) and that gets passed here. 
*/ - return FileTransferRequest(parseURLRelative(path, cacheUriWithTrailingSlash)); - } +void HttpBinaryCacheStore::getFile(const std::string & path, Callback> callback) noexcept +{ + auto callbackPtr = std::make_shared(std::move(callback)); - void getFile(const std::string & path, Sink & sink) override - { + try { checkEnabled(); - auto request(makeRequest(path)); - try { - getFileTransfer()->download(std::move(request), sink); - } catch (FileTransferError & e) { - if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden) - throw NoSuchBinaryCacheFile( - "file '%s' does not exist in binary cache '%s'", path, config->getHumanReadableURI()); - maybeDisable(); - throw; - } - } - void getFile(const std::string & path, Callback> callback) noexcept override - { - auto callbackPtr = std::make_shared(std::move(callback)); + auto request(makeRequest(path)); - try { - checkEnabled(); - - auto request(makeRequest(path)); - - getFileTransfer()->enqueueFileTransfer( - request, {[callbackPtr, this](std::future result) { - try { - (*callbackPtr)(std::move(result.get().data)); - } catch (FileTransferError & e) { - if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden) - return (*callbackPtr)({}); - maybeDisable(); - callbackPtr->rethrow(); - } catch (...) { - callbackPtr->rethrow(); - } - }}); - - } catch (...) { - callbackPtr->rethrow(); - return; - } + getFileTransfer()->enqueueFileTransfer(request, {[callbackPtr, this](std::future result) { + try { + (*callbackPtr)(std::move(result.get().data)); + } catch (FileTransferError & e) { + if (e.error == FileTransfer::NotFound + || e.error == FileTransfer::Forbidden) + return (*callbackPtr)({}); + maybeDisable(); + callbackPtr->rethrow(); + } catch (...) { + callbackPtr->rethrow(); + } + }}); + + } catch (...) 
{ + callbackPtr->rethrow(); + return; } +} - std::optional getNixCacheInfo() override - { - try { - auto result = getFileTransfer()->download(makeRequest(cacheInfoFile)); - return result.data; - } catch (FileTransferError & e) { - if (e.error == FileTransfer::NotFound) - return std::nullopt; - maybeDisable(); - throw; - } +std::optional HttpBinaryCacheStore::getNixCacheInfo() +{ + try { + auto result = getFileTransfer()->download(makeRequest(cacheInfoFile)); + return result.data; + } catch (FileTransferError & e) { + if (e.error == FileTransfer::NotFound) + return std::nullopt; + maybeDisable(); + throw; } +} - /** - * This isn't actually necessary read only. We support "upsert" now, so we - * have a notion of authentication via HTTP POST/PUT. - * - * For now, we conservatively say we don't know. - * - * \todo try to expose our HTTP authentication status. - */ - std::optional isTrustedClient() override - { - return std::nullopt; - } -}; +/** + * This isn't actually necessary read only. We support "upsert" now, so we + * have a notion of authentication via HTTP POST/PUT. + * + * For now, we conservatively say we don't know. + * + * \todo try to expose our HTTP authentication status. 
+ */ +std::optional HttpBinaryCacheStore::isTrustedClient() +{ + return std::nullopt; +} ref HttpBinaryCacheStore::Config::openStore() const { diff --git a/src/libstore/include/nix/store/aws-creds.hh b/src/libstore/include/nix/store/aws-creds.hh index 16643c55552..30f6592a06f 100644 --- a/src/libstore/include/nix/store/aws-creds.hh +++ b/src/libstore/include/nix/store/aws-creds.hh @@ -2,9 +2,10 @@ ///@file #include "nix/store/config.hh" -#if NIX_WITH_CURL_S3 +#if NIX_WITH_AWS_AUTH # include "nix/store/s3-url.hh" +# include "nix/util/ref.hh" # include "nix/util/error.hh" # include @@ -33,41 +34,53 @@ struct AwsCredentials } }; -/** - * Exception thrown when AWS authentication fails - */ -MakeError(AwsAuthError, Error); +class AwsAuthError : public Error +{ + std::optional errorCode; -/** - * Get AWS credentials for the given profile. - * This function automatically caches credential providers to avoid - * creating multiple providers for the same profile. - * - * @param profile The AWS profile name (empty string for default profile) - * @return AWS credentials - * @throws AwsAuthError if credentials cannot be resolved - */ -AwsCredentials getAwsCredentials(const std::string & profile = ""); +public: + using Error::Error; + AwsAuthError(int errorCode); -/** - * Invalidate cached credentials for a profile (e.g., on authentication failure). - * The next request for this profile will create a new provider. - * - * @param profile The AWS profile name to invalidate - */ -void invalidateAwsCredentials(const std::string & profile); + std::optional getErrorCode() const + { + return errorCode; + } +}; + +class AwsCredentialProvider +{ +public: + /** + * Get AWS credentials for the given URL. 
+ * + * @param url The S3 url to get the credentials for + * @return AWS credentials + * @throws AwsAuthError if credentials cannot be resolved + */ + virtual AwsCredentials getCredentials(const ParsedS3URL & url) = 0; + + std::optional maybeGetCredentials(const ParsedS3URL & url) + { + try { + return getCredentials(url); + } catch (AwsAuthError & e) { + return std::nullopt; + } + } + + virtual ~AwsCredentialProvider() {} +}; /** - * Clear all cached credential providers. - * Typically called during application cleanup. + * Create a new instancee of AwsCredentialProvider. */ -void clearAwsCredentialsCache(); +ref makeAwsCredentialsProvider(); /** - * Pre-resolve AWS credentials for S3 URLs. - * Used to cache credentials in parent process before forking. + * Get a reference to the global AwsCredentialProvider. */ -AwsCredentials preResolveAwsCredentials(const ParsedS3URL & s3Url); +ref getAwsCredentialsProvider(); } // namespace nix #endif diff --git a/src/libstore/include/nix/store/binary-cache-store.hh b/src/libstore/include/nix/store/binary-cache-store.hh index c316b1199b4..e7b3d07ebb6 100644 --- a/src/libstore/include/nix/store/binary-cache-store.hh +++ b/src/libstore/include/nix/store/binary-cache-store.hh @@ -65,7 +65,9 @@ struct BinaryCacheStoreConfig : virtual StoreConfig * @note subclasses must implement at least one of the two * virtual getFile() methods. */ -struct BinaryCacheStore : virtual Store, virtual LogStore +struct alignas(8) /* Work around ASAN failures on i686-linux. 
*/ + BinaryCacheStore : virtual Store, + virtual LogStore { using Config = BinaryCacheStoreConfig; @@ -80,25 +82,45 @@ private: protected: - // The prefix under which realisation infos will be stored - const std::string realisationsPrefix = "realisations"; + /** + * The prefix under which realisation infos will be stored + */ + constexpr const static std::string realisationsPrefix = "realisations"; - const std::string cacheInfoFile = "nix-cache-info"; + constexpr const static std::string cacheInfoFile = "nix-cache-info"; BinaryCacheStore(Config &); + /** + * Compute the path to the given realisation + * + * It's `${realisationsPrefix}/${drvOutput}.doi`. + */ + std::string makeRealisationPath(const DrvOutput & id); + public: virtual bool fileExists(const std::string & path) = 0; virtual void upsertFile( - const std::string & path, std::shared_ptr> istream, const std::string & mimeType) = 0; + const std::string & path, RestartableSource & source, const std::string & mimeType, uint64_t sizeHint) = 0; + + void upsertFile( + const std::string & path, + // FIXME: use std::string_view + std::string && data, + const std::string & mimeType, + uint64_t sizeHint); void upsertFile( const std::string & path, // FIXME: use std::string_view std::string && data, - const std::string & mimeType); + const std::string & mimeType) + { + auto size = data.size(); + upsertFile(path, std::move(data), mimeType, size); + } /** * Dump the contents of the specified file to a sink. 
@@ -175,7 +197,7 @@ public: void registerDrvOutput(const Realisation & info) override; void queryRealisationUncached( - const DrvOutput &, Callback> callback) noexcept override; + const DrvOutput &, Callback> callback) noexcept override; void narFromPath(const StorePath & path, Sink & sink) override; diff --git a/src/libstore/include/nix/store/build-result.hh b/src/libstore/include/nix/store/build-result.hh index 3333132884d..bbf4de6310a 100644 --- a/src/libstore/include/nix/store/build-result.hh +++ b/src/libstore/include/nix/store/build-result.hh @@ -7,8 +7,7 @@ #include "nix/store/derived-path.hh" #include "nix/store/realisation.hh" - -#include +#include "nix/util/json-impls.hh" namespace nix { @@ -188,7 +187,7 @@ struct KeyedBuildResult : BuildResult } }; -void to_json(nlohmann::json & json, const BuildResult & buildResult); -void to_json(nlohmann::json & json, const KeyedBuildResult & buildResult); - } // namespace nix + +JSON_IMPL(nix::BuildResult) +JSON_IMPL(nix::KeyedBuildResult) diff --git a/src/libstore/include/nix/store/build/derivation-builder.hh b/src/libstore/include/nix/store/build/derivation-builder.hh index 30a8e081c84..4d1d55efc59 100644 --- a/src/libstore/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/include/nix/store/build/derivation-builder.hh @@ -1,12 +1,15 @@ #pragma once ///@file +#include + #include "nix/store/build-result.hh" #include "nix/store/derivation-options.hh" #include "nix/store/build/derivation-building-misc.hh" #include "nix/store/derivations.hh" #include "nix/store/parsed-derivations.hh" #include "nix/util/processes.hh" +#include "nix/util/json-impls.hh" #include "nix/store/restricted-store.hh" #include "nix/store/build/derivation-env-desugar.hh" @@ -59,14 +62,14 @@ struct DerivationBuilderParams /** * The derivation stored at drvPath. */ - const Derivation & drv; + const BasicDerivation & drv; /** * The derivation options of `drv`. * * @todo this should be part of `Derivation`. 
*/ - const DerivationOptions & drvOptions; + const DerivationOptions & drvOptions; // The remainder is state held during the build. @@ -184,9 +187,28 @@ struct DerivationBuilder : RestrictionContext virtual bool killChild() = 0; }; +struct ExternalBuilder +{ + StringSet systems; + Path program; + std::vector args; +}; + #ifndef _WIN32 // TODO enable `DerivationBuilder` on Windows std::unique_ptr makeDerivationBuilder( LocalStore & store, std::unique_ptr miscMethods, DerivationBuilderParams params); + +/** + * @param handler Must be chosen such that it supports the given + * derivation. + */ +std::unique_ptr makeExternalDerivationBuilder( + LocalStore & store, + std::unique_ptr miscMethods, + DerivationBuilderParams params, + const ExternalBuilder & handler); #endif } // namespace nix + +JSON_IMPL(nix::ExternalBuilder) diff --git a/src/libstore/include/nix/store/build/derivation-building-goal.hh b/src/libstore/include/nix/store/build/derivation-building-goal.hh index edb49602489..be95c796b05 100644 --- a/src/libstore/include/nix/store/build/derivation-building-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-building-goal.hh @@ -29,8 +29,17 @@ typedef enum { rpAccept, rpDecline, rpPostpone } HookReply; */ struct DerivationBuildingGoal : public Goal { + /** + * @param storeDerivation Whether to store the derivation in + * `worker.store`. This is useful for newly-resolved derivations. In this + * case, the derivation was not created a priori, e.g. purely (or close + * enough) from evaluation of the Nix language, but also depends on the + * exact content produced by upstream builds. It is strongly advised to + * have a permanent record of such a resolved derivation in order to + * faithfully reconstruct the build history. 
+ */ DerivationBuildingGoal( - const StorePath & drvPath, const Derivation & drv, Worker & worker, BuildMode buildMode = bmNormal); + const StorePath & drvPath, const Derivation & drv, Worker & worker, BuildMode buildMode, bool storeDerivation); ~DerivationBuildingGoal(); private: @@ -43,8 +52,6 @@ private: */ std::unique_ptr drv; - std::unique_ptr drvOptions; - /** * The remainder is state held during the build. */ @@ -100,13 +107,14 @@ private: /** * The states. */ - Co gaveUpOnSubstitution(); + Co gaveUpOnSubstitution(bool storeDerivation); Co tryToBuild(); /** * Is the build hook willing to perform the build? */ - HookReply tryBuildHook(const std::map & initialOutputs); + HookReply tryBuildHook( + const std::map & initialOutputs, const DerivationOptions & drvOptions); /** * Open a log file and a pipe to it. diff --git a/src/libstore/include/nix/store/build/derivation-env-desugar.hh b/src/libstore/include/nix/store/build/derivation-env-desugar.hh index 6e2efa6bb4d..a10ec9fa873 100644 --- a/src/libstore/include/nix/store/build/derivation-env-desugar.hh +++ b/src/libstore/include/nix/store/build/derivation-env-desugar.hh @@ -8,6 +8,7 @@ namespace nix { class Store; struct Derivation; +template struct DerivationOptions; /** @@ -77,7 +78,10 @@ struct DesugaredEnv * just part of `Derivation`. 
*/ static DesugaredEnv create( - Store & store, const Derivation & drv, const DerivationOptions & drvOptions, const StorePathSet & inputPaths); + Store & store, + const Derivation & drv, + const DerivationOptions & drvOptions, + const StorePathSet & inputPaths); }; } // namespace nix diff --git a/src/libstore/include/nix/store/build/derivation-goal.hh b/src/libstore/include/nix/store/build/derivation-goal.hh index e05bf1c0b73..0fe610987fc 100644 --- a/src/libstore/include/nix/store/build/derivation-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-goal.hh @@ -40,12 +40,16 @@ struct DerivationGoal : public Goal */ OutputName wantedOutput; + /** + * @param storeDerivation See `DerivationBuildingGoal`. This is just passed along. + */ DerivationGoal( const StorePath & drvPath, const Derivation & drv, const OutputName & wantedOutput, Worker & worker, - BuildMode buildMode = bmNormal); + BuildMode buildMode, + bool storeDerivation); ~DerivationGoal() = default; void timedOut(Error && ex) override @@ -80,7 +84,7 @@ private: /** * The states. */ - Co haveDerivation(); + Co haveDerivation(bool storeDerivation); /** * Return `std::nullopt` if the output is unknown, e.g. un unbuilt @@ -89,17 +93,17 @@ private: * of the wanted output, and a `PathStatus` with the * current status of that output. */ - std::optional> checkPathValidity(); + std::optional> checkPathValidity(); /** * Aborts if any output is not valid or corrupt, and otherwise * returns a 'Realisation' for the wanted output. 
*/ - Realisation assertPathValidity(); + UnkeyedRealisation assertPathValidity(); Co repairClosure(); - Done doneSuccess(BuildResult::Success::Status status, Realisation builtOutput); + Done doneSuccess(BuildResult::Success::Status status, UnkeyedRealisation builtOutput); Done doneFailure(BuildError ex); }; diff --git a/src/libstore/include/nix/store/build/derivation-resolution-goal.hh b/src/libstore/include/nix/store/build/derivation-resolution-goal.hh new file mode 100644 index 00000000000..fb4c2a34635 --- /dev/null +++ b/src/libstore/include/nix/store/build/derivation-resolution-goal.hh @@ -0,0 +1,81 @@ +#pragma once +///@file + +#include "nix/store/derivations.hh" +#include "nix/store/derivation-options.hh" +#include "nix/store/build/derivation-building-misc.hh" +#include "nix/store/store-api.hh" +#include "nix/store/build/goal.hh" + +namespace nix { + +struct BuilderFailureError; + +/** + * A goal for resolving a derivation. Resolving a derivation (@see + * `Derivation::tryResolve`) simplifies its inputs, replacing + * `inputDrvs` with `inputSrcs`. + * + * Conceptually, we resolve all derivations. For input-addressed + * derivations (that don't transtively depend on content-addressed + * derivations), however, we don't actually use the resolved derivation, + * because the output paths would appear invalid (if we tried to verify + * them), since they are computed from the original, unresolved inputs. + * + * That said, if we ever made the new flavor of input-addressing as described + * in issue #9259, then the input-addressing would be based on the resolved + * inputs, and we like the CA case *would* use the output of this goal. + * + * (The point of this discussion is not to randomly stuff information on + * a yet-unimplemented feature (issue #9259) in the codebase, but + * rather, to illustrate that there is no inherent tension between + * explicit derivation resolution and input-addressing in general. 
That + * tension only exists with the type of input-addressing we've + * historically used.) + */ +struct DerivationResolutionGoal : public Goal +{ + DerivationResolutionGoal(const StorePath & drvPath, const Derivation & drv, Worker & worker, BuildMode buildMode); + + /** + * If the derivation needed to be resolved, this is resulting + * resolved derivations and its path. + */ + std::unique_ptr> resolvedDrv; + + void timedOut(Error && ex) override {} + +private: + + /** + * The path of the derivation. + */ + StorePath drvPath; + + /** + * The derivation stored at drvPath. + */ + std::unique_ptr drv; + + /** + * The remainder is state held during the build. + */ + + BuildMode buildMode; + + std::unique_ptr act; + + std::string key() override; + + /** + * The states. + */ + Co resolveDerivation(); + + JobCategory jobCategory() const override + { + return JobCategory::Administration; + }; +}; + +} // namespace nix diff --git a/src/libstore/include/nix/store/build/derivation-trampoline-goal.hh b/src/libstore/include/nix/store/build/derivation-trampoline-goal.hh index 79b74f4c1f0..bfed67f6370 100644 --- a/src/libstore/include/nix/store/build/derivation-trampoline-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-trampoline-goal.hh @@ -109,7 +109,7 @@ struct DerivationTrampolineGoal : public Goal virtual ~DerivationTrampolineGoal(); - void timedOut(Error && ex) override; + void timedOut(Error && ex) override {} std::string key() override; diff --git a/src/libstore/include/nix/store/build/drv-output-substitution-goal.hh b/src/libstore/include/nix/store/build/drv-output-substitution-goal.hh index b423364274e..6310e0d2ccc 100644 --- a/src/libstore/include/nix/store/build/drv-output-substitution-goal.hh +++ b/src/libstore/include/nix/store/build/drv-output-substitution-goal.hh @@ -29,17 +29,9 @@ class DrvOutputSubstitutionGoal : public Goal DrvOutput id; public: - DrvOutputSubstitutionGoal( - const DrvOutput & id, - Worker & worker, - RepairFlag repair = 
NoRepair, - std::optional ca = std::nullopt); - - typedef void (DrvOutputSubstitutionGoal::*GoalState)(); - GoalState state; + DrvOutputSubstitutionGoal(const DrvOutput & id, Worker & worker); Co init(); - Co realisationFetched(Goals waitees, std::shared_ptr outputInfo, nix::ref sub); void timedOut(Error && ex) override { diff --git a/src/libstore/include/nix/store/build/goal.hh b/src/libstore/include/nix/store/build/goal.hh index f009b60a10b..f048c75687c 100644 --- a/src/libstore/include/nix/store/build/goal.hh +++ b/src/libstore/include/nix/store/build/goal.hh @@ -456,6 +456,18 @@ public: */ virtual void timedOut(Error && ex) = 0; + /** + * Used for comparisons. The order matters a bit for scheduling. We + * want: + * + * 1. Substitution + * 2. Derivation administrativia + * 3. Actual building + * + * Also, ensure that derivations get processed in order of their + * name, i.e. a derivation named "aardvark" always comes before + * "baboon". + */ virtual std::string key() = 0; /** diff --git a/src/libstore/include/nix/store/build/substitution-goal.hh b/src/libstore/include/nix/store/build/substitution-goal.hh index 5f6cb6a18c7..5f33b9aa5d7 100644 --- a/src/libstore/include/nix/store/build/substitution-goal.hh +++ b/src/libstore/include/nix/store/build/substitution-goal.hh @@ -58,10 +58,6 @@ public: unreachable(); }; - /** - * We prepend "a$" to the key name to ensure substitution goals - * happen before derivation goals. - */ std::string key() override { return "a$" + std::string(storePath.name()) + "$" + worker.store.printStorePath(storePath); diff --git a/src/libstore/include/nix/store/build/worker.hh b/src/libstore/include/nix/store/build/worker.hh index a6de780c1e7..173f7b222b7 100644 --- a/src/libstore/include/nix/store/build/worker.hh +++ b/src/libstore/include/nix/store/build/worker.hh @@ -16,6 +16,7 @@ namespace nix { /* Forward definition. 
*/ struct DerivationTrampolineGoal; struct DerivationGoal; +struct DerivationResolutionGoal; struct DerivationBuildingGoal; struct PathSubstitutionGoal; class DrvOutputSubstitutionGoal; @@ -111,6 +112,7 @@ private: DerivedPathMap>> derivationTrampolineGoals; std::map>> derivationGoals; + std::map> derivationResolutionGoals; std::map> derivationBuildingGoals; std::map> substitutionGoals; std::map> drvOutputSubstitutionGoals; @@ -208,34 +210,37 @@ private: std::shared_ptr initGoalIfNeeded(std::weak_ptr & goal_weak, Args &&... args); std::shared_ptr makeDerivationTrampolineGoal( - ref drvReq, const OutputsSpec & wantedOutputs, BuildMode buildMode = bmNormal); + ref drvReq, const OutputsSpec & wantedOutputs, BuildMode buildMode); public: std::shared_ptr makeDerivationTrampolineGoal( - const StorePath & drvPath, - const OutputsSpec & wantedOutputs, - const Derivation & drv, - BuildMode buildMode = bmNormal); + const StorePath & drvPath, const OutputsSpec & wantedOutputs, const Derivation & drv, BuildMode buildMode); std::shared_ptr makeDerivationGoal( const StorePath & drvPath, const Derivation & drv, const OutputName & wantedOutput, - BuildMode buildMode = bmNormal); + BuildMode buildMode, + bool storeDerivation); + + /** + * @ref DerivationResolutionGoal "derivation resolution goal" + */ + std::shared_ptr + makeDerivationResolutionGoal(const StorePath & drvPath, const Derivation & drv, BuildMode buildMode); /** - * @ref DerivationBuildingGoal "derivation goal" + * @ref DerivationBuildingGoal "derivation building goal" */ - std::shared_ptr - makeDerivationBuildingGoal(const StorePath & drvPath, const Derivation & drv, BuildMode buildMode = bmNormal); + std::shared_ptr makeDerivationBuildingGoal( + const StorePath & drvPath, const Derivation & drv, BuildMode buildMode, bool storeDerivation); /** * @ref PathSubstitutionGoal "substitution goal" */ std::shared_ptr makePathSubstitutionGoal( const StorePath & storePath, RepairFlag repair = NoRepair, std::optional ca = 
std::nullopt); - std::shared_ptr makeDrvOutputSubstitutionGoal( - const DrvOutput & id, RepairFlag repair = NoRepair, std::optional ca = std::nullopt); + std::shared_ptr makeDrvOutputSubstitutionGoal(const DrvOutput & id); /** * Make a goal corresponding to the `DerivedPath`. diff --git a/src/libstore/include/nix/store/builtins.hh b/src/libstore/include/nix/store/builtins.hh index 0cdd3a2bcf0..fee11e59e9f 100644 --- a/src/libstore/include/nix/store/builtins.hh +++ b/src/libstore/include/nix/store/builtins.hh @@ -2,6 +2,11 @@ ///@file #include "nix/store/derivations.hh" +#include "nix/store/config.hh" + +#if NIX_WITH_AWS_AUTH +# include "nix/store/aws-creds.hh" +#endif #include @@ -16,6 +21,14 @@ struct BuiltinBuilderContext std::string netrcData; std::string caFileData; Path tmpDirInSandbox; + +#if NIX_WITH_AWS_AUTH + /** + * Pre-resolved AWS credentials for S3 URLs in builtin:fetchurl. + * When present, these should be used instead of creating new credential providers. + */ + std::optional awsCredentials; +#endif }; using BuiltinBuilder = std::function; @@ -24,11 +37,7 @@ struct RegisterBuiltinBuilder { typedef std::map BuiltinBuilders; - static BuiltinBuilders & builtinBuilders() - { - static BuiltinBuilders builders; - return builders; - } + static BuiltinBuilders & builtinBuilders(); RegisterBuiltinBuilder(const std::string & name, BuiltinBuilder && fun) { diff --git a/src/libstore/include/nix/store/content-address.hh b/src/libstore/include/nix/store/content-address.hh index 0a3dc79bd9c..41ccc69aeb3 100644 --- a/src/libstore/include/nix/store/content-address.hh +++ b/src/libstore/include/nix/store/content-address.hh @@ -6,6 +6,7 @@ #include "nix/store/path.hh" #include "nix/util/file-content-address.hh" #include "nix/util/variant-wrapper.hh" +#include "nix/util/json-impls.hh" namespace nix { @@ -308,4 +309,15 @@ struct ContentAddressWithReferences Hash getHash() const; }; +template<> +struct json_avoids_null : std::true_type +{}; + +template<> +struct 
json_avoids_null : std::true_type +{}; + } // namespace nix + +JSON_IMPL(nix::ContentAddressMethod) +JSON_IMPL(nix::ContentAddress) diff --git a/src/libstore/include/nix/store/derivation-options.hh b/src/libstore/include/nix/store/derivation-options.hh index 88694f730c1..d733df1599b 100644 --- a/src/libstore/include/nix/store/derivation-options.hh +++ b/src/libstore/include/nix/store/derivation-options.hh @@ -8,7 +8,8 @@ #include "nix/util/types.hh" #include "nix/util/json-impls.hh" -#include "nix/store/path.hh" +#include "nix/store/store-dir-config.hh" +#include "nix/store/downstream-placeholder.hh" namespace nix { @@ -17,6 +18,9 @@ struct StoreDirConfig; struct BasicDerivation; struct StructuredAttrs; +template +struct DerivedPathMap; + /** * This represents all the special options on a `Derivation`. * @@ -34,6 +38,7 @@ struct StructuredAttrs; * separately. That would be nice to separate concerns, and not make any * environment variable names magical. */ +template struct DerivationOptions { struct OutputChecks @@ -41,13 +46,15 @@ struct DerivationOptions bool ignoreSelfRefs = false; std::optional maxSize, maxClosureSize; + using DrvRef = nix::DrvRef; + /** * env: allowedReferences * * A value of `nullopt` indicates that the check is skipped. * This means that all references are allowed. */ - std::optional allowedReferences; + std::optional> allowedReferences; /** * env: disallowedReferences @@ -55,21 +62,21 @@ struct DerivationOptions * No needed for `std::optional`, because skipping the check is * the same as disallowing the references. 
*/ - StringSet disallowedReferences; + std::set disallowedReferences; /** * env: allowedRequisites * * See `allowedReferences` */ - std::optional allowedRequisites; + std::optional> allowedRequisites; /** * env: disallowedRequisites * * See `disallowedReferences` */ - StringSet disallowedRequisites; + std::set disallowedRequisites; bool operator==(const OutputChecks &) const = default; }; @@ -116,23 +123,7 @@ struct DerivationOptions * attributes give to the builder. The set of paths in the original JSON * is replaced with a list of `PathInfo` in JSON format. */ - std::map exportReferencesGraph; - - /** - * Once a derivations is resolved, the strings in in - * `exportReferencesGraph` should all be store paths (with possible - * suffix paths, but those are discarded). - * - * @return The parsed path set for for each key in the map. - * - * @todo Ideally, `exportReferencesGraph` would just store - * `StorePath`s for this, but we can't just do that, because for CA - * derivations they is actually in general `DerivedPath`s (via - * placeholder strings) until the derivation is resolved and exact - * inputs store paths are known. We can use better types for that - * too, but that is a longer project. - */ - std::map getParsedExportReferencesGraph(const StoreDirConfig & store) const; + std::map> exportReferencesGraph; /** * env: __sandboxProfile @@ -185,18 +176,6 @@ struct DerivationOptions bool operator==(const DerivationOptions &) const = default; - /** - * Parse this information from its legacy encoding as part of the - * environment. This should not be used with nice greenfield formats - * (e.g. JSON) but is necessary for supporting old formats (e.g. - * ATerm). 
- */ - static DerivationOptions - fromStructuredAttrs(const StringMap & env, const StructuredAttrs * parsed, bool shouldWarn = true); - - static DerivationOptions - fromStructuredAttrs(const StringMap & env, const std::optional & parsed, bool shouldWarn = true); - /** * @param drv Must be the same derivation we parsed this from. In * the future we'll flip things around so a `BasicDerivation` has @@ -222,7 +201,49 @@ struct DerivationOptions bool useUidRange(const BasicDerivation & drv) const; }; +extern template struct DerivationOptions; +extern template struct DerivationOptions; + +struct DerivationOutput; + +/** + * Parse this information from its legacy encoding as part of the + * environment. This should not be used with nice greenfield formats + * (e.g. JSON) but is necessary for supporting old formats (e.g. + * ATerm). + */ +DerivationOptions derivationOptionsFromStructuredAttrs( + const StoreDirConfig & store, + const DerivedPathMap & inputDrvs, + const StringMap & env, + const StructuredAttrs * parsed, + bool shouldWarn = true, + const ExperimentalFeatureSettings & mockXpSettings = experimentalFeatureSettings); + +DerivationOptions derivationOptionsFromStructuredAttrs( + const StoreDirConfig & store, + const StringMap & env, + const StructuredAttrs * parsed, + bool shouldWarn = true, + const ExperimentalFeatureSettings & mockXpSettings = experimentalFeatureSettings); + +/** + * This is the counterpart of `Derivation::tryResolve`. In particular, + * it takes the same sort of callback, which is used to resolve + * non-constant deriving paths. 
+ * + * We need this function when resolving a derivation, and we will use + * this as part of that if/when `Derivation` includes + * `DerivationOptions` + */ +std::optional> tryResolve( + const DerivationOptions & drvOptions, + std::function(ref drvPath, const std::string & outputName)> + queryResolutionChain); + }; // namespace nix -JSON_IMPL(DerivationOptions); -JSON_IMPL(DerivationOptions::OutputChecks) +JSON_IMPL(nix::DerivationOptions); +JSON_IMPL(nix::DerivationOptions); +JSON_IMPL(nix::DerivationOptions::OutputChecks) +JSON_IMPL(nix::DerivationOptions::OutputChecks) diff --git a/src/libstore/include/nix/store/derivations.hh b/src/libstore/include/nix/store/derivations.hh index b1739682c1d..e4c3e29e877 100644 --- a/src/libstore/include/nix/store/derivations.hh +++ b/src/libstore/include/nix/store/derivations.hh @@ -135,13 +135,6 @@ struct DerivationOutput */ std::optional path(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const; - - nlohmann::json toJSON() const; - /** - * @param xpSettings Stop-gap to avoid globals during unit tests. - */ - static DerivationOutput - fromJSON(const nlohmann::json & json, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); }; typedef std::map DerivationOutputs; @@ -285,7 +278,7 @@ struct BasicDerivation Path builder; Strings args; /** - * Must not contain the key `__json`, at least in order to serialize to A-Term. + * Must not contain the key `__json`, at least in order to serialize to ATerm. */ StringPairs env; std::optional structuredAttrs; @@ -376,9 +369,48 @@ struct Derivation : BasicDerivation * This is mainly a matter of checking the outputs, where our C++ * representation supports all sorts of combinations we do not yet * allow. + * + * This overload does not validate the derivation name or add path + * context to errors. Use this when you don't have a `StorePath` or + * when you want to handle error context yourself. 
+ * + * @param store The store to use for validation + */ + void checkInvariants(Store & store) const; + + /** + * This overload does everything the base `checkInvariants` does, + * but also validates that the derivation name matches the path, and + * improves any error messages that occur using the derivation path. + * + * @param store The store to use for validation + * @param drvPath The path to this derivation */ void checkInvariants(Store & store, const StorePath & drvPath) const; + /** + * Fill in output paths as needed. + * + * For input-addressed derivations (ready or deferred), it computes + * the derivation hash modulo and based on the result: + * + * - If `Regular`: converts `Deferred` outputs to `InputAddressed`, + * and ensures all `InputAddressed` outputs (whether preexisting + * or newly computed) have the right computed paths. Likewise + * defines (if absent or the empty string) or checks (if + * preexisting and non-empty) environment variables for each + * output with their path. + * + * - If `Deferred`: converts `InputAddressed` to `Deferred`. + * + * Also for fixed-output content-addressed derivations, likewise + * updates output paths in env vars. + * + * @param store The store to use for path computation + * @param drvName The derivation name (without .drv extension) + */ + void fillInOutputPaths(Store & store); + Derivation() = default; Derivation(const BasicDerivation & bd) @@ -391,9 +423,28 @@ struct Derivation : BasicDerivation { } - nlohmann::json toJSON() const; - static Derivation - fromJSON(const nlohmann::json & json, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); + /** + * Parse a derivation from JSON, and also perform various + * conveniences such as: + * + * 1. Filling in output paths in as needed/required. + * + * 2. Checking invariants in general. + * + * In the future it might also do things like: + * + * - assist with the migration from older JSON formats. 
+ * + * - (a somewhat example of the above) initialize + * `DerivationOptions` from their traditional encoding inside the + * `env` and `structuredAttrs`. + * + * @param store The store to use for path computation and validation + * @param json The JSON representation of the derivation + * @return A validated derivation with output paths filled in + * @throws Error if parsing fails, output paths can't be computed, or validation fails + */ + static Derivation parseJsonAndValidate(Store & store, const nlohmann::json & json); bool operator==(const Derivation &) const = default; // TODO libc++ 16 (used by darwin) missing `std::map::operator <=>`, can't do yet. @@ -546,7 +597,13 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva */ std::string hashPlaceholder(const OutputNameView outputName); +/** + * The expected JSON version for derivation serialization. + * Used by `nix derivation show` and `nix derivation add`. + */ +constexpr unsigned expectedJsonVersionDerivation = 4; + } // namespace nix -JSON_IMPL(nix::DerivationOutput) -JSON_IMPL(nix::Derivation) +JSON_IMPL_WITH_XP_FEATURES(nix::DerivationOutput) +JSON_IMPL_WITH_XP_FEATURES(nix::Derivation) diff --git a/src/libstore/include/nix/store/derived-path.hh b/src/libstore/include/nix/store/derived-path.hh index da50c5b98dc..1e806371ad6 100644 --- a/src/libstore/include/nix/store/derived-path.hh +++ b/src/libstore/include/nix/store/derived-path.hh @@ -295,7 +295,7 @@ void drvRequireExperiment( } // namespace nix JSON_IMPL(nix::SingleDerivedPath::Opaque) -JSON_IMPL(nix::SingleDerivedPath::Built) -JSON_IMPL(nix::SingleDerivedPath) -JSON_IMPL(nix::DerivedPath::Built) -JSON_IMPL(nix::DerivedPath) +JSON_IMPL_WITH_XP_FEATURES(nix::SingleDerivedPath::Built) +JSON_IMPL_WITH_XP_FEATURES(nix::SingleDerivedPath) +JSON_IMPL_WITH_XP_FEATURES(nix::DerivedPath::Built) +JSON_IMPL_WITH_XP_FEATURES(nix::DerivedPath) diff --git a/src/libstore/include/nix/store/downstream-placeholder.hh 
b/src/libstore/include/nix/store/downstream-placeholder.hh index ee4d9e3c29b..ba3e9faeff7 100644 --- a/src/libstore/include/nix/store/downstream-placeholder.hh +++ b/src/libstore/include/nix/store/downstream-placeholder.hh @@ -2,11 +2,23 @@ ///@file #include "nix/util/hash.hh" +#include "nix/util/json-impls.hh" #include "nix/store/path.hh" #include "nix/store/derived-path.hh" namespace nix { +/** + * A reference is either to a to-be-registered output (by name), + * or to an already-registered store object (by `Input`). + * + * `Ref +using DrvRef = std::variant; + /** * Downstream Placeholders are opaque and almost certainly unique values * used to allow derivations to refer to store objects which are yet to @@ -92,3 +104,17 @@ public: }; } // namespace nix + +namespace nlohmann { + +template +struct adl_serializer> +{ + static nix::DrvRef from_json(const json & json); + static void to_json(json & json, const nix::DrvRef & t); +}; + +extern template struct adl_serializer>; +extern template struct adl_serializer>; + +} // namespace nlohmann diff --git a/src/libstore/include/nix/store/dummy-store-impl.hh b/src/libstore/include/nix/store/dummy-store-impl.hh index e05bb94ff76..ac7ab9c680f 100644 --- a/src/libstore/include/nix/store/dummy-store-impl.hh +++ b/src/libstore/include/nix/store/dummy-store-impl.hh @@ -2,6 +2,7 @@ ///@file #include "nix/store/dummy-store.hh" +#include "nix/store/derivations.hh" #include @@ -22,19 +23,47 @@ struct DummyStore : virtual Store { UnkeyedValidPathInfo info; ref contents; + + bool operator==(const PathInfoAndContents &) const; }; /** - * This is map conceptually owns the file system objects for each + * This map conceptually owns the file system objects for each * store object. */ boost::concurrent_flat_map contents; + /** + * This map conceptually owns every derivation, allowing us to + * avoid "on-disk drv format" serialization round-trips. 
+ */ + boost::concurrent_flat_map derivations; + + /** + * The build trace maps the pair of a content-addressing (fixed or + * floating) derivations and one of its outputs to a + * (content-addressed) store object. + * + * It is [curried](https://en.wikipedia.org/wiki/Currying), so + * instead of having a single map with a `DrvOutput` key, we have an + * outer map for the derivation, and inner maps for the outputs of a + * given derivation. + */ + boost::concurrent_flat_map> buildTrace; + DummyStore(ref config) : Store{*config} , config(config) { } + + bool operator==(const DummyStore &) const; }; +template<> +struct json_avoids_null : std::true_type +{}; + } // namespace nix + +JSON_IMPL(nix::DummyStore::PathInfoAndContents) diff --git a/src/libstore/include/nix/store/dummy-store.hh b/src/libstore/include/nix/store/dummy-store.hh index 95c09078c98..febf351c975 100644 --- a/src/libstore/include/nix/store/dummy-store.hh +++ b/src/libstore/include/nix/store/dummy-store.hh @@ -2,6 +2,9 @@ ///@file #include "nix/store/store-api.hh" +#include "nix/util/json-impls.hh" + +#include namespace nix { @@ -63,4 +66,33 @@ struct DummyStoreConfig : public std::enable_shared_from_this, } }; +template<> +struct json_avoids_null : std::true_type +{}; + +template<> +struct json_avoids_null> : std::true_type +{}; + +template<> +struct json_avoids_null : std::true_type +{}; + +template<> +struct json_avoids_null> : std::true_type +{}; + } // namespace nix + +namespace nlohmann { + +template<> +JSON_IMPL_INNER_TO(nix::DummyStoreConfig); +template<> +JSON_IMPL_INNER_FROM(nix::ref); +template<> +JSON_IMPL_INNER_TO(nix::DummyStore); +template<> +JSON_IMPL_INNER_FROM(nix::ref); + +} // namespace nlohmann diff --git a/src/libstore/include/nix/store/filetransfer.hh b/src/libstore/include/nix/store/filetransfer.hh index 3e011fddc52..fa8a649e2b3 100644 --- a/src/libstore/include/nix/store/filetransfer.hh +++ b/src/libstore/include/nix/store/filetransfer.hh @@ -11,6 +11,12 @@ #include 
"nix/util/serialise.hh" #include "nix/util/url.hh" +#include "nix/store/config.hh" +#if NIX_WITH_AWS_AUTH +# include "nix/store/aws-creds.hh" +#endif +#include "nix/store/s3-url.hh" + namespace nix { struct FileTransferSettings : Config @@ -64,12 +70,12 @@ struct FileTransferSettings : Config Setting downloadBufferSize{ this, - 64 * 1024 * 1024, + 1 * 1024 * 1024, "download-buffer-size", R"( The size of Nix's internal download buffer in bytes during `curl` transfers. If data is not processed quickly enough to exceed the size of this buffer, downloads may stall. - The default is 67108864 (64 MiB). + The default is 1048576 (1 MiB). )"}; }; @@ -77,21 +83,84 @@ extern FileTransferSettings fileTransferSettings; extern const unsigned int RETRY_TIME_MS_DEFAULT; +/** + * HTTP methods supported by FileTransfer. + */ +enum struct HttpMethod { + Get, + Put, + Head, + Post, + Delete, +}; + +/** + * Username and optional password for HTTP basic authentication. + * These are used with curl's CURLOPT_USERNAME and CURLOPT_PASSWORD options + * for various protocols including HTTP, FTP, and others. 
+ */ +struct UsernameAuth +{ + std::string username; + std::optional password; +}; + +enum class PauseTransfer : bool { + No = false, + Yes = true, +}; + struct FileTransferRequest { VerbatimURL uri; Headers headers; std::string expectedETag; - bool verifyTLS = true; - bool head = false; - bool post = false; + HttpMethod method = HttpMethod::Get; size_t tries = fileTransferSettings.tries; unsigned int baseRetryTimeMs = RETRY_TIME_MS_DEFAULT; ActivityId parentAct; bool decompress = true; - std::optional data; + + struct UploadData + { + UploadData(StringSource & s) + : sizeHint(s.s.length()) + , source(&s) + { + } + + UploadData(std::size_t sizeHint, RestartableSource & source) + : sizeHint(sizeHint) + , source(&source) + { + } + + std::size_t sizeHint = 0; + RestartableSource * source = nullptr; + }; + + std::optional data; std::string mimeType; - std::function dataCallback; + + /** + * Callbacked invoked with a chunk of received data. + * Can pause the transfer by returning PauseTransfer::Yes. No data must be consumed + * if transfer is paused. + */ + std::function dataCallback; + + /** + * Optional username and password for HTTP basic authentication. + * When provided, these credentials will be used with curl's CURLOPT_USERNAME/PASSWORD option. + */ + std::optional usernameAuth; +#if NIX_WITH_AWS_AUTH + /** + * Pre-resolved AWS session token for S3 requests. + * When provided along with usernameAuth, this will be used instead of fetching fresh credentials. + */ + std::optional preResolvedAwsSessionToken; +#endif FileTransferRequest(VerbatimURL uri) : uri(std::move(uri)) @@ -99,10 +168,48 @@ struct FileTransferRequest { } - std::string verb() const + /** + * Returns the method description for logging purposes. + */ + std::string verb(bool continuous = false) const + { + switch (method) { + case HttpMethod::Head: + case HttpMethod::Get: + return continuous ? 
"downloading" : "download"; + case HttpMethod::Put: + case HttpMethod::Post: + assert(data); + return continuous ? "uploading" : "upload"; + case HttpMethod::Delete: + return continuous ? "deleting" : "delete"; + } + unreachable(); + } + + std::string noun() const { - return data ? "upload" : "download"; + switch (method) { + case HttpMethod::Head: + case HttpMethod::Get: + return "download"; + case HttpMethod::Put: + case HttpMethod::Post: + assert(data); + return "upload"; + case HttpMethod::Delete: + return "deletion"; + } + unreachable(); } + + void setupForS3(); + +private: + friend struct curlFileTransfer; +#if NIX_WITH_AWS_AUTH + std::optional awsSigV4Provider; +#endif }; struct FileTransferResult @@ -145,6 +252,25 @@ class Store; struct FileTransfer { +protected: + class Item + {}; + +public: + /** + * An opaque handle to the file transfer. Can be used to reference an in-flight transfer operations. + */ + struct ItemHandle + { + std::reference_wrapper item; + friend struct FileTransfer; + + ItemHandle(Item & item) + : item(item) + { + } + }; + virtual ~FileTransfer() {} /** @@ -152,7 +278,13 @@ struct FileTransfer * the download. The future may throw a FileTransferError * exception. */ - virtual void enqueueFileTransfer(const FileTransferRequest & request, Callback callback) = 0; + virtual ItemHandle + enqueueFileTransfer(const FileTransferRequest & request, Callback callback) = 0; + + /** + * Unpause a transfer that has been previously paused by a dataCallback. + */ + virtual void unpauseTransfer(ItemHandle handle) = 0; std::future enqueueFileTransfer(const FileTransferRequest & request); @@ -166,6 +298,11 @@ struct FileTransfer */ FileTransferResult upload(const FileTransferRequest & request); + /** + * Synchronously delete a resource. + */ + FileTransferResult deleteResource(const FileTransferRequest & request); + /** * Download a file, writing its data to a sink. The sink will be * invoked on the thread of the caller. 
diff --git a/src/libstore/include/nix/store/gc-store.hh b/src/libstore/include/nix/store/gc-store.hh index 5691a3568c1..de7a71382f9 100644 --- a/src/libstore/include/nix/store/gc-store.hh +++ b/src/libstore/include/nix/store/gc-store.hh @@ -17,28 +17,31 @@ typedef boost::unordered_flat_map< std::hash> Roots; +/** + * Garbage collector operation: + * + * - `gcReturnLive`: return the set of paths reachable from + * (i.e. in the closure of) the roots. + * + * - `gcReturnDead`: return the set of paths not reachable from + * the roots. + * + * - `gcDeleteDead`: actually delete the latter set. + * + * - `gcDeleteSpecific`: delete the paths listed in + * `pathsToDelete`, insofar as they are not reachable. + */ +enum class GCAction { + gcReturnLive, + gcReturnDead, + gcDeleteDead, + gcDeleteSpecific, +}; + struct GCOptions { - /** - * Garbage collector operation: - * - * - `gcReturnLive`: return the set of paths reachable from - * (i.e. in the closure of) the roots. - * - * - `gcReturnDead`: return the set of paths not reachable from - * the roots. - * - * - `gcDeleteDead`: actually delete the latter set. - * - * - `gcDeleteSpecific`: delete the paths listed in - * `pathsToDelete`, insofar as they are not reachable. - */ - typedef enum { - gcReturnLive, - gcReturnDead, - gcDeleteDead, - gcDeleteSpecific, - } GCAction; + using GCAction = nix::GCAction; + using enum GCAction; GCAction action{gcDeleteDead}; diff --git a/src/libstore/include/nix/store/globals.hh b/src/libstore/include/nix/store/globals.hh index 28f2acf049b..d28198a16ad 100644 --- a/src/libstore/include/nix/store/globals.hh +++ b/src/libstore/include/nix/store/globals.hh @@ -75,10 +75,10 @@ class Settings : public Config public: - static unsigned int getDefaultCores(); - Settings(); + static unsigned int getDefaultCores(); + Path nixPrefix; /** @@ -101,7 +101,7 @@ public: /** * The directory where system configuration files are stored. 
*/ - Path nixConfDir; + std::filesystem::path nixConfDir; /** * A list of user configuration files to load. @@ -189,7 +189,7 @@ public: 0, "cores", R"( - Sets the value of the `NIX_BUILD_CORES` environment variable in the [invocation of the `builder` executable](@docroot@/language/derivations.md#builder-execution) of a derivation. + Sets the value of the `NIX_BUILD_CORES` environment variable in the [invocation of the `builder` executable](@docroot@/store/building.md#builder-execution) of a derivation. The `builder` executable can use this variable to control its own maximum amount of parallelism. For instance, in Nixpkgs, if the attribute `enableParallelBuilding` for the `mkDerivation` build helper is set to `true`, it passes the `-j${NIX_BUILD_CORES}` flag to GNU Make. - If set to `0`, nix will detect the number of CPU cores and pass this number via NIX_BUILD_CORES. + If set to `0`, nix will detect the number of CPU cores and pass this number via `NIX_BUILD_CORES`. > **Note** > @@ -288,7 +288,7 @@ public: Setting builders{ this, - "@" + nixConfDir + "/machines", + "@" + nixConfDir.string() + "/machines", "builders", R"( A semicolon- or newline-separated list of build machines. @@ -790,6 +790,8 @@ public: "build-dir", R"( Override the `build-dir` store setting for all stores that have this setting. + + See also the per-store [`build-dir`](@docroot@/store/types/local-store.md#store-local-store-build-dir) setting. )"}; Setting allowedImpureHostPrefixes{ @@ -1137,7 +1139,7 @@ public: Setting netrcFile{ this, - fmt("%s/%s", nixConfDir, "netrc"), + (nixConfDir / "netrc").string(), "netrc-file", R"( If set to an absolute path to a `netrc` file, Nix uses the HTTP @@ -1370,13 +1372,6 @@ public: Set it to 1 to warn on all paths. 
)"}; - struct ExternalBuilder - { - std::vector systems; - Path program; - std::vector args; - }; - using ExternalBuilders = std::vector; Setting externalBuilders{ @@ -1440,6 +1435,12 @@ public: // Current system: 'aarch64-darwin' with features {apple-virt, benchmark, big-parallel, nixos-test} // Xp::ExternalBuilders }; + + /** + * Finds the first external derivation builder that supports this + * derivation, or else returns a null pointer. + */ + const ExternalBuilder * findExternalDerivationBuilderIfSupported(const Derivation & drv); }; // FIXME: don't use a global variable. diff --git a/src/libstore/include/nix/store/http-binary-cache-store.hh b/src/libstore/include/nix/store/http-binary-cache-store.hh index e0b7ac1ea32..ea3d77b7987 100644 --- a/src/libstore/include/nix/store/http-binary-cache-store.hh +++ b/src/libstore/include/nix/store/http-binary-cache-store.hh @@ -3,6 +3,10 @@ #include "nix/util/url.hh" #include "nix/store/binary-cache-store.hh" +#include "nix/store/filetransfer.hh" +#include "nix/util/sync.hh" + +#include namespace nix { @@ -46,4 +50,68 @@ struct HttpBinaryCacheStoreConfig : std::enable_shared_from_this _state; + +public: + + using Config = HttpBinaryCacheStoreConfig; + + ref config; + + HttpBinaryCacheStore(ref config); + + void init() override; + +protected: + + std::optional getCompressionMethod(const std::string & path); + + void maybeDisable(); + + void checkEnabled(); + + bool fileExists(const std::string & path) override; + + void upsertFile( + const std::string & path, RestartableSource & source, const std::string & mimeType, uint64_t sizeHint) override; + + FileTransferRequest makeRequest(std::string_view path); + + /** + * Uploads data to the binary cache. + * + * This is a lower-level method that handles the actual upload after + * compression has been applied. It does not handle compression or + * error wrapping - those are the caller's responsibility. 
+ * + * @param path The path in the binary cache to upload to + * @param source The data source (should already be compressed if needed) + * @param sizeHint Size hint for the data + * @param mimeType The MIME type of the content + * @param contentEncoding Optional Content-Encoding header value (e.g., "xz", "br") + */ + void upload( + std::string_view path, + RestartableSource & source, + uint64_t sizeHint, + std::string_view mimeType, + std::optional headers); + + void getFile(const std::string & path, Sink & sink) override; + + void getFile(const std::string & path, Callback> callback) noexcept override; + + std::optional getNixCacheInfo() override; + + std::optional isTrustedClient() override; +}; + } // namespace nix diff --git a/src/libstore/include/nix/store/legacy-ssh-store.hh b/src/libstore/include/nix/store/legacy-ssh-store.hh index c91f88a8478..994918f90f0 100644 --- a/src/libstore/include/nix/store/legacy-ssh-store.hh +++ b/src/libstore/include/nix/store/legacy-ssh-store.hh @@ -208,8 +208,8 @@ public: */ std::optional isTrustedClient() override; - void - queryRealisationUncached(const DrvOutput &, Callback> callback) noexcept override + void queryRealisationUncached( + const DrvOutput &, Callback> callback) noexcept override // TODO: Implement { unsupported("queryRealisation"); diff --git a/src/libstore/include/nix/store/local-fs-store.hh b/src/libstore/include/nix/store/local-fs-store.hh index 08f8e165646..7b8dde9b0e0 100644 --- a/src/libstore/include/nix/store/local-fs-store.hh +++ b/src/libstore/include/nix/store/local-fs-store.hh @@ -66,7 +66,10 @@ public: this, rootDir.get() ? *rootDir.get() + "/nix/store" : storeDir, "real", "Physical path of the Nix store."}; }; -struct LocalFSStore : virtual Store, virtual GcStore, virtual LogStore +struct alignas(8) /* Work around ASAN failures on i686-linux. 
*/ + LocalFSStore : virtual Store, + virtual GcStore, + virtual LogStore { using Config = LocalFSStoreConfig; @@ -78,7 +81,6 @@ struct LocalFSStore : virtual Store, virtual GcStore, virtual LogStore LocalFSStore(const Config & params); - void narFromPath(const StorePath & path, Sink & sink) override; ref getFSAccessor(bool requireValidPath = true) override; std::shared_ptr getFSAccessor(const StorePath & path, bool requireValidPath = true) override; @@ -103,7 +105,12 @@ struct LocalFSStore : virtual Store, virtual GcStore, virtual LogStore return config.realStoreDir; } - Path toRealPath(const Path & storePath) override + Path toRealPath(const StorePath & storePath) + { + return toRealPath(printStorePath(storePath)); + } + + Path toRealPath(const Path & storePath) { assert(isInStore(storePath)); return getRealStoreDir() + "/" + std::string(storePath, storeDir.size() + 1); diff --git a/src/libstore/include/nix/store/local-overlay-store.hh b/src/libstore/include/nix/store/local-overlay-store.hh index b89d0a1a01a..1d69d341708 100644 --- a/src/libstore/include/nix/store/local-overlay-store.hh +++ b/src/libstore/include/nix/store/local-overlay-store.hh @@ -173,7 +173,7 @@ private: * Check lower store if upper DB does not have. */ void queryRealisationUncached( - const DrvOutput &, Callback> callback) noexcept override; + const DrvOutput &, Callback> callback) noexcept override; /** * Call `remountIfNecessary` after collecting garbage normally. diff --git a/src/libstore/include/nix/store/local-store.hh b/src/libstore/include/nix/store/local-store.hh index 0decabd5647..fd457c2d3be 100644 --- a/src/libstore/include/nix/store/local-store.hh +++ b/src/libstore/include/nix/store/local-store.hh @@ -55,13 +55,15 @@ private: This is also the location where [`--keep-failed`](@docroot@/command-ref/opt-common.md#opt-keep-failed) leaves its files. - If Nix runs without sandbox, or if the platform does not support sandboxing with bind mounts (e.g. 
macOS), then the [`builder`](@docroot@/language/derivations.md#attr-builder)'s environment will contain this directory, instead of the virtual location [`sandbox-build-dir`](#conf-sandbox-build-dir). + If Nix runs without sandbox, or if the platform does not support sandboxing with bind mounts (e.g. macOS), then the [`builder`](@docroot@/language/derivations.md#attr-builder)'s environment will contain this directory, instead of the virtual location [`sandbox-build-dir`](@docroot@/command-ref/conf-file.md#conf-sandbox-build-dir). > **Warning** > > `build-dir` must not be set to a world-writable directory. > Placing temporary build directories in a world-writable place allows other users to access or modify build data that is currently in use. > This alone is merely an impurity, but combined with another factor this has allowed malicious derivations to escape the build sandbox. + + See also the global [`build-dir`](@docroot@/command-ref/conf-file.md#conf-build-dir) setting. )"}; public: Path getBuildDir() const; @@ -389,10 +391,10 @@ public: void cacheDrvOutputMapping( State & state, const uint64_t deriver, const std::string & outputName, const StorePath & output); - std::optional queryRealisation_(State & state, const DrvOutput & id); - std::optional> queryRealisationCore_(State & state, const DrvOutput & id); + std::optional queryRealisation_(State & state, const DrvOutput & id); + std::optional> queryRealisationCore_(State & state, const DrvOutput & id); void queryRealisationUncached( - const DrvOutput &, Callback> callback) noexcept override; + const DrvOutput &, Callback> callback) noexcept override; std::optional getVersion() override; diff --git a/src/libstore/include/nix/store/meson.build b/src/libstore/include/nix/store/meson.build index 82d9ac85730..91bce9ba9b9 100644 --- a/src/libstore/include/nix/store/meson.build +++ b/src/libstore/include/nix/store/meson.build @@ -20,6 +20,7 @@ headers = [ config_pub_h ] + files( 'build/derivation-building-misc.hh', 
'build/derivation-env-desugar.hh', 'build/derivation-goal.hh', + 'build/derivation-resolution-goal.hh', 'build/derivation-trampoline-goal.hh', 'build/drv-output-substitution-goal.hh', 'build/goal.hh', @@ -56,7 +57,6 @@ headers = [ config_pub_h ] + files( 'machines.hh', 'make-content-addressed.hh', 'names.hh', - 'nar-accessor.hh', 'nar-info-disk-cache.hh', 'nar-info.hh', 'outputs-spec.hh', @@ -77,7 +77,6 @@ headers = [ config_pub_h ] + files( 'restricted-store.hh', 's3-binary-cache-store.hh', 's3-url.hh', - 's3.hh', 'serve-protocol-connection.hh', 'serve-protocol-impl.hh', 'serve-protocol.hh', diff --git a/src/libstore/include/nix/store/nar-accessor.hh b/src/libstore/include/nix/store/nar-accessor.hh deleted file mode 100644 index 0e69d436e7d..00000000000 --- a/src/libstore/include/nix/store/nar-accessor.hh +++ /dev/null @@ -1,38 +0,0 @@ -#pragma once -///@file - -#include "nix/util/source-accessor.hh" - -#include - -#include - -namespace nix { - -struct Source; - -/** - * Return an object that provides access to the contents of a NAR - * file. - */ -ref makeNarAccessor(std::string && nar); - -ref makeNarAccessor(Source & source); - -/** - * Create a NAR accessor from a NAR listing (in the format produced by - * listNar()). The callback getNarBytes(offset, length) is used by the - * readFile() method of the accessor to get the contents of files - * inside the NAR. - */ -using GetNarBytes = std::function; - -ref makeLazyNarAccessor(const std::string & listing, GetNarBytes getNarBytes); - -/** - * Write a JSON representation of the contents of a NAR (except file - * contents). 
- */ -nlohmann::json listNar(ref accessor, const CanonPath & path, bool recurse); - -} // namespace nix diff --git a/src/libstore/include/nix/store/nar-info.hh b/src/libstore/include/nix/store/nar-info.hh index 1684837c690..ac25f75c2cd 100644 --- a/src/libstore/include/nix/store/nar-info.hh +++ b/src/libstore/include/nix/store/nar-info.hh @@ -9,22 +9,53 @@ namespace nix { struct StoreDirConfig; -struct NarInfo : ValidPathInfo +struct UnkeyedNarInfo : virtual UnkeyedValidPathInfo { std::string url; std::string compression; std::optional fileHash; uint64_t fileSize = 0; + UnkeyedNarInfo(UnkeyedValidPathInfo info) + : UnkeyedValidPathInfo(std::move(info)) + { + } + + bool operator==(const UnkeyedNarInfo &) const = default; + // TODO libc++ 16 (used by darwin) missing `std::optional::operator <=>`, can't do yet + // auto operator <=>(const NarInfo &) const = default; + + nlohmann::json + toJSON(const StoreDirConfig * store, bool includeImpureInfo, PathInfoJsonFormat format) const override; + static UnkeyedNarInfo fromJSON(const StoreDirConfig * store, const nlohmann::json & json); +}; + +/** + * Key and the extra NAR fields + */ +struct NarInfo : ValidPathInfo, UnkeyedNarInfo +{ NarInfo() = delete; NarInfo(ValidPathInfo info) - : ValidPathInfo{std::move(info)} + : UnkeyedValidPathInfo{static_cast(info)} + /* Later copies from `*this` are pointless. The argument is only + there so the constructors can also call + `UnkeyedValidPathInfo`, but this won't happen since the base + class is virtual. Only this counstructor (assuming it is most + derived) will initialize that virtual base class. 
*/ + , ValidPathInfo{info.path, static_cast(*this)} + , UnkeyedNarInfo{static_cast(*this)} + { + } + + NarInfo(const StoreDirConfig & store, StorePath path, Hash narHash) + : NarInfo{ValidPathInfo{std::move(path), UnkeyedValidPathInfo{store, narHash}}} { } - NarInfo(StorePath path, Hash narHash) - : NarInfo{ValidPathInfo{std::move(path), UnkeyedValidPathInfo(narHash)}} + NarInfo(std::string storeDir, StorePath path, Hash narHash) + : NarInfo{ValidPathInfo{std::move(path), UnkeyedValidPathInfo{std::move(storeDir), narHash}}} { } @@ -37,13 +68,10 @@ struct NarInfo : ValidPathInfo NarInfo(const StoreDirConfig & store, const std::string & s, const std::string & whence); bool operator==(const NarInfo &) const = default; - // TODO libc++ 16 (used by darwin) missing `std::optional::operator <=>`, can't do yet - // auto operator <=>(const NarInfo &) const = default; std::string to_string(const StoreDirConfig & store) const; - - nlohmann::json toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const override; - static NarInfo fromJSON(const StoreDirConfig & store, const StorePath & path, const nlohmann::json & json); }; } // namespace nix + +JSON_IMPL(nix::UnkeyedNarInfo) diff --git a/src/libstore/include/nix/store/parsed-derivations.hh b/src/libstore/include/nix/store/parsed-derivations.hh index edef1b2d243..09859131040 100644 --- a/src/libstore/include/nix/store/parsed-derivations.hh +++ b/src/libstore/include/nix/store/parsed-derivations.hh @@ -9,6 +9,7 @@ namespace nix { class Store; +template struct DerivationOptions; struct DerivationOutput; @@ -18,7 +19,7 @@ struct StructuredAttrs { static constexpr std::string_view envVarName{"__json"}; - nlohmann::json structuredAttrs; + nlohmann::json::object_t structuredAttrs; bool operator==(const StructuredAttrs &) const = default; @@ -45,9 +46,9 @@ struct StructuredAttrs */ static void checkKeyNotInUse(const StringPairs & env); - nlohmann::json prepareStructuredAttrs( + nlohmann::json::object_t 
prepareStructuredAttrs( Store & store, - const DerivationOptions & drvOptions, + const DerivationOptions & drvOptions, const StorePathSet & inputPaths, const DerivationOutputs & outputs) const; @@ -62,7 +63,7 @@ struct StructuredAttrs * `prepareStructuredAttrs`, *not* the original `structuredAttrs` * field. */ - static std::string writeShell(const nlohmann::json & prepared); + static std::string writeShell(const nlohmann::json::object_t & prepared); }; } // namespace nix diff --git a/src/libstore/include/nix/store/path-info.hh b/src/libstore/include/nix/store/path-info.hh index cbc5abdb442..dbcd933f426 100644 --- a/src/libstore/include/nix/store/path-info.hh +++ b/src/libstore/include/nix/store/path-info.hh @@ -14,6 +14,22 @@ namespace nix { class Store; struct StoreDirConfig; +/** + * JSON format version for path info output. + */ +enum class PathInfoJsonFormat { + /// Legacy format with string hashes and full store paths + V1 = 1, + /// New format with structured hashes and store path base names + V2 = 2, +}; + +/** + * Convert an integer version number to PathInfoJsonFormat. + * Throws Error if the version is not supported. + */ +PathInfoJsonFormat parsePathInfoJsonFormat(uint64_t version); + struct SubstitutablePathInfo { std::optional deriver; @@ -38,6 +54,14 @@ using SubstitutablePathInfos = std::map; */ struct UnkeyedValidPathInfo { + /** + * The store directory this store object belongs to. + * + * This supports relocatable store objects where different objects + * may have different store directories. + */ + std::string storeDir; + /** * Path to derivation that produced this store object, if known. */ @@ -94,15 +118,20 @@ struct UnkeyedValidPathInfo * path then implies the contents.) * * Ideally, the content-addressability assertion would just be a Boolean, - * and the store path would be computed from the name component, ‘narHash’ - * and ‘references’. However, we support many types of content addresses. 
+ * and the store path would be computed from the name component, 'narHash' + * and 'references'. However, we support many types of content addresses. */ std::optional ca; UnkeyedValidPathInfo(const UnkeyedValidPathInfo & other) = default; - UnkeyedValidPathInfo(Hash narHash) - : narHash(narHash) {}; + UnkeyedValidPathInfo(const StoreDirConfig & store, Hash narHash); + + UnkeyedValidPathInfo(std::string storeDir, Hash narHash) + : storeDir(std::move(storeDir)) + , narHash(std::move(narHash)) + { + } bool operator==(const UnkeyedValidPathInfo &) const noexcept; @@ -114,14 +143,20 @@ struct UnkeyedValidPathInfo virtual ~UnkeyedValidPathInfo() {} /** + * @param store If non-null, store paths are rendered as full paths. + * If null, store paths are rendered as base names. * @param includeImpureInfo If true, variable elements such as the - * registration time are included. - */ - virtual nlohmann::json toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const; - static UnkeyedValidPathInfo fromJSON(const StoreDirConfig & store, const nlohmann::json & json); + * registration time are included. + * @param format JSON format version. Version 1 uses string hashes and + * string content addresses. Version 2 uses structured + * hashes and structured content addresses. 
+ */ + virtual nlohmann::json + toJSON(const StoreDirConfig * store, bool includeImpureInfo, PathInfoJsonFormat format) const; + static UnkeyedValidPathInfo fromJSON(const StoreDirConfig * store, const nlohmann::json & json); }; -struct ValidPathInfo : UnkeyedValidPathInfo +struct ValidPathInfo : virtual UnkeyedValidPathInfo { StorePath path; @@ -174,10 +209,14 @@ struct ValidPathInfo : UnkeyedValidPathInfo ValidPathInfo(StorePath && path, UnkeyedValidPathInfo info) : UnkeyedValidPathInfo(info) - , path(std::move(path)) {}; + , path(std::move(path)) + { + } + ValidPathInfo(const StorePath & path, UnkeyedValidPathInfo info) - : UnkeyedValidPathInfo(info) - , path(path) {}; + : ValidPathInfo(StorePath{path}, std::move(info)) + { + } static ValidPathInfo makeFromCA(const StoreDirConfig & store, std::string_view name, ContentAddressWithReferences && ca, Hash narHash); @@ -191,3 +230,7 @@ static_assert(std::is_move_constructible_v); using ValidPathInfos = std::map; } // namespace nix + +JSON_IMPL(nix::PathInfoJsonFormat) +JSON_IMPL(nix::UnkeyedValidPathInfo) +JSON_IMPL(nix::ValidPathInfo) diff --git a/src/libstore/include/nix/store/path-references.hh b/src/libstore/include/nix/store/path-references.hh index 66d0da2683f..6aa506da4a3 100644 --- a/src/libstore/include/nix/store/path-references.hh +++ b/src/libstore/include/nix/store/path-references.hh @@ -3,6 +3,10 @@ #include "nix/store/references.hh" #include "nix/store/path.hh" +#include "nix/util/source-accessor.hh" + +#include +#include namespace nix { @@ -21,4 +25,57 @@ public: StorePathSet getResultPaths(); }; +/** + * Result of scanning a single file for references. + */ +struct FileRefScanResult +{ + CanonPath filePath; ///< The file that was scanned + StorePathSet foundRefs; ///< Which store paths were found in this file +}; + +/** + * Scan a store path tree and report which references appear in which files. + * + * This is like scanForReferences() but provides per-file granularity. 
+ * Useful for cycle detection and detailed dependency analysis like `nix why-depends --precise`. + * + * The function walks the tree using the provided accessor and streams each file's + * contents through a RefScanSink to detect hash references. For each file that + * contains at least one reference, a callback is invoked with the file path and + * the set of references found. + * + * Note: This function only searches for the hash part of store paths (e.g., + * "dc04vv14dak1c1r48qa0m23vr9jy8sm0"), not the name part. A store path like + * "/nix/store/dc04vv14dak1c1r48qa0m23vr9jy8sm0-foo" will be detected if the + * hash appears anywhere in the scanned content, regardless of the "-foo" suffix. + * + * @param accessor Source accessor to read the tree + * @param rootPath Root path to scan + * @param refs Set of store paths to search for + * @param callback Called for each file that contains at least one reference + */ +void scanForReferencesDeep( + SourceAccessor & accessor, + const CanonPath & rootPath, + const StorePathSet & refs, + std::function callback); + +/** + * Scan a store path tree and return which references appear in which files. + * + * This is a convenience wrapper around the callback-based scanForReferencesDeep() + * that collects all results into a map for efficient lookups. + * + * Note: This function only searches for the hash part of store paths, not the name part. + * See the callback-based overload for details. 
+ * + * @param accessor Source accessor to read the tree + * @param rootPath Root path to scan + * @param refs Set of store paths to search for + * @return Map from file paths to the set of references found in each file + */ +std::map +scanForReferencesDeep(SourceAccessor & accessor, const CanonPath & rootPath, const StorePathSet & refs); + } // namespace nix diff --git a/src/libstore/include/nix/store/pathlocks.hh b/src/libstore/include/nix/store/pathlocks.hh index 05c7e079a53..7e27bec4cc1 100644 --- a/src/libstore/include/nix/store/pathlocks.hh +++ b/src/libstore/include/nix/store/pathlocks.hh @@ -1,6 +1,8 @@ #pragma once ///@file +#include + #include "nix/util/file-descriptor.hh" namespace nix { @@ -10,12 +12,12 @@ namespace nix { * -1 is returned if create is false and the lock could not be opened * because it doesn't exist. Any other error throws an exception. */ -AutoCloseFD openLockFile(const Path & path, bool create); +AutoCloseFD openLockFile(const std::filesystem::path & path, bool create); /** * Delete an open lock file. 
*/ -void deleteLockFile(const Path & path, Descriptor desc); +void deleteLockFile(const std::filesystem::path & path, Descriptor desc); enum LockType { ltRead, ltWrite, ltNone }; @@ -24,14 +26,14 @@ bool lockFile(Descriptor desc, LockType lockType, bool wait); class PathLocks { private: - typedef std::pair FDPair; + typedef std::pair FDPair; std::list fds; bool deletePaths; public: PathLocks(); - PathLocks(const PathSet & paths, const std::string & waitMsg = ""); - bool lockPaths(const PathSet & _paths, const std::string & waitMsg = "", bool wait = true); + PathLocks(const std::set & paths, const std::string & waitMsg = ""); + bool lockPaths(const std::set & _paths, const std::string & waitMsg = "", bool wait = true); ~PathLocks(); void unlock(); void setDeletion(bool deletePaths); diff --git a/src/libstore/include/nix/store/profiles.hh b/src/libstore/include/nix/store/profiles.hh index 75cd1134097..1cc306744f7 100644 --- a/src/libstore/include/nix/store/profiles.hh +++ b/src/libstore/include/nix/store/profiles.hh @@ -7,12 +7,13 @@ * See the manual for additional information. */ -#include "nix/util/types.hh" -#include "nix/store/pathlocks.hh" - +#include #include #include +#include "nix/util/types.hh" +#include "nix/store/pathlocks.hh" + namespace nix { class StorePath; @@ -47,9 +48,9 @@ struct Generation * distinct contents to avoid bloat, but nothing stops two * non-adjacent generations from having the same contents. * - * @todo Use `StorePath` instead of `Path`? + * @todo Use `StorePath` instead of `std::filesystem::path`? */ - Path path; + std::filesystem::path path; /** * When the generation was created. This is extra metadata about the @@ -81,7 +82,7 @@ typedef std::list Generations; * * Note that the current/active generation need not be the latest one. 
*/ -std::pair> findGenerations(Path profile); +std::pair> findGenerations(std::filesystem::path profile); struct LocalFSStore; @@ -96,7 +97,7 @@ struct LocalFSStore; * The behavior of reusing existing generations like this makes this * procedure idempotent. It also avoids clutter. */ -Path createGeneration(LocalFSStore & store, Path profile, StorePath outPath); +std::filesystem::path createGeneration(LocalFSStore & store, std::filesystem::path profile, StorePath outPath); /** * Unconditionally delete a generation @@ -111,7 +112,7 @@ Path createGeneration(LocalFSStore & store, Path profile, StorePath outPath); * * @todo Should we expose this at all? */ -void deleteGeneration(const Path & profile, GenerationNumber gen); +void deleteGeneration(const std::filesystem::path & profile, GenerationNumber gen); /** * Delete the given set of generations. @@ -128,7 +129,8 @@ void deleteGeneration(const Path & profile, GenerationNumber gen); * Trying to delete the currently active generation will fail, and cause * no generations to be deleted. */ -void deleteGenerations(const Path & profile, const std::set & gensToDelete, bool dryRun); +void deleteGenerations( + const std::filesystem::path & profile, const std::set & gensToDelete, bool dryRun); /** * Delete generations older than `max` passed the current generation. @@ -142,7 +144,7 @@ void deleteGenerations(const Path & profile, const std::set & * @param dryRun Log what would be deleted instead of actually doing * so. */ -void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bool dryRun); +void deleteGenerationsGreaterThan(const std::filesystem::path & profile, GenerationNumber max, bool dryRun); /** * Delete all generations other than the current one @@ -153,7 +155,7 @@ void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bo * @param dryRun Log what would be deleted instead of actually doing * so. 
*/ -void deleteOldGenerations(const Path & profile, bool dryRun); +void deleteOldGenerations(const std::filesystem::path & profile, bool dryRun); /** * Delete generations older than `t`, except for the most recent one @@ -165,7 +167,7 @@ void deleteOldGenerations(const Path & profile, bool dryRun); * @param dryRun Log what would be deleted instead of actually doing * so. */ -void deleteGenerationsOlderThan(const Path & profile, time_t t, bool dryRun); +void deleteGenerationsOlderThan(const std::filesystem::path & profile, time_t t, bool dryRun); /** * Parse a temp spec intended for `deleteGenerationsOlderThan()`. @@ -180,19 +182,19 @@ time_t parseOlderThanTimeSpec(std::string_view timeSpec); * * @todo Always use `switchGeneration()` instead, and delete this. */ -void switchLink(Path link, Path target); +void switchLink(std::filesystem::path link, std::filesystem::path target); /** * Roll back a profile to the specified generation, or to the most * recent one older than the current. */ -void switchGeneration(const Path & profile, std::optional dstGen, bool dryRun); +void switchGeneration(const std::filesystem::path & profile, std::optional dstGen, bool dryRun); /** * Ensure exclusive access to a profile. Any command that modifies * the profile first acquires this lock. */ -void lockProfile(PathLocks & lock, const Path & profile); +void lockProfile(PathLocks & lock, const std::filesystem::path & profile); /** * Optimistic locking is used by long-running operations like `nix-env @@ -205,34 +207,34 @@ void lockProfile(PathLocks & lock, const Path & profile); * store. Most of the time, only the user environment has to be * rebuilt. */ -std::string optimisticLockProfile(const Path & profile); +std::string optimisticLockProfile(const std::filesystem::path & profile); /** * Create and return the path to a directory suitable for storing the user’s * profiles. 
*/ -Path profilesDir(); +std::filesystem::path profilesDir(); /** * Return the path to the profile directory for root (but don't try creating it) */ -Path rootProfilesDir(); +std::filesystem::path rootProfilesDir(); /** * Create and return the path to the file used for storing the users's channels */ -Path defaultChannelsDir(); +std::filesystem::path defaultChannelsDir(); /** * Return the path to the channel directory for root (but don't try creating it) */ -Path rootChannelsDir(); +std::filesystem::path rootChannelsDir(); /** * Resolve the default profile (~/.nix-profile by default, * $XDG_STATE_HOME/nix/profile if XDG Base Directory Support is enabled), * and create if doesn't exist */ -Path getDefaultProfile(); +std::filesystem::path getDefaultProfile(); } // namespace nix diff --git a/src/libstore/include/nix/store/realisation.hh b/src/libstore/include/nix/store/realisation.hh index 3424a39c9c8..af0e4aefd8a 100644 --- a/src/libstore/include/nix/store/realisation.hh +++ b/src/libstore/include/nix/store/realisation.hh @@ -46,12 +46,12 @@ struct DrvOutput static DrvOutput parse(const std::string &); - GENERATE_CMP(DrvOutput, me->drvHash, me->outputName); + bool operator==(const DrvOutput &) const = default; + auto operator<=>(const DrvOutput &) const = default; }; -struct Realisation +struct UnkeyedRealisation { - DrvOutput id; StorePath outPath; StringSet signatures; @@ -64,22 +64,35 @@ struct Realisation */ std::map dependentRealisations; - std::string fingerprint() const; - void sign(const Signer &); - bool checkSignature(const PublicKeys & publicKeys, const std::string & sig) const; - size_t checkSignatures(const PublicKeys & publicKeys) const; + std::string fingerprint(const DrvOutput & key) const; - static std::set closure(Store &, const std::set &); - static void closure(Store &, const std::set &, std::set & res); + void sign(const DrvOutput & key, const Signer &); + + bool checkSignature(const DrvOutput & key, const PublicKeys & publicKeys, const 
std::string & sig) const; - bool isCompatibleWith(const Realisation & other) const; + size_t checkSignatures(const DrvOutput & key, const PublicKeys & publicKeys) const; - StorePath getPath() const + const StorePath & getPath() const { return outPath; } - GENERATE_CMP(Realisation, me->id, me->outPath); + // TODO sketchy that it avoids signatures + GENERATE_CMP(UnkeyedRealisation, me->outPath); +}; + +struct Realisation : UnkeyedRealisation +{ + DrvOutput id; + + bool isCompatibleWith(const UnkeyedRealisation & other) const; + + static std::set closure(Store &, const std::set &); + + static void closure(Store &, const std::set &, std::set & res); + + bool operator==(const Realisation &) const = default; + auto operator<=>(const Realisation &) const = default; }; /** @@ -103,12 +116,13 @@ struct OpaquePath { StorePath path; - StorePath getPath() const + const StorePath & getPath() const & { return path; } - GENERATE_CMP(OpaquePath, me->path); + bool operator==(const OpaquePath &) const = default; + auto operator<=>(const OpaquePath &) const = default; }; /** @@ -116,7 +130,7 @@ struct OpaquePath */ struct RealisedPath { - /* + /** * A path is either the result of the realisation of a derivation or * an opaque blob that has been directly added to the store */ @@ -138,13 +152,14 @@ struct RealisedPath /** * Get the raw store path associated to this */ - StorePath path() const; + const StorePath & path() const &; void closure(Store & store, Set & ret) const; static void closure(Store & store, const Set & startPaths, Set & ret); Set closure(Store & store) const; - GENERATE_CMP(RealisedPath, me->raw); + bool operator==(const RealisedPath &) const = default; + auto operator<=>(const RealisedPath &) const = default; }; class MissingRealisation : public Error @@ -167,4 +182,6 @@ public: } // namespace nix +JSON_IMPL(nix::DrvOutput) +JSON_IMPL(nix::UnkeyedRealisation) JSON_IMPL(nix::Realisation) diff --git a/src/libstore/include/nix/store/remote-store.hh 
b/src/libstore/include/nix/store/remote-store.hh index 6a207d24a91..1244eeec001 100644 --- a/src/libstore/include/nix/store/remote-store.hh +++ b/src/libstore/include/nix/store/remote-store.hh @@ -106,7 +106,7 @@ struct RemoteStore : public virtual Store, void registerDrvOutput(const Realisation & info) override; void queryRealisationUncached( - const DrvOutput &, Callback> callback) noexcept override; + const DrvOutput &, Callback> callback) noexcept override; void buildPaths(const std::vector & paths, BuildMode buildMode, std::shared_ptr evalStore) override; diff --git a/src/libstore/include/nix/store/s3-binary-cache-store.hh b/src/libstore/include/nix/store/s3-binary-cache-store.hh index 2fe66b0ad93..5896293f1c4 100644 --- a/src/libstore/include/nix/store/s3-binary-cache-store.hh +++ b/src/libstore/include/nix/store/s3-binary-cache-store.hh @@ -2,41 +2,28 @@ ///@file #include "nix/store/config.hh" - -#if NIX_WITH_S3_SUPPORT - -# include "nix/store/binary-cache-store.hh" - -# include +#include "nix/store/http-binary-cache-store.hh" namespace nix { -struct S3BinaryCacheStoreConfig : std::enable_shared_from_this, virtual BinaryCacheStoreConfig +struct S3BinaryCacheStoreConfig : HttpBinaryCacheStoreConfig { - std::string bucketName; - - using BinaryCacheStoreConfig::BinaryCacheStoreConfig; + using HttpBinaryCacheStoreConfig::HttpBinaryCacheStoreConfig; S3BinaryCacheStoreConfig(std::string_view uriScheme, std::string_view bucketName, const Params & params); const Setting profile{ this, - "", + "default", "profile", R"( The name of the AWS configuration profile to use. By default Nix uses the `default` profile. )"}; -protected: - - constexpr static const char * defaultRegion = "us-east-1"; - -public: - const Setting region{ this, - defaultRegion, + "us-east-1", "region", R"( The region of the S3 bucket. 
If your bucket is not in @@ -46,7 +33,7 @@ public: const Setting scheme{ this, - "", + "https", "scheme", R"( The scheme used for S3 requests, `https` (default) or `http`. This @@ -64,74 +51,86 @@ public: "", "endpoint", R"( - The URL of the endpoint of an S3-compatible service such as MinIO. - Do not specify this setting if you're using Amazon S3. + The S3 endpoint to use. When empty (default), uses AWS S3 with + region-specific endpoints (e.g., s3.us-east-1.amazonaws.com). + For S3-compatible services such as MinIO, set this to your service's endpoint. > **Note** > - > This endpoint must support HTTPS and uses path-based + > Custom endpoints must support HTTPS and use path-based > addressing instead of virtual host based addressing. )"}; - const Setting narinfoCompression{ - this, "", "narinfo-compression", "Compression method for `.narinfo` files."}; + const Setting multipartUpload{ + this, + false, + "multipart-upload", + R"( + Whether to use multipart uploads for large files. When enabled, + files exceeding the multipart threshold will be uploaded in + multiple parts, which is required for files larger than 5 GiB and + can improve performance and reliability for large uploads. + )"}; - const Setting lsCompression{this, "", "ls-compression", "Compression method for `.ls` files."}; + const Setting multipartChunkSize{ + this, + 5 * 1024 * 1024, + "multipart-chunk-size", + R"( + The size (in bytes) of each part in multipart uploads. Must be + at least 5 MiB (AWS S3 requirement). Larger chunk sizes reduce the + number of requests but use more memory. Default is 5 MiB. + )", + {"buffer-size"}}; - const Setting logCompression{ + const Setting multipartThreshold{ this, - "", - "log-compression", + 100 * 1024 * 1024, + "multipart-threshold", R"( - Compression method for `log/*` files. It is recommended to - use a compression method supported by most web browsers - (e.g. `brotli`). + The minimum file size (in bytes) for using multipart uploads. 
+ Files smaller than this threshold will use regular PUT requests. + Default is 100 MiB. Only takes effect when multipart-upload is enabled. )"}; - const Setting multipartUpload{this, false, "multipart-upload", "Whether to use multi-part uploads."}; + const Setting> storageClass{ + this, + std::nullopt, + "storage-class", + R"( + The S3 storage class to use for uploaded objects. When not set (default), + uses the bucket's default storage class. Valid values include: + - STANDARD (default, frequently accessed data) + - REDUCED_REDUNDANCY (less frequently accessed data) + - STANDARD_IA (infrequent access) + - ONEZONE_IA (infrequent access, single AZ) + - INTELLIGENT_TIERING (automatic cost optimization) + - GLACIER (archival with retrieval times in minutes to hours) + - DEEP_ARCHIVE (long-term archival with 12-hour retrieval) + - GLACIER_IR (instant retrieval archival) + + See AWS S3 documentation for detailed storage class descriptions and pricing: + https://docs.aws.amazon.com/AmazonS3/latest/userguide/storage-class-intro.html + )"}; - const Setting bufferSize{ - this, 5 * 1024 * 1024, "buffer-size", "Size (in bytes) of each part in multi-part uploads."}; + /** + * Set of settings that are part of the S3 URI itself. + * These are needed for region specification and other S3-specific settings. 
+ */ + const std::set s3UriSettings = {&profile, ®ion, &scheme, &endpoint}; static const std::string name() { return "S3 Binary Cache Store"; } - static StringSet uriSchemes() - { - return {"s3"}; - } + static StringSet uriSchemes(); static std::string doc(); - ref openStore() const override; - - StoreReference getReference() const override; -}; + std::string getHumanReadableURI() const override; -struct S3BinaryCacheStore : virtual BinaryCacheStore -{ - using Config = S3BinaryCacheStoreConfig; - - ref config; - - S3BinaryCacheStore(ref); - - struct Stats - { - std::atomic put{0}; - std::atomic putBytes{0}; - std::atomic putTimeMs{0}; - std::atomic get{0}; - std::atomic getBytes{0}; - std::atomic getTimeMs{0}; - std::atomic head{0}; - }; - - virtual const Stats & getS3Stats() = 0; + ref openStore() const override; }; } // namespace nix - -#endif diff --git a/src/libstore/include/nix/store/s3-url.hh b/src/libstore/include/nix/store/s3-url.hh index 45c3b2d1c7b..cf59dbea86a 100644 --- a/src/libstore/include/nix/store/s3-url.hh +++ b/src/libstore/include/nix/store/s3-url.hh @@ -1,16 +1,13 @@ #pragma once ///@file #include "nix/store/config.hh" +#include "nix/util/url.hh" +#include "nix/util/util.hh" -#if NIX_WITH_S3_SUPPORT || NIX_WITH_CURL_S3 - -# include "nix/util/url.hh" -# include "nix/util/util.hh" - -# include -# include -# include -# include +#include +#include +#include +#include namespace nix { @@ -29,6 +26,7 @@ struct ParsedS3URL std::optional profile; std::optional region; std::optional scheme; + std::optional versionId; /** * The endpoint can be either missing, be an absolute URI (with a scheme like `http:`) * or an authority (so an IP address or a registered name). 
@@ -56,5 +54,3 @@ struct ParsedS3URL }; } // namespace nix - -#endif diff --git a/src/libstore/include/nix/store/s3.hh b/src/libstore/include/nix/store/s3.hh deleted file mode 100644 index ba3adbc2a28..00000000000 --- a/src/libstore/include/nix/store/s3.hh +++ /dev/null @@ -1,50 +0,0 @@ -#pragma once -///@file -#include "nix/store/config.hh" -#if NIX_WITH_S3_SUPPORT - -# include "nix/util/ref.hh" -# include "nix/store/s3-url.hh" - -# include - -namespace Aws { -namespace Client { -struct ClientConfiguration; -} -} // namespace Aws - -namespace Aws { -namespace S3 { -class S3Client; -} -} // namespace Aws - -namespace nix { - -struct S3Helper -{ - ref config; - ref client; - - S3Helper( - const std::string & profile, - const std::string & region, - const std::string & scheme, - const std::string & endpoint); - - ref - makeConfig(const std::string & region, const std::string & scheme, const std::string & endpoint); - - struct FileTransferResult - { - std::optional data; - unsigned int durationMs; - }; - - FileTransferResult getObject(const std::string & bucketName, const std::string & key); -}; - -} // namespace nix - -#endif diff --git a/src/libstore/include/nix/store/store-api.hh b/src/libstore/include/nix/store/store-api.hh index 09c8799e1f8..e74bee09550 100644 --- a/src/libstore/include/nix/store/store-api.hh +++ b/src/libstore/include/nix/store/store-api.hh @@ -31,6 +31,7 @@ MakeError(SubstituterDisabled, Error); MakeError(InvalidStoreReference, Error); +struct UnkeyedRealisation; struct Realisation; struct RealisedPath; struct DrvOutput; @@ -309,7 +310,7 @@ protected: // Note: this is a `ref` to avoid false sharing with immutable // bits of `Store`. - ref>> pathInfoCache; + ref>> pathInfoCache; std::shared_ptr diskCache; @@ -407,12 +408,12 @@ public: /** * Query the information about a realisation. */ - std::shared_ptr queryRealisation(const DrvOutput &); + std::shared_ptr queryRealisation(const DrvOutput &); /** * Asynchronous version of queryRealisation(). 
*/ - void queryRealisation(const DrvOutput &, Callback> callback) noexcept; + void queryRealisation(const DrvOutput &, Callback> callback) noexcept; /** * Check whether the given valid path info is sufficiently attested, by @@ -439,8 +440,8 @@ protected: virtual void queryPathInfoUncached(const StorePath & path, Callback> callback) noexcept = 0; - virtual void - queryRealisationUncached(const DrvOutput &, Callback> callback) noexcept = 0; + virtual void queryRealisationUncached( + const DrvOutput &, Callback> callback) noexcept = 0; public: @@ -617,7 +618,7 @@ public: /** * Write a NAR dump of a store path. */ - virtual void narFromPath(const StorePath & path, Sink & sink) = 0; + virtual void narFromPath(const StorePath & path, Sink & sink); /** * For each path, if it's a derivation, build it. Building a @@ -733,10 +734,28 @@ public: * the Nix store. * * @return nullptr if the store doesn't contain an object at the - * givine path. + * given path. */ virtual std::shared_ptr getFSAccessor(const StorePath & path, bool requireValidPath = true) = 0; + /** + * Get an accessor for the store object or throw an Error if it's invalid or + * doesn't exist. + * + * @throws InvalidPath if the store object doesn't exist or (if requireValidPath = true) is + * invalid. + */ + [[nodiscard]] ref requireStoreObjectAccessor(const StorePath & path, bool requireValidPath = true) + { + auto accessor = getFSAccessor(path, requireValidPath); + if (!accessor) { + throw InvalidPath( + requireValidPath ? "path '%1%' is not a valid store path" : "store path '%1%' does not exist", + printStorePath(path)); + } + return ref{accessor}; + } + /** * Repair the contents of the given path by redownloading it using * a substituter (if available). @@ -768,15 +787,20 @@ public: */ Derivation derivationFromPath(const StorePath & drvPath); + /** + * Write a derivation to the Nix store, and return its path. 
+ */ + virtual StorePath writeDerivation(const Derivation & drv, RepairFlag repair = NoRepair); + /** * Read a derivation (which must already be valid). */ - Derivation readDerivation(const StorePath & drvPath); + virtual Derivation readDerivation(const StorePath & drvPath); /** * Read a derivation from a potentially invalid path. */ - Derivation readInvalidDerivation(const StorePath & drvPath); + virtual Derivation readInvalidDerivation(const StorePath & drvPath); /** * @param [out] out Place in here the set of all store paths in the @@ -880,16 +904,6 @@ public: */ virtual std::optional isTrustedClient() = 0; - virtual Path toRealPath(const Path & storePath) - { - return storePath; - } - - Path toRealPath(const StorePath & storePath) - { - return toRealPath(printStorePath(storePath)); - } - /** * Synchronises the options of the client with those of the daemon * (a no-op when there’s no daemon) @@ -991,6 +1005,12 @@ OutputPathMap resolveDerivedPath(Store &, const DerivedPath::Built &, Store * ev */ std::string showPaths(const PathSet & paths); +/** + * Display a set of paths in human-readable form (i.e., between quotes + * and separated by commas). 
+ */ +std::string showPaths(const std::set paths); + std::optional decodeValidPathInfo(const Store & store, std::istream & str, std::optional hashGiven = std::nullopt); @@ -999,4 +1019,10 @@ const ContentAddress * getDerivationCA(const BasicDerivation & drv); std::map drvOutputReferences(Store & store, const Derivation & drv, const StorePath & outputPath, Store * evalStore = nullptr); +template<> +struct json_avoids_null : std::true_type +{}; + } // namespace nix + +JSON_IMPL(nix::TrustedFlag) diff --git a/src/libstore/include/nix/store/store-dir-config.hh b/src/libstore/include/nix/store/store-dir-config.hh index 07cda5c12af..34e928182ad 100644 --- a/src/libstore/include/nix/store/store-dir-config.hh +++ b/src/libstore/include/nix/store/store-dir-config.hh @@ -91,7 +91,7 @@ struct StoreDirConfig std::pair computeStorePath( std::string_view name, const SourcePath & path, - ContentAddressMethod method = FileIngestionMethod::NixArchive, + ContentAddressMethod method = ContentAddressMethod::Raw::NixArchive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, const StorePathSet & references = {}, PathFilter & filter = defaultPathFilter) const; diff --git a/src/libstore/include/nix/store/uds-remote-store.hh b/src/libstore/include/nix/store/uds-remote-store.hh index fe6e486f412..764e8768a32 100644 --- a/src/libstore/include/nix/store/uds-remote-store.hh +++ b/src/libstore/include/nix/store/uds-remote-store.hh @@ -68,7 +68,7 @@ struct UDSRemoteStore : virtual IndirectRootStore, virtual RemoteStore void narFromPath(const StorePath & path, Sink & sink) override { - LocalFSStore::narFromPath(path, sink); + Store::narFromPath(path, sink); } /** diff --git a/src/libstore/include/nix/store/worker-protocol.hh b/src/libstore/include/nix/store/worker-protocol.hh index 5105ef20207..36d918a3dc8 100644 --- a/src/libstore/include/nix/store/worker-protocol.hh +++ b/src/libstore/include/nix/store/worker-protocol.hh @@ -37,6 +37,7 @@ struct ValidPathInfo; struct UnkeyedValidPathInfo; enum 
BuildMode : uint8_t; enum TrustedFlag : bool; +enum class GCAction; /** * The "worker protocol", used by unix:// and ssh-ng:// stores. @@ -268,6 +269,8 @@ DECLARE_WORKER_SERIALISER(UnkeyedValidPathInfo); template<> DECLARE_WORKER_SERIALISER(BuildMode); template<> +DECLARE_WORKER_SERIALISER(GCAction); +template<> DECLARE_WORKER_SERIALISER(std::optional); template<> DECLARE_WORKER_SERIALISER(std::optional); diff --git a/src/libstore/local-binary-cache-store.cc b/src/libstore/local-binary-cache-store.cc index b5e43de68b4..63730a01bd7 100644 --- a/src/libstore/local-binary-cache-store.cc +++ b/src/libstore/local-binary-cache-store.cc @@ -54,15 +54,12 @@ struct LocalBinaryCacheStore : virtual BinaryCacheStore bool fileExists(const std::string & path) override; void upsertFile( - const std::string & path, - std::shared_ptr> istream, - const std::string & mimeType) override + const std::string & path, RestartableSource & source, const std::string & mimeType, uint64_t sizeHint) override { auto path2 = config->binaryCacheDir + "/" + path; static std::atomic counter{0}; Path tmp = fmt("%s.tmp.%d.%d", path2, getpid(), ++counter); AutoDelete del(tmp, false); - StreamToSourceAdapter source(istream); writeFile(tmp, source); std::filesystem::rename(tmp, path2); del.cancel(); diff --git a/src/libstore/local-fs-store.cc b/src/libstore/local-fs-store.cc index 67024c4e365..b8f8c6dbdea 100644 --- a/src/libstore/local-fs-store.cc +++ b/src/libstore/local-fs-store.cc @@ -112,13 +112,6 @@ std::shared_ptr LocalFSStore::getFSAccessor(const StorePath & pa return std::make_shared(std::move(absPath)); } -void LocalFSStore::narFromPath(const StorePath & path, Sink & sink) -{ - if (!isValidPath(path)) - throw Error("path '%s' is not valid", printStorePath(path)); - dumpPath(getRealStoreDir() + std::string(printStorePath(path), storeDir.size()), sink); -} - const std::string LocalFSStore::drvsLogDir = "drvs"; std::optional LocalFSStore::getBuildLogExact(const StorePath & path) diff --git 
a/src/libstore/local-overlay-store.cc b/src/libstore/local-overlay-store.cc index 2b000b3dba6..c8aa1d1a2b6 100644 --- a/src/libstore/local-overlay-store.cc +++ b/src/libstore/local-overlay-store.cc @@ -77,7 +77,7 @@ void LocalOverlayStore::registerDrvOutput(const Realisation & info) // First do queryRealisation on lower layer to populate DB auto res = lowerStore->queryRealisation(info.id); if (res) - LocalStore::registerDrvOutput(*res); + LocalStore::registerDrvOutput({*res, info.id}); LocalStore::registerDrvOutput(info); } @@ -108,12 +108,12 @@ void LocalOverlayStore::queryPathInfoUncached( } void LocalOverlayStore::queryRealisationUncached( - const DrvOutput & drvOutput, Callback> callback) noexcept + const DrvOutput & drvOutput, Callback> callback) noexcept { auto callbackPtr = std::make_shared(std::move(callback)); LocalStore::queryRealisationUncached( - drvOutput, {[this, drvOutput, callbackPtr](std::future> fut) { + drvOutput, {[this, drvOutput, callbackPtr](std::future> fut) { try { auto info = fut.get(); if (info) @@ -123,7 +123,7 @@ void LocalOverlayStore::queryRealisationUncached( } // If we don't have it, check lower store lowerStore->queryRealisation( - drvOutput, {[callbackPtr](std::future> fut) { + drvOutput, {[callbackPtr](std::future> fut) { try { (*callbackPtr)(fut.get()); } catch (...) 
{ @@ -246,7 +246,7 @@ void LocalOverlayStore::optimiseStore() if (lowerStore->isValidPath(path)) { uint64_t bytesFreed = 0; // Deduplicate store path - deleteStorePath(Store::toRealPath(path), bytesFreed); + deleteStorePath(toRealPath(path), bytesFreed); } done++; act.progress(done, paths.size()); diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index c4b9fde310c..3c9ae14f03f 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -724,8 +724,7 @@ uint64_t LocalStore::addValidPath(State & state, const ValidPathInfo & info, boo } } - pathInfoCache->lock()->upsert( - std::string(info.path.to_string()), PathInfoCacheValue{.value = std::make_shared(info)}); + pathInfoCache->lock()->upsert(info.path, PathInfoCacheValue{.value = std::make_shared(info)}); return id; } @@ -760,7 +759,7 @@ std::shared_ptr LocalStore::queryPathInfoInternal(State & s throw Error("invalid-path entry for '%s': %s", printStorePath(path), e.what()); } - auto info = std::make_shared(path, narHash); + auto info = std::make_shared(path, UnkeyedValidPathInfo(*this, narHash)); info->id = id; @@ -991,19 +990,22 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos) error if a cycle is detected and roll back the transaction. Cycles can only occur when a derivation has multiple outputs. */ - topoSort( - paths, - {[&](const StorePath & path) { - auto i = infos.find(path); - return i == infos.end() ? StorePathSet() : i->second.references; - }}, - {[&](const StorePath & path, const StorePath & parent) { - return BuildError( - BuildResult::Failure::OutputRejected, - "cycle detected in the references of '%s' from '%s'", - printStorePath(path), - printStorePath(parent)); - }}); + auto topoSortResult = topoSort(paths, [&](const StorePath & path) { + auto i = infos.find(path); + return i == infos.end() ? 
StorePathSet() : i->second.references; + }); + + std::visit( + overloaded{ + [&](const Cycle & cycle) { + throw BuildError( + BuildResult::Failure::OutputRejected, + "cycle detected in the references of '%s' from '%s'", + printStorePath(cycle.path), + printStorePath(cycle.parent)); + }, + [](auto &) { /* Success, continue */ }}, + topoSortResult); txn.commit(); }); @@ -1020,7 +1022,7 @@ void LocalStore::invalidatePath(State & state, const StorePath & path) /* Note that the foreign key constraints on the Refs table take care of deleting the references entries for `path'. */ - pathInfoCache->lock()->erase(std::string(path.to_string())); + pathInfoCache->lock()->erase(path); } const PublicKeys & LocalStore::getPublicKeys() @@ -1038,7 +1040,7 @@ bool LocalStore::pathInfoIsUntrusted(const ValidPathInfo & info) bool LocalStore::realisationIsUntrusted(const Realisation & realisation) { - return config->requireSigs && !realisation.checkSignatures(getPublicKeys()); + return config->requireSigs && !realisation.checkSignatures(realisation.id, getPublicKeys()); } void LocalStore::addToStore(const ValidPathInfo & info, Source & source, RepairFlag repair, CheckSigsFlag checkSigs) @@ -1065,7 +1067,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, RepairF PathLocks outputLock; - auto realPath = Store::toRealPath(info.path); + auto realPath = toRealPath(info.path); /* Lock the output path. But don't lock if we're being called from a build hook (whose parent process already acquired a @@ -1245,10 +1247,8 @@ StorePath LocalStore::addToStoreFromDump( auto desc = ContentAddressWithReferences::fromParts( hashMethod, - methodsMatch - ? dumpHash - : hashPath(PosixSourceAccessor::createAtRoot(tempPath), hashMethod.getFileIngestionMethod(), hashAlgo) - .first, + methodsMatch ? 
dumpHash + : hashPath(makeFSSourceAccessor(tempPath), hashMethod.getFileIngestionMethod(), hashAlgo).first, { .others = references, // caller is not capable of creating a self-reference, because this is content-addressed without modulus @@ -1264,7 +1264,7 @@ StorePath LocalStore::addToStoreFromDump( /* The first check above is an optimisation to prevent unnecessary lock acquisition. */ - auto realPath = Store::toRealPath(dstPath); + auto realPath = toRealPath(dstPath); PathLocks outputLock({realPath}); @@ -1333,7 +1333,7 @@ std::pair LocalStore::createTempDirInStore() /* There is a slight possibility that `tmpDir' gets deleted by the GC between createTempDir() and when we acquire a lock on it. We'll repeat until 'tmpDir' exists and we've locked it. */ - tmpDirFn = createTempDir(config->realStoreDir, "tmp"); + tmpDirFn = createTempDir(std::filesystem::path{config->realStoreDir.get()}, "tmp"); tmpDirFd = openDirectory(tmpDirFn); if (!tmpDirFd) { continue; @@ -1384,12 +1384,9 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) checkInterrupt(); auto name = link.path().filename(); printMsg(lvlTalkative, "checking contents of %s", name); - PosixSourceAccessor accessor; - std::string hash = hashPath( - PosixSourceAccessor::createAtRoot(link.path()), - FileIngestionMethod::NixArchive, - HashAlgorithm::SHA256) - .first.to_string(HashFormat::Nix32, false); + std::string hash = + hashPath(makeFSSourceAccessor(link.path()), FileIngestionMethod::NixArchive, HashAlgorithm::SHA256) + .first.to_string(HashFormat::Nix32, false); if (hash != name.string()) { printError("link %s was modified! 
expected hash %s, got '%s'", link.path(), name, hash); if (repair) { @@ -1415,7 +1412,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) auto hashSink = HashSink(info->narHash.algo); - dumpPath(Store::toRealPath(i), hashSink); + dumpPath(toRealPath(i), hashSink); auto current = hashSink.finish(); if (info->narHash != nullHash && info->narHash != current.hash) { @@ -1586,7 +1583,7 @@ void LocalStore::addSignatures(const StorePath & storePath, const StringSet & si }); } -std::optional> +std::optional> LocalStore::queryRealisationCore_(LocalStore::State & state, const DrvOutput & id) { auto useQueryRealisedOutput(state.stmts->QueryRealisedOutput.use()(id.strHash())(id.outputName)); @@ -1598,14 +1595,13 @@ LocalStore::queryRealisationCore_(LocalStore::State & state, const DrvOutput & i return { {realisationDbId, - Realisation{ - .id = id, + UnkeyedRealisation{ .outPath = outputPath, .signatures = signatures, }}}; } -std::optional LocalStore::queryRealisation_(LocalStore::State & state, const DrvOutput & id) +std::optional LocalStore::queryRealisation_(LocalStore::State & state, const DrvOutput & id) { auto maybeCore = queryRealisationCore_(state, id); if (!maybeCore) @@ -1631,13 +1627,17 @@ std::optional LocalStore::queryRealisation_(LocalStore::State } void LocalStore::queryRealisationUncached( - const DrvOutput & id, Callback> callback) noexcept + const DrvOutput & id, Callback> callback) noexcept { try { - auto maybeRealisation = - retrySQLite>([&]() { return queryRealisation_(*_state->lock(), id); }); + if (!experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { + callback(nullptr); + return; + } + auto maybeRealisation = retrySQLite>( + [&]() { return queryRealisation_(*_state->lock(), id); }); if (maybeRealisation) - callback(std::make_shared(maybeRealisation.value())); + callback(std::make_shared(maybeRealisation.value())); else callback(nullptr); diff --git a/src/libstore/meson.build b/src/libstore/meson.build index 
c23bc28a177..0a0d2b8cac6 100644 --- a/src/libstore/meson.build +++ b/src/libstore/meson.build @@ -120,7 +120,18 @@ boost = dependency( # put in `deps_other`. deps_other += boost -curl = dependency('libcurl', 'curl') +curl = dependency('libcurl', 'curl', version : '>= 7.75.0') +if curl.version().version_compare('>=8.16.0') and curl.version().version_compare( + '<8.17.0', +) + # Out of precaution, avoid building with libcurl version that suffer from https://github.com/curl/curl/issues/19334. + error( + 'curl @0@ has issues with write pausing, please use libcurl < 8.16 or >= 8.17, see https://github.com/curl/curl/issues/19334'.format( + curl.version(), + ), + ) +endif + deps_private += curl # seccomp only makes sense on Linux @@ -149,54 +160,16 @@ deps_public += nlohmann_json sqlite = dependency('sqlite3', 'sqlite', version : '>=3.6.19') deps_private += sqlite -# AWS C++ SDK has bad pkg-config. See -# https://github.com/aws/aws-sdk-cpp/issues/2673 for details. -aws_s3 = dependency('aws-cpp-sdk-s3', required : false) -# The S3 store definitions in the header will be hidden based on this variables. 
-configdata_pub.set('NIX_WITH_S3_SUPPORT', aws_s3.found().to_int()) -if aws_s3.found() - aws_s3 = declare_dependency( - include_directories : include_directories(aws_s3.get_variable('includedir')), - link_args : [ - '-L' + aws_s3.get_variable('libdir'), - '-laws-cpp-sdk-transfer', - '-laws-cpp-sdk-s3', - '-laws-cpp-sdk-identity-management', - '-laws-cpp-sdk-cognito-identity', - '-laws-cpp-sdk-sts', - '-laws-cpp-sdk-core', - '-laws-crt-cpp', - ], - ).as_system('system') -endif -deps_other += aws_s3 - -# Curl-based S3 store support (alternative to AWS SDK) -# Check if curl supports AWS SigV4 (requires >= 7.75.0) -curl_supports_aws_sigv4 = curl.version().version_compare('>= 7.75.0') -# AWS CRT C++ for lightweight credential management -aws_crt_cpp = cxx.find_library('aws-crt-cpp', required : false) - -curl_s3_store_opt = get_option('curl-s3-store').require( - curl_supports_aws_sigv4, - error_message : 'curl-based S3 support requires curl >= 7.75.0', -).require( - aws_crt_cpp.found(), - error_message : 'curl-based S3 support requires aws-crt-cpp', -) +s3_aws_auth = get_option('s3-aws-auth') +aws_crt_cpp = cxx.find_library('aws-crt-cpp', required : s3_aws_auth) -# Make AWS SDK and curl-based S3 mutually exclusive -if aws_s3.found() and curl_s3_store_opt.enabled() - error( - 'Cannot enable both AWS SDK S3 support and curl-based S3 support. 
Please choose one.', - ) -endif - -if curl_s3_store_opt.enabled() +if s3_aws_auth.enabled() deps_other += aws_crt_cpp + aws_c_common = cxx.find_library('aws-c-common', required : true) + deps_other += aws_c_common endif -configdata_pub.set('NIX_WITH_CURL_S3', curl_s3_store_opt.enabled().to_int()) +configdata_pub.set('NIX_WITH_AWS_AUTH', s3_aws_auth.enabled().to_int()) subdir('nix-meson-build-support/generate-header') @@ -297,18 +270,18 @@ config_priv_h = configure_file( ) subdir('nix-meson-build-support/common') -subdir('nix-meson-build-support/asan-options') sources = files( 'active-builds.cc', 'async-path-writer.cc', - 'aws-creds.cc', 'binary-cache-store.cc', 'build-result.cc', + 'build/derivation-builder.cc', 'build/derivation-building-goal.cc', 'build/derivation-check.cc', 'build/derivation-env-desugar.cc', 'build/derivation-goal.cc', + 'build/derivation-resolution-goal.cc', 'build/derivation-trampoline-goal.cc', 'build/drv-output-substitution-goal.cc', 'build/entry-points.cc', @@ -346,7 +319,6 @@ sources = files( 'make-content-addressed.cc', 'misc.cc', 'names.cc', - 'nar-accessor.cc', 'nar-info-disk-cache.cc', 'nar-info.cc', 'optimise-store.cc', @@ -380,6 +352,11 @@ sources = files( 'worker-protocol.cc', ) +# AWS credentials code requires AWS CRT, so only compile when enabled +if s3_aws_auth.enabled() + sources += files('aws-creds.cc') +endif + subdir('include/nix/store') if host_machine.system() == 'linux' diff --git a/src/libstore/meson.options b/src/libstore/meson.options index edc43bd4513..c822133df46 100644 --- a/src/libstore/meson.options +++ b/src/libstore/meson.options @@ -35,8 +35,7 @@ option( ) option( - 'curl-s3-store', + 's3-aws-auth', type : 'feature', - value : 'disabled', - description : 'Enable curl-based S3 binary cache store support (requires aws-crt-cpp and curl >= 7.75.0)', + description : 'build support for AWS authentication with S3', ) diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc index 7efaa4f860e..f9a339a0057 100644 --- 
a/src/libstore/misc.cc +++ b/src/libstore/misc.cc @@ -10,6 +10,7 @@ #include "nix/util/closure.hh" #include "nix/store/filetransfer.hh" #include "nix/util/strings.hh" +#include "nix/util/json-utils.hh" #include @@ -126,13 +127,13 @@ MissingPaths Store::queryMissing(const std::vector & targets) std::function doPath; - std::function, const DerivedPathMap::ChildNode &)> enqueueDerivedPaths; - - enqueueDerivedPaths = [&](ref inputDrv, const DerivedPathMap::ChildNode & inputNode) { + auto enqueueDerivedPaths = [&](this auto self, + ref inputDrv, + const DerivedPathMap::ChildNode & inputNode) -> void { if (!inputNode.value.empty()) pool.enqueue(std::bind(doPath, DerivedPath::Built{inputDrv, inputNode.value})); for (const auto & [outputName, childNode] : inputNode.childMap) - enqueueDerivedPaths(make_ref(SingleDerivedPath::Built{inputDrv, outputName}), childNode); + self(make_ref(SingleDerivedPath::Built{inputDrv, outputName}), childNode); }; auto mustBuildDrv = [&](const StorePath & drvPath, const Derivation & drv) { @@ -224,11 +225,12 @@ MissingPaths Store::queryMissing(const std::vector & targets) return; auto drv = make_ref(derivationFromPath(drvPath)); - DerivationOptions drvOptions; + DerivationOptions drvOptions; try { // FIXME: this is a lot of work just to get the value // of `allowSubstitutes`. 
- drvOptions = DerivationOptions::fromStructuredAttrs(drv->env, drv->structuredAttrs); + drvOptions = derivationOptionsFromStructuredAttrs( + *this, drv->inputDrvs, drv->env, get(drv->structuredAttrs)); } catch (Error & e) { e.addTrace({}, "while parsing derivation '%s'", printStorePath(drvPath)); throw; @@ -311,22 +313,25 @@ MissingPaths Store::queryMissing(const std::vector & targets) StorePaths Store::topoSortPaths(const StorePathSet & paths) { - return topoSort( - paths, - {[&](const StorePath & path) { - try { - return queryPathInfo(path)->references; - } catch (InvalidPath &) { - return StorePathSet(); - } - }}, - {[&](const StorePath & path, const StorePath & parent) { - return BuildError( - BuildResult::Failure::OutputRejected, - "cycle detected in the references of '%s' from '%s'", - printStorePath(path), - printStorePath(parent)); - }}); + auto result = topoSort(paths, [&](const StorePath & path) { + try { + return queryPathInfo(path)->references; + } catch (InvalidPath &) { + return StorePathSet(); + } + }); + + return std::visit( + overloaded{ + [&](const Cycle & cycle) -> StorePaths { + throw BuildError( + BuildResult::Failure::OutputRejected, + "cycle detected in the references of '%s' from '%s'", + printStorePath(cycle.path), + printStorePath(cycle.parent)); + }, + [](const auto & sorted) { return sorted; }}, + result); } std::map @@ -350,9 +355,9 @@ drvOutputReferences(Store & store, const Derivation & drv, const StorePath & out std::set inputRealisations; - std::function::ChildNode &)> accumRealisations; - - accumRealisations = [&](const StorePath & inputDrv, const DerivedPathMap::ChildNode & inputNode) { + auto accumRealisations = [&](this auto & self, + const StorePath & inputDrv, + const DerivedPathMap::ChildNode & inputNode) -> void { if (!inputNode.value.empty()) { auto outputHashes = staticOutputHashes(evalStore, evalStore.readDerivation(inputDrv)); for (const auto & outputName : inputNode.value) { @@ -360,18 +365,19 @@ 
drvOutputReferences(Store & store, const Derivation & drv, const StorePath & out if (!outputHash) throw Error( "output '%s' of derivation '%s' isn't realised", outputName, store.printStorePath(inputDrv)); - auto thisRealisation = store.queryRealisation(DrvOutput{*outputHash, outputName}); + DrvOutput key{*outputHash, outputName}; + auto thisRealisation = store.queryRealisation(key); if (!thisRealisation) throw Error( "output '%s' of derivation '%s' isn’t built", outputName, store.printStorePath(inputDrv)); - inputRealisations.insert(*thisRealisation); + inputRealisations.insert({*thisRealisation, std::move(key)}); } } if (!inputNode.value.empty()) { auto d = makeConstantStorePathRef(inputDrv); for (const auto & [outputName, childNode] : inputNode.childMap) { SingleDerivedPath next = SingleDerivedPath::Built{d, outputName}; - accumRealisations( + self( // TODO deep resolutions for dynamic derivations, issue #8947, would go here. resolveDerivedPath(store, next, evalStore_), childNode); @@ -478,3 +484,19 @@ OutputPathMap resolveDerivedPath(Store & store, const DerivedPath::Built & bfd) } } // namespace nix + +namespace nlohmann { + +using namespace nix; + +TrustedFlag adl_serializer::from_json(const json & json) +{ + return getBoolean(json) ? 
TrustedFlag::Trusted : TrustedFlag::NotTrusted; +} + +void adl_serializer::to_json(json & json, const TrustedFlag & trustedFlag) +{ + json = static_cast(trustedFlag); +} + +} // namespace nlohmann diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc index 11608a667b3..c32c6cd2b31 100644 --- a/src/libstore/nar-info-disk-cache.cc +++ b/src/libstore/nar-info-disk-cache.cc @@ -86,7 +86,7 @@ class NarInfoDiskCacheImpl : public NarInfoDiskCache Sync _state; - NarInfoDiskCacheImpl(Path dbPath = getCacheDir() + "/binary-cache-v7.sqlite") + NarInfoDiskCacheImpl(Path dbPath = (getCacheDir() / "binary-cache-v7.sqlite").string()) { auto state(_state.lock()); @@ -264,8 +264,8 @@ class NarInfoDiskCacheImpl : public NarInfoDiskCache return {oInvalid, 0}; auto namePart = queryNAR.getStr(1); - auto narInfo = - make_ref(StorePath(hashPart + "-" + namePart), Hash::parseAnyPrefixed(queryNAR.getStr(6))); + auto narInfo = make_ref( + cache.storeDir, StorePath(hashPart + "-" + namePart), Hash::parseAnyPrefixed(queryNAR.getStr(6))); narInfo->url = queryNAR.getStr(2); narInfo->compression = queryNAR.getStr(3); if (!queryNAR.isNull(4)) diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc index 1e7c48287d6..27ed5a76143 100644 --- a/src/libstore/nar-info.cc +++ b/src/libstore/nar-info.cc @@ -7,7 +7,9 @@ namespace nix { NarInfo::NarInfo(const StoreDirConfig & store, const std::string & s, const std::string & whence) - : ValidPathInfo(StorePath(StorePath::dummy), Hash(Hash::dummy)) // FIXME: hack + : UnkeyedValidPathInfo(store, Hash::dummy) // FIXME: hack + , ValidPathInfo(StorePath::dummy, static_cast(*this)) // FIXME: hack + , UnkeyedNarInfo(static_cast(*this)) { unsigned line = 1; @@ -130,19 +132,24 @@ std::string NarInfo::to_string(const StoreDirConfig & store) const return res; } -nlohmann::json NarInfo::toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const +nlohmann::json +UnkeyedNarInfo::toJSON(const 
StoreDirConfig * store, bool includeImpureInfo, PathInfoJsonFormat format) const { using nlohmann::json; - auto jsonObject = ValidPathInfo::toJSON(store, includeImpureInfo, hashFormat); + auto jsonObject = UnkeyedValidPathInfo::toJSON(store, includeImpureInfo, format); if (includeImpureInfo) { if (!url.empty()) jsonObject["url"] = url; if (!compression.empty()) jsonObject["compression"] = compression; - if (fileHash) - jsonObject["downloadHash"] = fileHash->to_string(hashFormat, true); + if (fileHash) { + if (format == PathInfoJsonFormat::V1) + jsonObject["downloadHash"] = fileHash->to_string(HashFormat::SRI, true); + else + jsonObject["downloadHash"] = *fileHash; + } if (fileSize) jsonObject["downloadSize"] = fileSize; } @@ -150,28 +157,49 @@ nlohmann::json NarInfo::toJSON(const StoreDirConfig & store, bool includeImpureI return jsonObject; } -NarInfo NarInfo::fromJSON(const StoreDirConfig & store, const StorePath & path, const nlohmann::json & json) +UnkeyedNarInfo UnkeyedNarInfo::fromJSON(const StoreDirConfig * store, const nlohmann::json & json) { - using nlohmann::detail::value_t; + UnkeyedNarInfo res{UnkeyedValidPathInfo::fromJSON(store, json)}; + + auto & obj = getObject(json); - NarInfo res{ValidPathInfo{ - path, - UnkeyedValidPathInfo::fromJSON(store, json), - }}; + PathInfoJsonFormat format = PathInfoJsonFormat::V1; + if (auto * version = optionalValueAt(obj, "version")) + format = *version; - if (json.contains("url")) - res.url = getString(valueAt(json, "url")); + if (auto * url = get(obj, "url")) + res.url = getString(*url); - if (json.contains("compression")) - res.compression = getString(valueAt(json, "compression")); + if (auto * compression = get(obj, "compression")) + res.compression = getString(*compression); - if (json.contains("downloadHash")) - res.fileHash = Hash::parseAny(getString(valueAt(json, "downloadHash")), std::nullopt); + if (auto * downloadHash = get(obj, "downloadHash")) { + if (format == PathInfoJsonFormat::V1) + res.fileHash = 
Hash::parseSRI(getString(*downloadHash)); + else + res.fileHash = *downloadHash; + } - if (json.contains("downloadSize")) - res.fileSize = getUnsigned(valueAt(json, "downloadSize")); + if (auto * downloadSize = get(obj, "downloadSize")) + res.fileSize = getUnsigned(*downloadSize); return res; } } // namespace nix + +namespace nlohmann { + +using namespace nix; + +UnkeyedNarInfo adl_serializer::from_json(const json & json) +{ + return UnkeyedNarInfo::fromJSON(nullptr, json); +} + +void adl_serializer::to_json(json & json, const UnkeyedNarInfo & c) +{ + json = c.toJSON(nullptr, true, PathInfoJsonFormat::V2); +} + +} // namespace nlohmann diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc index 8f28781362e..d1e85830027 100644 --- a/src/libstore/optimise-store.cc +++ b/src/libstore/optimise-store.cc @@ -172,7 +172,7 @@ void LocalStore::optimisePath_( auto stLink = lstat(linkPath.string()); if (st.st_size != stLink.st_size || (repair && hash != ({ hashPath( - PosixSourceAccessor::createAtRoot(linkPath), + makeFSSourceAccessor(linkPath), FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256) .hash; @@ -234,7 +234,7 @@ void LocalStore::optimisePath_( its timestamp back to 0. */ MakeReadOnly makeReadOnly(mustToggle ? dirOfPath : ""); - std::filesystem::path tempLink = fmt("%1%/.tmp-link-%2%-%3%", config->realStoreDir, getpid(), rand()); + std::filesystem::path tempLink = makeTempPath(config->realStoreDir.get(), ".tmp-link"); try { std::filesystem::create_hard_link(linkPath, tempLink); @@ -255,8 +255,12 @@ void LocalStore::optimisePath_( try { std::filesystem::rename(tempLink, path); } catch (std::filesystem::filesystem_error & e) { - std::filesystem::remove(tempLink); - printError("unable to unlink %1%", tempLink); + { + std::error_code ec; + remove(tempLink, ec); /* Clean up after ourselves. 
*/ + if (ec) + printError("unable to unlink %1%: %2%", tempLink, ec.message()); + } if (e.code() == std::errc::too_many_links) { /* Some filesystems generate too many links on the rename, rather than on the original link. (Probably it @@ -312,7 +316,7 @@ void LocalStore::optimiseStore() optimiseStore(stats); - printInfo("%s freed by hard-linking %d files", showBytes(stats.bytesFreed), stats.filesLinked); + printInfo("%s freed by hard-linking %d files", renderSize(stats.bytesFreed), stats.filesLinked); } void LocalStore::optimisePath(const Path & path, RepairFlag repair) diff --git a/src/libstore/outputs-spec.cc b/src/libstore/outputs-spec.cc index aacc964cdbb..622df5fc344 100644 --- a/src/libstore/outputs-spec.cc +++ b/src/libstore/outputs-spec.cc @@ -1,10 +1,10 @@ -#include #include +#include +#include "nix/store/path.hh" +#include "nix/store/store-dir-config.hh" #include "nix/util/util.hh" -#include "nix/util/regex-combinators.hh" #include "nix/store/outputs-spec.hh" -#include "nix/store/path-regex.hh" #include "nix/util/strings-inline.hh" namespace nix { @@ -19,31 +19,27 @@ bool OutputsSpec::contains(const std::string & outputName) const raw); } -static std::string outputSpecRegexStr = regex::either(regex::group(R"(\*)"), regex::group(regex::list(nameRegexStr))); - std::optional OutputsSpec::parseOpt(std::string_view s) { - static std::regex regex(std::string{outputSpecRegexStr}); - - std::cmatch match; - if (!std::regex_match(s.cbegin(), s.cend(), match, regex)) + try { + return parse(s); + } catch (BadStorePathName &) { return std::nullopt; - - if (match[1].matched) - return {OutputsSpec::All{}}; - - if (match[2].matched) - return OutputsSpec::Names{tokenizeString({match[2].first, match[2].second}, ",")}; - - assert(false); + } } OutputsSpec OutputsSpec::parse(std::string_view s) { - std::optional spec = parseOpt(s); - if (!spec) - throw Error("invalid outputs specifier '%s'", s); - return std::move(*spec); + using namespace std::string_view_literals; + + if 
(s == "*"sv) + return OutputsSpec::All{}; + + auto names = splitString(s, ","); + for (const auto & name : names) + checkName(name); + + return OutputsSpec::Names{std::move(names)}; } std::optional> ExtendedOutputsSpec::parseOpt(std::string_view s) diff --git a/src/libstore/package.nix b/src/libstore/package.nix index ba16c6ab803..44f43fdad36 100644 --- a/src/libstore/package.nix +++ b/src/libstore/package.nix @@ -9,7 +9,6 @@ nix-util, boost, curl, - aws-sdk-cpp, aws-crt-cpp, libseccomp, nlohmann_json, @@ -26,8 +25,6 @@ withAWS ? # Default is this way because there have been issues building this dependency stdenv.hostPlatform == stdenv.buildPlatform && (stdenv.isLinux || stdenv.isDarwin), - - withCurlS3 ? false, }: let @@ -69,8 +66,7 @@ mkMesonLibrary (finalAttrs: { sqlite ] ++ lib.optional stdenv.hostPlatform.isLinux libseccomp - ++ lib.optional withAWS aws-sdk-cpp - ++ lib.optional withCurlS3 aws-crt-cpp; + ++ lib.optional withAWS aws-crt-cpp; propagatedBuildInputs = [ nix-util @@ -80,7 +76,7 @@ mkMesonLibrary (finalAttrs: { mesonFlags = [ (lib.mesonEnable "seccomp-sandboxing" stdenv.hostPlatform.isLinux) (lib.mesonBool "embedded-sandbox-shell" embeddedSandboxShell) - (lib.mesonEnable "curl-s3-store" withCurlS3) + (lib.mesonEnable "s3-aws-auth" withAWS) ] ++ lib.optionals stdenv.hostPlatform.isLinux [ (lib.mesonOption "sandbox-shell" "${busybox-sandbox-shell}/bin/busybox") diff --git a/src/libstore/parsed-derivations.cc b/src/libstore/parsed-derivations.cc index 9e8d44d6ee8..95434bd20ac 100644 --- a/src/libstore/parsed-derivations.cc +++ b/src/libstore/parsed-derivations.cc @@ -33,7 +33,8 @@ std::optional StructuredAttrs::tryExtract(StringPairs & env) std::pair StructuredAttrs::unparse() const { - return {envVarName, structuredAttrs.dump()}; + // TODO don't copy the JSON object just to dump it. 
+ return {envVarName, static_cast(structuredAttrs).dump()}; } void StructuredAttrs::checkKeyNotInUse(const StringPairs & env) @@ -97,9 +98,9 @@ static nlohmann::json pathInfoToJSON(Store & store, const StorePathSet & storePa return jsonList; } -nlohmann::json StructuredAttrs::prepareStructuredAttrs( +nlohmann::json::object_t StructuredAttrs::prepareStructuredAttrs( Store & store, - const DerivationOptions & drvOptions, + const DerivationOptions & drvOptions, const StorePathSet & inputPaths, const DerivationOutputs & outputs) const { @@ -113,14 +114,14 @@ nlohmann::json StructuredAttrs::prepareStructuredAttrs( json["outputs"] = std::move(outputsJson); /* Handle exportReferencesGraph. */ - for (auto & [key, storePaths] : drvOptions.getParsedExportReferencesGraph(store)) { - json[key] = pathInfoToJSON(store, store.exportReferences(storePaths, storePaths)); + for (auto & [key, storePaths] : drvOptions.exportReferencesGraph) { + json[key] = pathInfoToJSON(store, store.exportReferences(storePaths, inputPaths)); } return json; } -std::string StructuredAttrs::writeShell(const nlohmann::json & json) +std::string StructuredAttrs::writeShell(const nlohmann::json::object_t & json) { auto handleSimpleType = [](const nlohmann::json & value) -> std::optional { @@ -144,7 +145,7 @@ std::string StructuredAttrs::writeShell(const nlohmann::json & json) std::string jsonSh; - for (auto & [key, value] : json.items()) { + for (auto & [key, value] : json) { if (!std::regex_match(key, shVarName)) continue; diff --git a/src/libstore/path-info.cc b/src/libstore/path-info.cc index 270c532bb31..ebab52cec81 100644 --- a/src/libstore/path-info.cc +++ b/src/libstore/path-info.cc @@ -8,10 +8,28 @@ namespace nix { +PathInfoJsonFormat parsePathInfoJsonFormat(uint64_t version) +{ + switch (version) { + case 1: + return PathInfoJsonFormat::V1; + case 2: + return PathInfoJsonFormat::V2; + default: + throw Error("unsupported path info JSON format version %d; supported versions are 1 and 2", version); + } 
+} + +UnkeyedValidPathInfo::UnkeyedValidPathInfo(const StoreDirConfig & store, Hash narHash) + : UnkeyedValidPathInfo{store.storeDir, narHash} +{ +} + GENERATE_CMP_EXT( , std::weak_ordering, UnkeyedValidPathInfo, + me->storeDir, me->deriver, me->narHash, me->references, @@ -129,7 +147,7 @@ ValidPathInfo ValidPathInfo::makeFromCA( { ValidPathInfo res{ store.makeFixedOutputPathFromCA(name, ca), - narHash, + UnkeyedValidPathInfo(store, narHash), }; res.ca = ContentAddress{ .method = ca.getMethod(), @@ -150,27 +168,46 @@ ValidPathInfo ValidPathInfo::makeFromCA( } nlohmann::json -UnkeyedValidPathInfo::toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const +UnkeyedValidPathInfo::toJSON(const StoreDirConfig * store, bool includeImpureInfo, PathInfoJsonFormat format) const { using nlohmann::json; + if (format == PathInfoJsonFormat::V1) + assert(store); + auto jsonObject = json::object(); - jsonObject["narHash"] = narHash.to_string(hashFormat, true); + jsonObject["version"] = format; + + jsonObject["storeDir"] = storeDir; + + jsonObject["narHash"] = format == PathInfoJsonFormat::V1 + ? static_cast(narHash.to_string(HashFormat::SRI, true)) + : static_cast(narHash); + jsonObject["narSize"] = narSize; { auto & jsonRefs = jsonObject["references"] = json::array(); for (auto & ref : references) - jsonRefs.emplace_back(store.printStorePath(ref)); + jsonRefs.emplace_back( + format == PathInfoJsonFormat::V1 ? static_cast(store->printStorePath(ref)) + : static_cast(ref)); } - jsonObject["ca"] = ca ? (std::optional{renderContentAddress(*ca)}) : std::nullopt; + if (format == PathInfoJsonFormat::V1) + jsonObject["ca"] = ca ? static_cast(renderContentAddress(*ca)) : static_cast(nullptr); + else + jsonObject["ca"] = ca; if (includeImpureInfo) { - jsonObject["deriver"] = deriver ? (std::optional{store.printStorePath(*deriver)}) : std::nullopt; - - jsonObject["registrationTime"] = registrationTime ? 
(std::optional{registrationTime}) : std::nullopt; + if (format == PathInfoJsonFormat::V1) { + jsonObject["deriver"] = + deriver ? static_cast(store->printStorePath(*deriver)) : static_cast(nullptr); + } else { + jsonObject["deriver"] = deriver; + } + jsonObject["registrationTime"] = registrationTime ? std::optional{registrationTime} : std::nullopt; jsonObject["ultimate"] = ultimate; @@ -182,46 +219,119 @@ UnkeyedValidPathInfo::toJSON(const StoreDirConfig & store, bool includeImpureInf return jsonObject; } -UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig & store, const nlohmann::json & _json) +UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig * store, const nlohmann::json & _json) { + auto & json = getObject(_json); + + PathInfoJsonFormat format = PathInfoJsonFormat::V1; + if (auto * version = optionalValueAt(json, "version")) + format = *version; + + if (format == PathInfoJsonFormat::V1) + assert(store); + UnkeyedValidPathInfo res{ - Hash(Hash::dummy), + [&] { + if (auto * rawStoreDir = optionalValueAt(json, "storeDir")) + return getString(*rawStoreDir); + else if (format == PathInfoJsonFormat::V1) + return store->storeDir; + else + throw Error("'storeDir' field is required in path info JSON format version 2"); + }(), + [&] { + return format == PathInfoJsonFormat::V1 ? Hash::parseSRI(getString(valueAt(json, "narHash"))) + : Hash(valueAt(json, "narHash")); + }(), }; - auto & json = getObject(_json); - res.narHash = Hash::parseAny(getString(valueAt(json, "narHash")), std::nullopt); res.narSize = getUnsigned(valueAt(json, "narSize")); try { - auto references = getStringList(valueAt(json, "references")); + auto & references = getArray(valueAt(json, "references")); for (auto & input : references) - res.references.insert(store.parseStorePath(static_cast(input))); + res.references.insert( + format == PathInfoJsonFormat::V1 ? 
store->parseStorePath(getString(input)) + : static_cast(input)); } catch (Error & e) { e.addTrace({}, "while reading key 'references'"); throw; } - // New format as this as nullable but mandatory field; handling - // missing is for back-compat. - if (json.contains("ca")) - if (auto * rawCa = getNullable(valueAt(json, "ca"))) - res.ca = ContentAddress::parse(getString(*rawCa)); + try { + if (format == PathInfoJsonFormat::V1) { + if (auto * rawCa = getNullable(valueAt(json, "ca"))) + res.ca = ContentAddress::parse(getString(*rawCa)); + } else { + res.ca = ptrToOwned(getNullable(valueAt(json, "ca"))); + } + } catch (Error & e) { + e.addTrace({}, "while reading key 'ca'"); + throw; + } - if (json.contains("deriver")) - if (auto * rawDeriver = getNullable(valueAt(json, "deriver"))) - res.deriver = store.parseStorePath(getString(*rawDeriver)); + if (auto * rawDeriver0 = optionalValueAt(json, "deriver")) { + if (format == PathInfoJsonFormat::V1) { + if (auto * rawDeriver = getNullable(*rawDeriver0)) + res.deriver = store->parseStorePath(getString(*rawDeriver)); + } else { + res.deriver = ptrToOwned(getNullable(*rawDeriver0)); + } + } - if (json.contains("registrationTime")) - if (auto * rawRegistrationTime = getNullable(valueAt(json, "registrationTime"))) + if (auto * rawRegistrationTime0 = optionalValueAt(json, "registrationTime")) + if (auto * rawRegistrationTime = getNullable(*rawRegistrationTime0)) res.registrationTime = getInteger(*rawRegistrationTime); - if (json.contains("ultimate")) - res.ultimate = getBoolean(valueAt(json, "ultimate")); + if (auto * rawUltimate = optionalValueAt(json, "ultimate")) + res.ultimate = getBoolean(*rawUltimate); - if (json.contains("signatures")) - res.sigs = getStringSet(valueAt(json, "signatures")); + if (auto * rawSignatures = optionalValueAt(json, "signatures")) + res.sigs = getStringSet(*rawSignatures); return res; } } // namespace nix + +namespace nlohmann { + +using namespace nix; + +PathInfoJsonFormat 
adl_serializer::from_json(const json & json) +{ + return parsePathInfoJsonFormat(getUnsigned(json)); +} + +void adl_serializer::to_json(json & json, const PathInfoJsonFormat & format) +{ + json = static_cast(format); +} + +UnkeyedValidPathInfo adl_serializer::from_json(const json & json) +{ + return UnkeyedValidPathInfo::fromJSON(nullptr, json); +} + +void adl_serializer::to_json(json & json, const UnkeyedValidPathInfo & c) +{ + json = c.toJSON(nullptr, true, PathInfoJsonFormat::V2); +} + +ValidPathInfo adl_serializer::from_json(const json & json0) +{ + auto json = getObject(json0); + + return ValidPathInfo{ + valueAt(json, "path"), + adl_serializer::from_json(json0), + }; +} + +void adl_serializer::to_json(json & json, const ValidPathInfo & v) +{ + adl_serializer::to_json(json, v); + json["path"] = v.path; +} + +} // namespace nlohmann diff --git a/src/libstore/path-references.cc b/src/libstore/path-references.cc index 8b167e9026c..3d783bbe4be 100644 --- a/src/libstore/path-references.cc +++ b/src/libstore/path-references.cc @@ -1,11 +1,15 @@ #include "nix/store/path-references.hh" #include "nix/util/hash.hh" #include "nix/util/archive.hh" +#include "nix/util/source-accessor.hh" +#include "nix/util/canon-path.hh" +#include "nix/util/logging.hh" #include #include #include #include +#include namespace nix { @@ -54,4 +58,90 @@ StorePathSet scanForReferences(Sink & toTee, const Path & path, const StorePathS return refsSink.getResultPaths(); } +void scanForReferencesDeep( + SourceAccessor & accessor, + const CanonPath & rootPath, + const StorePathSet & refs, + std::function callback) +{ + // Recursive tree walker + auto walk = [&](this auto & self, const CanonPath & path) -> void { + auto stat = accessor.lstat(path); + + switch (stat.type) { + case SourceAccessor::tRegular: { + // Create a fresh sink for each file to independently detect references. + // RefScanSink accumulates found hashes globally - once a hash is found, + // it remains in the result set. 
If we reused the same sink across files, + // we couldn't distinguish which files contain which references, as a hash + // found in an earlier file wouldn't be reported when found in later files. + PathRefScanSink sink = PathRefScanSink::fromPaths(refs); + + // Scan this file by streaming its contents through the sink + accessor.readFile(path, sink); + + // Get the references found in this file + auto foundRefs = sink.getResultPaths(); + + // Report if we found anything in this file + if (!foundRefs.empty()) { + debug("scanForReferencesDeep: found %d references in %s", foundRefs.size(), path.abs()); + callback(FileRefScanResult{.filePath = path, .foundRefs = std::move(foundRefs)}); + } + break; + } + + case SourceAccessor::tDirectory: { + // Recursively scan directory contents + auto entries = accessor.readDirectory(path); + for (const auto & [name, entryType] : entries) { + self(path / name); + } + break; + } + + case SourceAccessor::tSymlink: { + // Create a fresh sink for the symlink target (same reason as regular files) + PathRefScanSink sink = PathRefScanSink::fromPaths(refs); + + // Scan symlink target for references + auto target = accessor.readLink(path); + sink(std::string_view(target)); + + // Get the references found in this symlink target + auto foundRefs = sink.getResultPaths(); + + if (!foundRefs.empty()) { + debug("scanForReferencesDeep: found %d references in symlink %s", foundRefs.size(), path.abs()); + callback(FileRefScanResult{.filePath = path, .foundRefs = std::move(foundRefs)}); + } + break; + } + + case SourceAccessor::tChar: + case SourceAccessor::tBlock: + case SourceAccessor::tSocket: + case SourceAccessor::tFifo: + case SourceAccessor::tUnknown: + default: + throw Error("file '%s' has an unsupported type", path.abs()); + } + }; + + // Start the recursive walk from the root + walk(rootPath); +} + +std::map +scanForReferencesDeep(SourceAccessor & accessor, const CanonPath & rootPath, const StorePathSet & refs) +{ + std::map results; + + 
scanForReferencesDeep(accessor, rootPath, refs, [&](FileRefScanResult result) { + results[std::move(result.filePath)] = std::move(result.foundRefs); + }); + + return results; +} + } // namespace nix diff --git a/src/libstore/pathlocks.cc b/src/libstore/pathlocks.cc index 068c65625b8..a8e828655af 100644 --- a/src/libstore/pathlocks.cc +++ b/src/libstore/pathlocks.cc @@ -13,7 +13,7 @@ PathLocks::PathLocks() { } -PathLocks::PathLocks(const PathSet & paths, const std::string & waitMsg) +PathLocks::PathLocks(const std::set & paths, const std::string & waitMsg) : deletePaths(false) { lockPaths(paths, waitMsg); diff --git a/src/libstore/profiles.cc b/src/libstore/profiles.cc index 3f6fcb6ff76..22d3f8f8973 100644 --- a/src/libstore/profiles.cc +++ b/src/libstore/profiles.cc @@ -31,12 +31,12 @@ static std::optional parseName(const std::string & profileName return {}; } -std::pair> findGenerations(Path profile) +std::pair> findGenerations(std::filesystem::path profile) { Generations gens; - std::filesystem::path profileDir = dirOf(profile); - auto profileName = std::string(baseNameOf(profile)); + std::filesystem::path profileDir = profile.parent_path(); + auto profileName = profile.filename().string(); for (auto & i : DirectoryIterator{profileDir}) { checkInterrupt(); @@ -48,18 +48,20 @@ std::pair> findGenerations(Path pro gens.sort([](const Generation & a, const Generation & b) { return a.number < b.number; }); - return {gens, pathExists(profile) ? parseName(profileName, readLink(profile)) : std::nullopt}; + return {gens, pathExists(profile) ? parseName(profileName, readLink(profile).string()) : std::nullopt}; } /** * Create a generation name that can be parsed by `parseName()`. */ -static Path makeName(const Path & profile, GenerationNumber num) +static std::filesystem::path makeName(const std::filesystem::path & profile, GenerationNumber num) { - return fmt("%s-%s-link", profile, num); + /* NB std::filesystem::path when put in format strings is + quoted automatically. 
*/ + return fmt("%s-%s-link", profile.string(), num); } -Path createGeneration(LocalFSStore & store, Path profile, StorePath outPath) +std::filesystem::path createGeneration(LocalFSStore & store, std::filesystem::path profile, StorePath outPath) { /* The new generation number should be higher than old the previous ones. */ @@ -90,21 +92,24 @@ Path createGeneration(LocalFSStore & store, Path profile, StorePath outPath) to the permanent roots (of which the GC would have a stale view). If we didn't do it this way, the GC might remove the user environment etc. we've just built. */ - Path generation = makeName(profile, num + 1); - store.addPermRoot(outPath, generation); + auto generation = makeName(profile, num + 1); + store.addPermRoot(outPath, generation.string()); return generation; } -static void removeFile(const Path & path) +static void removeFile(const std::filesystem::path & path) { - if (remove(path.c_str()) == -1) - throw SysError("cannot unlink '%1%'", path); + try { + std::filesystem::remove(path); + } catch (std::filesystem::filesystem_error & e) { + throw SysError("removing file '%1%'", path); + } } -void deleteGeneration(const Path & profile, GenerationNumber gen) +void deleteGeneration(const std::filesystem::path & profile, GenerationNumber gen) { - Path generation = makeName(profile, gen); + std::filesystem::path generation = makeName(profile, gen); removeFile(generation); } @@ -117,7 +122,7 @@ void deleteGeneration(const Path & profile, GenerationNumber gen) * * - We only actually delete if `dryRun` is false. 
*/ -static void deleteGeneration2(const Path & profile, GenerationNumber gen, bool dryRun) +static void deleteGeneration2(const std::filesystem::path & profile, GenerationNumber gen, bool dryRun) { if (dryRun) notice("would remove profile version %1%", gen); @@ -127,7 +132,8 @@ static void deleteGeneration2(const Path & profile, GenerationNumber gen, bool d } } -void deleteGenerations(const Path & profile, const std::set & gensToDelete, bool dryRun) +void deleteGenerations( + const std::filesystem::path & profile, const std::set & gensToDelete, bool dryRun) { PathLocks lock; lockProfile(lock, profile); @@ -153,7 +159,7 @@ static inline void iterDropUntil(Generations & gens, auto && i, auto && cond) ; } -void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bool dryRun) +void deleteGenerationsGreaterThan(const std::filesystem::path & profile, GenerationNumber max, bool dryRun) { if (max == 0) throw Error("Must keep at least one generation, otherwise the current one would be deleted"); @@ -178,7 +184,7 @@ void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bo deleteGeneration2(profile, i->number, dryRun); } -void deleteOldGenerations(const Path & profile, bool dryRun) +void deleteOldGenerations(const std::filesystem::path & profile, bool dryRun) { PathLocks lock; lockProfile(lock, profile); @@ -190,7 +196,7 @@ void deleteOldGenerations(const Path & profile, bool dryRun) deleteGeneration2(profile, i.number, dryRun); } -void deleteGenerationsOlderThan(const Path & profile, time_t t, bool dryRun) +void deleteGenerationsOlderThan(const std::filesystem::path & profile, time_t t, bool dryRun) { PathLocks lock; lockProfile(lock, profile); @@ -238,16 +244,16 @@ time_t parseOlderThanTimeSpec(std::string_view timeSpec) return curTime - *days * 24 * 3600; } -void switchLink(Path link, Path target) +void switchLink(std::filesystem::path link, std::filesystem::path target) { /* Hacky. 
*/ - if (dirOf(target) == dirOf(link)) - target = baseNameOf(target); + if (target.parent_path() == link.parent_path()) + target = target.filename(); replaceSymlink(target, link); } -void switchGeneration(const Path & profile, std::optional dstGen, bool dryRun) +void switchGeneration(const std::filesystem::path & profile, std::optional dstGen, bool dryRun) { PathLocks lock; lockProfile(lock, profile); @@ -274,44 +280,47 @@ void switchGeneration(const Path & profile, std::optional dstG switchLink(profile, dst->path); } -void lockProfile(PathLocks & lock, const Path & profile) +void lockProfile(PathLocks & lock, const std::filesystem::path & profile) { lock.lockPaths({profile}, fmt("waiting for lock on profile '%1%'", profile)); lock.setDeletion(true); } -std::string optimisticLockProfile(const Path & profile) +std::string optimisticLockProfile(const std::filesystem::path & profile) { - return pathExists(profile) ? readLink(profile) : ""; + return pathExists(profile) ? readLink(profile).string() : ""; } -Path profilesDir() +std::filesystem::path profilesDir() { - auto profileRoot = isRootUser() ? rootProfilesDir() : createNixStateDir() + "/profiles"; + auto profileRoot = isRootUser() ? rootProfilesDir() : std::filesystem::path{createNixStateDir()} / "profiles"; createDirs(profileRoot); return profileRoot; } -Path rootProfilesDir() +std::filesystem::path rootProfilesDir() { - return settings.nixStateDir + "/profiles/per-user/root"; + return std::filesystem::path{settings.nixStateDir} / "profiles/per-user/root"; } -Path getDefaultProfile() +std::filesystem::path getDefaultProfile() { - Path profileLink = settings.useXDGBaseDirectories ? createNixStateDir() + "/profile" : getHome() + "/.nix-profile"; + std::filesystem::path profileLink = settings.useXDGBaseDirectories + ? 
std::filesystem::path{createNixStateDir()} / "profile" + : std::filesystem::path{getHome()} / ".nix-profile"; try { - auto profile = profilesDir() + "/profile"; + auto profile = profilesDir() / "profile"; if (!pathExists(profileLink)) { replaceSymlink(profile, profileLink); } // Backwards compatibility measure: Make root's profile available as // `.../default` as it's what NixOS and most of the init scripts expect - Path globalProfileLink = settings.nixStateDir + "/profiles/default"; + auto globalProfileLink = std::filesystem::path{settings.nixStateDir} / "profiles" / "default"; if (isRootUser() && !pathExists(globalProfileLink)) { replaceSymlink(profile, globalProfileLink); } - return absPath(readLink(profileLink), dirOf(profileLink)); + auto linkDir = profileLink.parent_path(); + return absPath(readLink(profileLink), &linkDir); } catch (Error &) { return profileLink; } catch (std::filesystem::filesystem_error &) { @@ -319,14 +328,14 @@ Path getDefaultProfile() } } -Path defaultChannelsDir() +std::filesystem::path defaultChannelsDir() { - return profilesDir() + "/channels"; + return profilesDir() / "channels"; } -Path rootChannelsDir() +std::filesystem::path rootChannelsDir() { - return rootProfilesDir() + "/channels"; + return rootProfilesDir() / "channels"; } } // namespace nix diff --git a/src/libstore/realisation.cc b/src/libstore/realisation.cc index febd67bd2d5..4aeb05874fb 100644 --- a/src/libstore/realisation.cc +++ b/src/libstore/realisation.cc @@ -39,7 +39,7 @@ void Realisation::closure(Store & store, const std::set & startOutp std::set res; for (auto & [currentDep, _] : current.dependentRealisations) { if (auto currentRealisation = store.queryRealisation(currentDep)) - res.insert(*currentRealisation); + res.insert({*currentRealisation, currentDep}); else throw Error("Unrealised derivation '%s'", currentDep.to_string()); } @@ -61,24 +61,25 @@ void Realisation::closure(Store & store, const std::set & startOutp }); } -std::string Realisation::fingerprint() 
const +std::string UnkeyedRealisation::fingerprint(const DrvOutput & key) const { - nlohmann::json serialized = *this; + nlohmann::json serialized = Realisation{*this, key}; serialized.erase("signatures"); return serialized.dump(); } -void Realisation::sign(const Signer & signer) +void UnkeyedRealisation::sign(const DrvOutput & key, const Signer & signer) { - signatures.insert(signer.signDetached(fingerprint())); + signatures.insert(signer.signDetached(fingerprint(key))); } -bool Realisation::checkSignature(const PublicKeys & publicKeys, const std::string & sig) const +bool UnkeyedRealisation::checkSignature( + const DrvOutput & key, const PublicKeys & publicKeys, const std::string & sig) const { - return verifyDetached(fingerprint(), sig, publicKeys); + return verifyDetached(fingerprint(key), sig, publicKeys); } -size_t Realisation::checkSignatures(const PublicKeys & publicKeys) const +size_t UnkeyedRealisation::checkSignatures(const DrvOutput & key, const PublicKeys & publicKeys) const { // FIXME: Maybe we should return `maxSigs` if the realisation corresponds to // an input-addressed one − because in that case the drv is enough to check @@ -86,19 +87,18 @@ size_t Realisation::checkSignatures(const PublicKeys & publicKeys) const size_t good = 0; for (auto & sig : signatures) - if (checkSignature(publicKeys, sig)) + if (checkSignature(key, publicKeys, sig)) good++; return good; } -StorePath RealisedPath::path() const +const StorePath & RealisedPath::path() const & { - return std::visit([](auto && arg) { return arg.getPath(); }, raw); + return std::visit([](auto & arg) -> auto & { return arg.getPath(); }, raw); } -bool Realisation::isCompatibleWith(const Realisation & other) const +bool Realisation::isCompatibleWith(const UnkeyedRealisation & other) const { - assert(id == other.id); if (outPath == other.outPath) { if (dependentRealisations.empty() != other.dependentRealisations.empty()) { warn( @@ -144,7 +144,17 @@ namespace nlohmann { using namespace nix; 
-Realisation adl_serializer::from_json(const json & json0) +DrvOutput adl_serializer::from_json(const json & json) +{ + return DrvOutput::parse(getString(json)); +} + +void adl_serializer::to_json(json & json, const DrvOutput & drvOutput) +{ + json = drvOutput.to_string(); +} + +UnkeyedRealisation adl_serializer::from_json(const json & json0) { auto json = getObject(json0); @@ -157,25 +167,39 @@ Realisation adl_serializer::from_json(const json & json0) for (auto & [jsonDepId, jsonDepOutPath] : getObject(*jsonDependencies)) dependentRealisations.insert({DrvOutput::parse(jsonDepId), jsonDepOutPath}); - return Realisation{ - .id = DrvOutput::parse(valueAt(json, "id")), + return UnkeyedRealisation{ .outPath = valueAt(json, "outPath"), .signatures = signatures, .dependentRealisations = dependentRealisations, }; } -void adl_serializer::to_json(json & json, const Realisation & r) +void adl_serializer::to_json(json & json, const UnkeyedRealisation & r) { auto jsonDependentRealisations = nlohmann::json::object(); for (auto & [depId, depOutPath] : r.dependentRealisations) jsonDependentRealisations.emplace(depId.to_string(), depOutPath); json = { - {"id", r.id.to_string()}, {"outPath", r.outPath}, {"signatures", r.signatures}, {"dependentRealisations", jsonDependentRealisations}, }; } +Realisation adl_serializer::from_json(const json & json0) +{ + auto json = getObject(json0); + + return Realisation{ + static_cast(json0), + valueAt(json, "id"), + }; +} + +void adl_serializer::to_json(json & json, const Realisation & r) +{ + json = static_cast(r); + json["id"] = r.id; +} + } // namespace nlohmann diff --git a/src/libstore/remote-fs-accessor.cc b/src/libstore/remote-fs-accessor.cc index e6715cbdfb0..51bab995354 100644 --- a/src/libstore/remote-fs-accessor.cc +++ b/src/libstore/remote-fs-accessor.cc @@ -1,6 +1,6 @@ #include #include "nix/store/remote-fs-accessor.hh" -#include "nix/store/nar-accessor.hh" +#include "nix/util/nar-accessor.hh" #include #include @@ -39,7 +39,7 @@ ref 
RemoteFSAccessor::addToCache(std::string_view hashPart, std: if (cacheDir != "") { try { - nlohmann::json j = listNar(narAccessor, CanonPath::root, true); + nlohmann::json j = listNarDeep(*narAccessor, CanonPath::root); writeFile(makeCacheFile(hashPart, "ls"), j.dump()); } catch (...) { ignoreExceptionExceptInterrupt(); @@ -70,26 +70,8 @@ std::shared_ptr RemoteFSAccessor::accessObject(const StorePath & try { listing = nix::readFile(makeCacheFile(storePath.hashPart(), "ls")); - - auto narAccessor = makeLazyNarAccessor(listing, [cacheFile](uint64_t offset, uint64_t length) { - AutoCloseFD fd = toDescriptor(open( - cacheFile.c_str(), - O_RDONLY -#ifndef _WIN32 - | O_CLOEXEC -#endif - )); - if (!fd) - throw SysError("opening NAR cache file '%s'", cacheFile); - - if (lseek(fromDescriptorReadOnly(fd.get()), offset, SEEK_SET) != (off_t) offset) - throw SysError("seeking in '%s'", cacheFile); - - std::string buf(length, 0); - readFull(fd.get(), buf.data(), length); - - return buf; - }); + auto listingJson = nlohmann::json::parse(listing); + auto narAccessor = makeLazyNarAccessor(listingJson, seekableGetNarBytes(cacheFile)); nars.emplace(storePath.hashPart(), narAccessor); return narAccessor; diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 045d518b979..91ff48a76c1 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -159,7 +159,8 @@ void RemoteStore::setOptions() bool RemoteStore::isValidPathUncached(const StorePath & path) { auto conn(getConnection()); - conn->to << WorkerProto::Op::IsValidPath << printStorePath(path); + conn->to << WorkerProto::Op::IsValidPath; + WorkerProto::write(*this, *conn, path); conn.processStderr(); return readInt(conn->from); } @@ -205,10 +206,8 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S conn.processStderr(); size_t count = readNum(conn->from); for (size_t n = 0; n < count; n++) { - SubstitutablePathInfo & 
info(infos[parseStorePath(readString(conn->from))]); - auto deriver = readString(conn->from); - if (deriver != "") - info.deriver = parseStorePath(deriver); + SubstitutablePathInfo & info(infos[WorkerProto::Serialise::read(*this, *conn)]); + info.deriver = WorkerProto::Serialise>::read(*this, *conn); info.references = WorkerProto::Serialise::read(*this, *conn); info.downloadSize = readLongLong(conn->from); info.narSize = readLongLong(conn->from); @@ -235,7 +234,8 @@ void RemoteStore::queryPathInfoUncached( void RemoteStore::queryReferrers(const StorePath & path, StorePathSet & referrers) { auto conn(getConnection()); - conn->to << WorkerProto::Op::QueryReferrers << printStorePath(path); + conn->to << WorkerProto::Op::QueryReferrers; + WorkerProto::write(*this, *conn, path); conn.processStderr(); for (auto & i : WorkerProto::Serialise::read(*this, *conn)) referrers.insert(i); @@ -244,7 +244,8 @@ void RemoteStore::queryReferrers(const StorePath & path, StorePathSet & referrer StorePathSet RemoteStore::queryValidDerivers(const StorePath & path) { auto conn(getConnection()); - conn->to << WorkerProto::Op::QueryValidDerivers << printStorePath(path); + conn->to << WorkerProto::Op::QueryValidDerivers; + WorkerProto::write(*this, *conn, path); conn.processStderr(); return WorkerProto::Serialise::read(*this, *conn); } @@ -255,7 +256,8 @@ StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path) return Store::queryDerivationOutputs(path); } auto conn(getConnection()); - conn->to << WorkerProto::Op::QueryDerivationOutputs << printStorePath(path); + conn->to << WorkerProto::Op::QueryDerivationOutputs; + WorkerProto::write(*this, *conn, path); conn.processStderr(); return WorkerProto::Serialise::read(*this, *conn); } @@ -266,7 +268,8 @@ RemoteStore::queryPartialDerivationOutputMap(const StorePath & path, Store * eva if (GET_PROTOCOL_MINOR(getProtocol()) >= 0x16) { if (!evalStore_) { auto conn(getConnection()); - conn->to << 
WorkerProto::Op::QueryDerivationOutputMap << printStorePath(path); + conn->to << WorkerProto::Op::QueryDerivationOutputMap; + WorkerProto::write(*this, *conn, path); conn.processStderr(); return WorkerProto::Serialise>>::read(*this, *conn); } else { @@ -299,10 +302,7 @@ std::optional RemoteStore::queryPathFromHashPart(const std::string & auto conn(getConnection()); conn->to << WorkerProto::Op::QueryPathFromHashPart << hashPart; conn.processStderr(); - Path path = readString(conn->from); - if (path.empty()) - return {}; - return parseStorePath(path); + return WorkerProto::Serialise>::read(*this, *conn); } ref RemoteStore::addCAToStore( @@ -384,7 +384,7 @@ ref RemoteStore::addCAToStore( break; } } - auto path = parseStorePath(readString(conn->from)); + auto path = WorkerProto::Serialise::read(*this, *conn); // Release our connection to prevent a deadlock in queryPathInfo(). conn_.reset(); return queryPathInfo(path); @@ -426,9 +426,10 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, Repair { auto conn(getConnection()); - conn->to << WorkerProto::Op::AddToStoreNar << printStorePath(info.path) - << (info.deriver ? 
printStorePath(*info.deriver) : "") - << info.narHash.to_string(HashFormat::Base16, false); + conn->to << WorkerProto::Op::AddToStoreNar; + WorkerProto::write(*this, *conn, info.path); + WorkerProto::write(*this, *conn, info.deriver); + conn->to << info.narHash.to_string(HashFormat::Base16, false); WorkerProto::write(*this, *conn, info.references); conn->to << info.registrationTime << info.narSize << info.ultimate << info.sigs << renderContentAddress(info.ca) << repair << !checkSigs; @@ -492,7 +493,7 @@ void RemoteStore::registerDrvOutput(const Realisation & info) auto conn(getConnection()); conn->to << WorkerProto::Op::RegisterDrvOutput; if (GET_PROTOCOL_MINOR(conn->protoVersion) < 31) { - conn->to << info.id.to_string(); + WorkerProto::write(*this, *conn, info.id); conn->to << std::string(info.outPath.to_string()); } else { WorkerProto::write(*this, *conn, info); @@ -501,7 +502,7 @@ void RemoteStore::registerDrvOutput(const Realisation & info) } void RemoteStore::queryRealisationUncached( - const DrvOutput & id, Callback> callback) noexcept + const DrvOutput & id, Callback> callback) noexcept { try { auto conn(getConnection()); @@ -512,24 +513,24 @@ void RemoteStore::queryRealisationUncached( } conn->to << WorkerProto::Op::QueryRealisation; - conn->to << id.to_string(); + WorkerProto::write(*this, *conn, id); conn.processStderr(); - auto real = [&]() -> std::shared_ptr { + auto real = [&]() -> std::shared_ptr { if (GET_PROTOCOL_MINOR(conn->protoVersion) < 31) { auto outPaths = WorkerProto::Serialise>::read(*this, *conn); if (outPaths.empty()) return nullptr; - return std::make_shared(Realisation{.id = id, .outPath = *outPaths.begin()}); + return std::make_shared(UnkeyedRealisation{.outPath = *outPaths.begin()}); } else { auto realisations = WorkerProto::Serialise>::read(*this, *conn); if (realisations.empty()) return nullptr; - return std::make_shared(*realisations.begin()); + return std::make_shared(*realisations.begin()); } }(); - 
callback(std::shared_ptr(real)); + callback(std::shared_ptr(real)); } catch (...) { return callback.rethrow(); } @@ -626,13 +627,15 @@ std::vector RemoteStore::buildPathsWithResults( auto realisation = queryRealisation(outputId); if (!realisation) throw MissingRealisation(outputId); - success.builtOutputs.emplace(output, *realisation); + success.builtOutputs.emplace(output, Realisation{*realisation, outputId}); } else { success.builtOutputs.emplace( output, Realisation{ - .id = outputId, - .outPath = outputPath, + UnkeyedRealisation{ + .outPath = outputPath, + }, + outputId, }); } } @@ -661,7 +664,8 @@ BuildResult RemoteStore::buildDerivation(const StorePath & drvPath, const BasicD void RemoteStore::ensurePath(const StorePath & path) { auto conn(getConnection()); - conn->to << WorkerProto::Op::EnsurePath << printStorePath(path); + conn->to << WorkerProto::Op::EnsurePath; + WorkerProto::write(*this, *conn, path); conn.processStderr(); readInt(conn->from); } @@ -681,8 +685,7 @@ Roots RemoteStore::findRoots(bool censor) Roots result; while (count--) { Path link = readString(conn->from); - auto target = parseStorePath(readString(conn->from)); - result[std::move(target)].emplace(link); + result[WorkerProto::Serialise::read(*this, *conn)].emplace(link); } return result; } @@ -691,7 +694,8 @@ void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results) { auto conn(getConnection()); - conn->to << WorkerProto::Op::CollectGarbage << options.action; + conn->to << WorkerProto::Op::CollectGarbage; + WorkerProto::write(*this, *conn, options.action); WorkerProto::write(*this, *conn, options.pathsToDelete); conn->to << options.ignoreLiveness << options.maxFreed @@ -726,7 +730,9 @@ bool RemoteStore::verifyStore(bool checkContents, RepairFlag repair) void RemoteStore::addSignatures(const StorePath & storePath, const StringSet & sigs) { auto conn(getConnection()); - conn->to << WorkerProto::Op::AddSignatures << printStorePath(storePath) << sigs; + conn->to << 
WorkerProto::Op::AddSignatures; + WorkerProto::write(*this, *conn, storePath); + conn->to << sigs; conn.processStderr(); readInt(conn->from); } diff --git a/src/libstore/restricted-store.cc b/src/libstore/restricted-store.cc index a1cb4160638..ef8aaa3801d 100644 --- a/src/libstore/restricted-store.cc +++ b/src/libstore/restricted-store.cc @@ -107,7 +107,7 @@ struct RestrictedStore : public virtual IndirectRootStore, public virtual GcStor void registerDrvOutput(const Realisation & info) override; void queryRealisationUncached( - const DrvOutput & id, Callback> callback) noexcept override; + const DrvOutput & id, Callback> callback) noexcept override; void buildPaths(const std::vector & paths, BuildMode buildMode, std::shared_ptr evalStore) override; @@ -226,7 +226,7 @@ void RestrictedStore::narFromPath(const StorePath & path, Sink & sink) { if (!goal.isAllowed(path)) throw InvalidPath("cannot dump unknown path '%s' in recursive Nix", printStorePath(path)); - LocalFSStore::narFromPath(path, sink); + Store::narFromPath(path, sink); } void RestrictedStore::ensurePath(const StorePath & path) @@ -244,7 +244,7 @@ void RestrictedStore::registerDrvOutput(const Realisation & info) } void RestrictedStore::queryRealisationUncached( - const DrvOutput & id, Callback> callback) noexcept + const DrvOutput & id, Callback> callback) noexcept // XXX: This should probably be allowed if the realisation corresponds to // an allowed derivation { diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index b70f04be782..fea5e467f7b 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -1,592 +1,471 @@ #include "nix/store/s3-binary-cache-store.hh" - -#if NIX_WITH_S3_SUPPORT - -# include - -# include "nix/store/s3.hh" -# include "nix/store/nar-info.hh" -# include "nix/store/nar-info-disk-cache.hh" -# include "nix/store/globals.hh" -# include "nix/util/compression.hh" -# include "nix/store/filetransfer.hh" -# 
include "nix/util/signals.hh" -# include "nix/store/store-registration.hh" - -# include -# include -# include -# include -# include -# include -# include -# include -# include -# include -# include -# include -# include -# include -# include -# include - -using namespace Aws::Transfer; +#include "nix/store/http-binary-cache-store.hh" +#include "nix/store/store-registration.hh" +#include "nix/util/error.hh" +#include "nix/util/logging.hh" +#include "nix/util/serialise.hh" +#include "nix/util/util.hh" + +#include +#include +#include +#include +#include namespace nix { -struct S3Error : public Error -{ - Aws::S3::S3Errors err; - Aws::String exceptionName; - - template - S3Error(Aws::S3::S3Errors err, Aws::String exceptionName, const Args &... args) - : Error(args...) - , err(err) - , exceptionName(exceptionName){}; -}; +MakeError(UploadToS3, Error); -/* Helper: given an Outcome, return R in case of success, or - throw an exception in case of an error. */ -template -R && checkAws(std::string_view s, Aws::Utils::Outcome && outcome) -{ - if (!outcome.IsSuccess()) - throw S3Error( - outcome.GetError().GetErrorType(), - outcome.GetError().GetExceptionName(), - fmt("%s: %s (request id: %s)", s, outcome.GetError().GetMessage(), outcome.GetError().GetRequestId())); - return outcome.GetResultWithOwnership(); -} +static constexpr uint64_t AWS_MIN_PART_SIZE = 5 * 1024 * 1024; // 5MiB +static constexpr uint64_t AWS_MAX_PART_SIZE = 5ULL * 1024 * 1024 * 1024; // 5GiB +static constexpr uint64_t AWS_MAX_PART_COUNT = 10000; -class AwsLogger : public Aws::Utils::Logging::FormattedLogSystem +class S3BinaryCacheStore : public virtual HttpBinaryCacheStore { - using Aws::Utils::Logging::FormattedLogSystem::FormattedLogSystem; - - void ProcessFormattedStatement(Aws::String && statement) override +public: + S3BinaryCacheStore(ref config) + : Store{*config} + , BinaryCacheStore{*config} + , HttpBinaryCacheStore{config} + , s3Config{config} { - debug("AWS: %s", chomp(statement)); } -# if 
!(AWS_SDK_VERSION_MAJOR <= 1 && AWS_SDK_VERSION_MINOR <= 7 && AWS_SDK_VERSION_PATCH <= 115) - void Flush() override {} -# endif -}; - -/* Retrieve the credentials from the list of AWS default providers, with the addition of the STS creds provider. This - last can be used to acquire further permissions with a specific IAM role. - Roughly based on https://github.com/aws/aws-sdk-cpp/issues/150#issuecomment-538548438 -*/ -struct CustomAwsCredentialsProviderChain : public Aws::Auth::AWSCredentialsProviderChain -{ - CustomAwsCredentialsProviderChain(const std::string & profile) - { - if (profile.empty()) { - // Use all the default AWS providers, plus the possibility to acquire a IAM role directly via a profile. - Aws::Auth::DefaultAWSCredentialsProviderChain default_aws_chain; - for (auto provider : default_aws_chain.GetProviders()) - AddProvider(provider); - AddProvider(std::make_shared()); - } else { - // Override the profile name to retrieve from the AWS config and credentials. I believe this option - // comes from the ?profile querystring in nix.conf. - AddProvider(std::make_shared(profile.c_str())); - AddProvider(std::make_shared(profile)); - } - } -}; + void upsertFile( + const std::string & path, RestartableSource & source, const std::string & mimeType, uint64_t sizeHint) override; -static void initAWS() -{ - static std::once_flag flag; - std::call_once(flag, []() { - Aws::SDKOptions options; - - /* We install our own OpenSSL locking function (see - shared.cc), so don't let aws-sdk-cpp override it. */ - options.cryptoOptions.initAndCleanupOpenSSL = false; - - if (verbosity >= lvlDebug) { - options.loggingOptions.logLevel = - verbosity == lvlDebug ? 
Aws::Utils::Logging::LogLevel::Debug : Aws::Utils::Logging::LogLevel::Trace; - options.loggingOptions.logger_create_fn = [options]() { - return std::make_shared(options.loggingOptions.logLevel); - }; - } +private: + ref s3Config; - Aws::InitAPI(options); - }); -} + /** + * Uploads a file to S3 using a regular (non-multipart) upload. + * + * This method is suitable for files up to 5GiB in size. For larger files, + * multipart upload should be used instead. + * + * @see https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutObject.html + */ + void upload( + std::string_view path, + RestartableSource & source, + uint64_t sizeHint, + std::string_view mimeType, + std::optional headers); -S3Helper::S3Helper( - const std::string & profile, const std::string & region, const std::string & scheme, const std::string & endpoint) - : config(makeConfig(region, scheme, endpoint)) - , client( - make_ref( - std::make_shared(profile), - *config, -# if AWS_SDK_VERSION_MAJOR == 1 && AWS_SDK_VERSION_MINOR < 3 - false, -# else - Aws::Client::AWSAuthV4Signer::PayloadSigningPolicy::Never, -# endif - endpoint.empty())) -{ -} + /** + * Uploads a file to S3 using multipart upload. + * + * This method is suitable for large files that exceed the multipart threshold. + * It orchestrates the complete multipart upload process: creating the upload, + * splitting the data into parts, uploading each part, and completing the upload. + * If any error occurs, the multipart upload is automatically aborted. + * + * @see https://docs.aws.amazon.com/AmazonS3/latest/userguide/mpuoverview.html + */ + void uploadMultipart( + std::string_view path, + RestartableSource & source, + uint64_t sizeHint, + std::string_view mimeType, + std::optional headers); -/* Log AWS retries. */ -class RetryStrategy : public Aws::Client::DefaultRetryStrategy -{ - bool ShouldRetry(const Aws::Client::AWSError & error, long attemptedRetries) const override + /** + * A Sink that manages a complete S3 multipart upload lifecycle. 
+ * Creates the upload on construction, buffers and uploads chunks as data arrives, + * and completes or aborts the upload appropriately. + */ + struct MultipartSink : Sink { - checkInterrupt(); - auto retry = Aws::Client::DefaultRetryStrategy::ShouldRetry(error, attemptedRetries); - if (retry) - printError( - "AWS error '%s' (%s; request id: %s), will retry in %d ms", - error.GetExceptionName(), - error.GetMessage(), - error.GetRequestId(), - CalculateDelayBeforeNextRetry(error, attemptedRetries)); - return retry; - } -}; - -ref -S3Helper::makeConfig(const std::string & region, const std::string & scheme, const std::string & endpoint) -{ - initAWS(); - auto res = make_ref(); - res->allowSystemProxy = true; - res->region = region; - if (!scheme.empty()) { - res->scheme = Aws::Http::SchemeMapper::FromString(scheme.c_str()); - } - if (!endpoint.empty()) { - res->endpointOverride = endpoint; - } - res->requestTimeoutMs = 600 * 1000; - res->connectTimeoutMs = 5 * 1000; - res->retryStrategy = std::make_shared(); - res->caFile = settings.caFile; - return res; -} - -S3Helper::FileTransferResult S3Helper::getObject(const std::string & bucketName, const std::string & key) -{ - std::string uri = "s3://" + bucketName + "/" + key; - Activity act( - *logger, lvlTalkative, actFileTransfer, fmt("downloading '%s'", uri), Logger::Fields{uri}, getCurActivity()); - - auto request = Aws::S3::Model::GetObjectRequest().WithBucket(bucketName).WithKey(key); + S3BinaryCacheStore & store; + std::string_view path; + std::string uploadId; + std::string::size_type chunkSize; + + std::vector partEtags; + std::string buffer; + + MultipartSink( + S3BinaryCacheStore & store, + std::string_view path, + uint64_t sizeHint, + std::string_view mimeType, + std::optional headers); + + void operator()(std::string_view data) override; + void finish(); + void uploadChunk(std::string chunk); + }; - request.SetResponseStreamFactory([&]() { return Aws::New("STRINGSTREAM"); }); + /** + * Creates a multipart 
upload for large objects to S3. + * + * @see + * https://docs.aws.amazon.com/AmazonS3/latest/API/API_CreateMultipartUpload.html#API_CreateMultipartUpload_RequestSyntax + */ + std::string createMultipartUpload(std::string_view key, std::string_view mimeType, std::optional headers); - size_t bytesDone = 0; - size_t bytesExpected = 0; - request.SetDataReceivedEventHandler( - [&](const Aws::Http::HttpRequest * req, Aws::Http::HttpResponse * resp, long long l) { - if (!bytesExpected && resp->HasHeader("Content-Length")) { - if (auto length = string2Int(resp->GetHeader("Content-Length"))) { - bytesExpected = *length; - } - } - bytesDone += l; - act.progress(bytesDone, bytesExpected); - }); + /** + * Uploads a single part of a multipart upload + * + * @see https://docs.aws.amazon.com/AmazonS3/latest/API/API_UploadPart.html#API_UploadPart_RequestSyntax + * + * @returns the [ETag](https://en.wikipedia.org/wiki/HTTP_ETag) + */ + std::string uploadPart(std::string_view key, std::string_view uploadId, uint64_t partNumber, std::string data); - request.SetContinueRequestHandler([](const Aws::Http::HttpRequest *) { return !isInterrupted(); }); + /** + * Completes a multipart upload by combining all uploaded parts. 
+ * @see + * https://docs.aws.amazon.com/AmazonS3/latest/API/API_CompleteMultipartUpload.html#API_CompleteMultipartUpload_RequestSyntax + */ + void + completeMultipartUpload(std::string_view key, std::string_view uploadId, std::span partEtags); - FileTransferResult res; + /** + * Abort a multipart upload + * + * @see + * https://docs.aws.amazon.com/AmazonS3/latest/API/API_AbortMultipartUpload.html#API_AbortMultipartUpload_RequestSyntax + */ + void abortMultipartUpload(std::string_view key, std::string_view uploadId) noexcept; +}; - auto now1 = std::chrono::steady_clock::now(); +void S3BinaryCacheStore::upsertFile( + const std::string & path, RestartableSource & source, const std::string & mimeType, uint64_t sizeHint) +{ + auto doUpload = [&](RestartableSource & src, uint64_t size, std::optional headers) { + Headers uploadHeaders = headers.value_or(Headers()); + if (auto storageClass = s3Config->storageClass.get()) { + uploadHeaders.emplace_back("x-amz-storage-class", *storageClass); + } + if (s3Config->multipartUpload && size > s3Config->multipartThreshold) { + uploadMultipart(path, src, size, mimeType, std::move(uploadHeaders)); + } else { + upload(path, src, size, mimeType, std::move(uploadHeaders)); + } + }; try { - - auto result = checkAws(fmt("AWS error fetching '%s'", key), client->GetObject(request)); - - act.progress(result.GetContentLength(), result.GetContentLength()); - - res.data = decompress(result.GetContentEncoding(), dynamic_cast(result.GetBody()).str()); - - } catch (S3Error & e) { - if ((e.err != Aws::S3::S3Errors::NO_SUCH_KEY) && (e.err != Aws::S3::S3Errors::ACCESS_DENIED) && - // Expired tokens are not really an error, more of a caching problem. Should be treated same as 403. 
- // - // AWS unwilling to provide a specific error type for the situation - // (https://github.com/aws/aws-sdk-cpp/issues/1843) so use this hack - (e.exceptionName != "ExpiredToken")) - throw; + if (auto compressionMethod = getCompressionMethod(path)) { + CompressedSource compressed(source, *compressionMethod); + Headers headers = {{"Content-Encoding", *compressionMethod}}; + doUpload(compressed, compressed.size(), std::move(headers)); + } else { + doUpload(source, sizeHint, std::nullopt); + } + } catch (FileTransferError & e) { + UploadToS3 err(e.message()); + err.addTrace({}, "while uploading to S3 binary cache at '%s'", config->cacheUri.to_string()); + throw err; } - - auto now2 = std::chrono::steady_clock::now(); - - res.durationMs = std::chrono::duration_cast(now2 - now1).count(); - - return res; -} - -S3BinaryCacheStoreConfig::S3BinaryCacheStoreConfig( - std::string_view uriScheme, std::string_view bucketName, const Params & params) - : StoreConfig(params) - , BinaryCacheStoreConfig(params) - , bucketName(bucketName) -{ - // Don't want to use use AWS SDK in header, so we check the default - // here. TODO do this better after we overhaul the store settings - // system. - assert(std::string{defaultRegion} == std::string{Aws::Region::US_EAST_1}); - - if (bucketName.empty()) - throw UsageError("`%s` store requires a bucket name in its Store URI", uriScheme); -} - -S3BinaryCacheStore::S3BinaryCacheStore(ref config) - : BinaryCacheStore(*config) - , config{config} -{ } -std::string S3BinaryCacheStoreConfig::doc() +void S3BinaryCacheStore::upload( + std::string_view path, + RestartableSource & source, + uint64_t sizeHint, + std::string_view mimeType, + std::optional headers) { - return -# include "s3-binary-cache-store.md" - ; + debug("using S3 regular upload for '%s' (%d bytes)", path, sizeHint); + if (sizeHint > AWS_MAX_PART_SIZE) + throw Error( + "file too large for S3 upload without multipart: %s would exceed maximum size of %s. 
Consider enabling multipart-upload.", + renderSize(sizeHint), + renderSize(AWS_MAX_PART_SIZE)); + + HttpBinaryCacheStore::upload(path, source, sizeHint, mimeType, std::move(headers)); } -StoreReference S3BinaryCacheStoreConfig::getReference() const +void S3BinaryCacheStore::uploadMultipart( + std::string_view path, + RestartableSource & source, + uint64_t sizeHint, + std::string_view mimeType, + std::optional headers) { - return { - .variant = - StoreReference::Specified{ - .scheme = *uriSchemes().begin(), - .authority = bucketName, - }, - .params = getQueryParams(), - }; + debug("using S3 multipart upload for '%s' (%d bytes)", path, sizeHint); + MultipartSink sink(*this, path, sizeHint, mimeType, std::move(headers)); + source.drainInto(sink); + sink.finish(); } -struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore +S3BinaryCacheStore::MultipartSink::MultipartSink( + S3BinaryCacheStore & store, + std::string_view path, + uint64_t sizeHint, + std::string_view mimeType, + std::optional headers) + : store(store) + , path(path) { - Stats stats; + // Calculate chunk size and estimated parts + chunkSize = store.s3Config->multipartChunkSize; + uint64_t estimatedParts = (sizeHint + chunkSize - 1) / chunkSize; // ceil division + + if (estimatedParts > AWS_MAX_PART_COUNT) { + // Equivalent to ceil(sizeHint / AWS_MAX_PART_COUNT) + uint64_t minChunkSize = (sizeHint + AWS_MAX_PART_COUNT - 1) / AWS_MAX_PART_COUNT; + + if (minChunkSize > AWS_MAX_PART_SIZE) { + throw Error( + "file too large for S3 multipart upload: %s would require chunk size of %s " + "(max %s) to stay within %d part limit", + renderSize(sizeHint), + renderSize(minChunkSize), + renderSize(AWS_MAX_PART_SIZE), + AWS_MAX_PART_COUNT); + } - S3Helper s3Helper; + warn( + "adjusting S3 multipart chunk size from %s to %s " + "to stay within %d part limit for %s file", + renderSize(store.s3Config->multipartChunkSize.get()), + renderSize(minChunkSize), + AWS_MAX_PART_COUNT, + renderSize(sizeHint)); - 
S3BinaryCacheStoreImpl(ref config) - : Store{*config} - , BinaryCacheStore{*config} - , S3BinaryCacheStore{config} - , s3Helper(config->profile, config->region, config->scheme, config->endpoint) - { - diskCache = getNarInfoDiskCache(); + chunkSize = minChunkSize; + estimatedParts = AWS_MAX_PART_COUNT; } - void init() override - { - /* FIXME: The URI (when used as a cache key) must have several parameters rendered (e.g. the endpoint). - This must be represented as a separate opaque string (probably a URI) that has the right query parameters. */ - auto cacheUri = config->getReference().render(/*withParams=*/false); - if (auto cacheInfo = diskCache->upToDateCacheExists(cacheUri)) { - config->wantMassQuery.setDefault(cacheInfo->wantMassQuery); - config->priority.setDefault(cacheInfo->priority); - } else { - BinaryCacheStore::init(); - diskCache->createCache(cacheUri, config->storeDir, config->wantMassQuery, config->priority); + buffer.reserve(chunkSize); + partEtags.reserve(estimatedParts); + uploadId = store.createMultipartUpload(path, mimeType, std::move(headers)); +} + +void S3BinaryCacheStore::MultipartSink::operator()(std::string_view data) +{ + buffer.append(data); + + while (buffer.size() >= chunkSize) { + // Move entire buffer, extract excess, copy back remainder + auto chunk = std::move(buffer); + auto excessSize = chunk.size() > chunkSize ? chunk.size() - chunkSize : 0; + if (excessSize > 0) { + buffer.resize(excessSize); + std::memcpy(buffer.data(), chunk.data() + chunkSize, excessSize); } + chunk.resize(std::min(chunkSize, chunk.size())); + uploadChunk(std::move(chunk)); } +} - const Stats & getS3Stats() override - { - return stats; +void S3BinaryCacheStore::MultipartSink::finish() +{ + if (!buffer.empty()) { + uploadChunk(std::move(buffer)); } - /* This is a specialisation of isValidPath() that optimistically - fetches the .narinfo file, rather than first checking for its - existence via a HEAD request. 
Since .narinfos are small, doing - a GET is unlikely to be slower than HEAD. */ - bool isValidPathUncached(const StorePath & storePath) override - { - try { - queryPathInfo(storePath); - return true; - } catch (InvalidPath & e) { - return false; + try { + if (partEtags.empty()) { + throw Error("no data read from stream"); } + store.completeMultipartUpload(path, uploadId, partEtags); + } catch (Error & e) { + store.abortMultipartUpload(path, uploadId); + e.addTrace({}, "while finishing an S3 multipart upload"); + throw; } +} - bool fileExists(const std::string & path) override - { - stats.head++; - - auto res = s3Helper.client->HeadObject( - Aws::S3::Model::HeadObjectRequest().WithBucket(config->bucketName).WithKey(path)); - - if (!res.IsSuccess()) { - auto & error = res.GetError(); - if (error.GetErrorType() == Aws::S3::S3Errors::RESOURCE_NOT_FOUND - || error.GetErrorType() == Aws::S3::S3Errors::NO_SUCH_KEY - // Expired tokens are not really an error, more of a caching problem. Should be treated same as 403. 
- // AWS unwilling to provide a specific error type for the situation - // (https://github.com/aws/aws-sdk-cpp/issues/1843) so use this hack - || (error.GetErrorType() == Aws::S3::S3Errors::UNKNOWN && error.GetExceptionName() == "ExpiredToken") - // If bucket listing is disabled, 404s turn into 403s - || error.GetErrorType() == Aws::S3::S3Errors::ACCESS_DENIED) - return false; - throw Error("AWS error fetching '%s': %s", path, error.GetMessage()); - } - - return true; +void S3BinaryCacheStore::MultipartSink::uploadChunk(std::string chunk) +{ + auto partNumber = partEtags.size() + 1; + try { + std::string etag = store.uploadPart(path, uploadId, partNumber, std::move(chunk)); + partEtags.push_back(std::move(etag)); + } catch (Error & e) { + store.abortMultipartUpload(path, uploadId); + e.addTrace({}, "while uploading part %d of an S3 multipart upload", partNumber); + throw; } +} - std::shared_ptr transferManager; - std::once_flag transferManagerCreated; - - struct AsyncContext : public Aws::Client::AsyncCallerContext - { - mutable std::mutex mutex; - mutable std::condition_variable cv; - const Activity & act; - - void notify() const - { - cv.notify_one(); - } +std::string S3BinaryCacheStore::createMultipartUpload( + std::string_view key, std::string_view mimeType, std::optional headers) +{ + auto req = makeRequest(key); - void wait() const - { - std::unique_lock lk(mutex); - cv.wait(lk); - } + // setupForS3() converts s3:// to https:// but strips query parameters + // So we call it first, then add our multipart parameters + req.setupForS3(); - AsyncContext(const Activity & act) - : act(act) - { - } - }; + auto url = req.uri.parsed(); + url.query["uploads"] = ""; + req.uri = VerbatimURL(url); - void uploadFile( - const std::string & path, - std::shared_ptr> istream, - const std::string & mimeType, - const std::string & contentEncoding) - { - std::string uri = "s3://" + config->bucketName + "/" + path; - Activity act( - *logger, lvlTalkative, actFileTransfer, 
fmt("uploading '%s'", uri), Logger::Fields{uri}, getCurActivity()); - istream->seekg(0, istream->end); - auto size = istream->tellg(); - istream->seekg(0, istream->beg); - - auto maxThreads = std::thread::hardware_concurrency(); - - static std::shared_ptr executor = - std::make_shared(maxThreads); - - std::call_once(transferManagerCreated, [&]() { - if (config->multipartUpload) { - TransferManagerConfiguration transferConfig(executor.get()); - - transferConfig.s3Client = s3Helper.client; - transferConfig.bufferSize = config->bufferSize; - - transferConfig.uploadProgressCallback = - [](const TransferManager * transferManager, - const std::shared_ptr & transferHandle) { - auto context = std::dynamic_pointer_cast(transferHandle->GetContext()); - size_t bytesDone = transferHandle->GetBytesTransferred(); - size_t bytesTotal = transferHandle->GetBytesTotalSize(); - try { - checkInterrupt(); - context->act.progress(bytesDone, bytesTotal); - } catch (...) { - context->notify(); - } - }; - transferConfig.transferStatusUpdatedCallback = - [](const TransferManager * transferManager, - const std::shared_ptr & transferHandle) { - auto context = std::dynamic_pointer_cast(transferHandle->GetContext()); - context->notify(); - }; - - transferManager = TransferManager::Create(transferConfig); - } - }); - - auto now1 = std::chrono::steady_clock::now(); - - auto & bucketName = config->bucketName; - - if (transferManager) { - - if (contentEncoding != "") - throw Error("setting a content encoding is not supported with S3 multi-part uploads"); - - auto context = std::make_shared(act); - std::shared_ptr transferHandle = transferManager->UploadFile( - istream, - bucketName, - path, - mimeType, - Aws::Map(), - context /*, contentEncoding */); - - TransferStatus status = transferHandle->GetStatus(); - while (status == TransferStatus::IN_PROGRESS || status == TransferStatus::NOT_STARTED) { - if (!isInterrupted()) { - context->wait(); - } else { - transferHandle->Cancel(); - 
transferHandle->WaitUntilFinished(); - } - status = transferHandle->GetStatus(); - } - act.progress(transferHandle->GetBytesTransferred(), transferHandle->GetBytesTotalSize()); - - if (status == TransferStatus::FAILED) - throw Error( - "AWS error: failed to upload 's3://%s/%s': %s", - bucketName, - path, - transferHandle->GetLastError().GetMessage()); - - if (status != TransferStatus::COMPLETED) - throw Error("AWS error: transfer status of 's3://%s/%s' in unexpected state", bucketName, path); + req.method = HttpMethod::Post; + StringSource payload{std::string_view("")}; + req.data = {payload}; + req.mimeType = mimeType; - } else { - act.progress(0, size); + if (headers) { + req.headers.reserve(req.headers.size() + headers->size()); + std::move(headers->begin(), headers->end(), std::back_inserter(req.headers)); + } - auto request = Aws::S3::Model::PutObjectRequest().WithBucket(bucketName).WithKey(path); + auto result = getFileTransfer()->enqueueFileTransfer(req).get(); - size_t bytesSent = 0; - request.SetDataSentEventHandler([&](const Aws::Http::HttpRequest * req, long long l) { - bytesSent += l; - act.progress(bytesSent, size); - }); + std::regex uploadIdRegex("([^<]+)"); + std::smatch match; - request.SetContinueRequestHandler([](const Aws::Http::HttpRequest *) { return !isInterrupted(); }); + if (std::regex_search(result.data, match, uploadIdRegex)) { + return match[1]; + } - request.SetContentType(mimeType); + throw Error("S3 CreateMultipartUpload response missing "); +} - if (contentEncoding != "") - request.SetContentEncoding(contentEncoding); +std::string +S3BinaryCacheStore::uploadPart(std::string_view key, std::string_view uploadId, uint64_t partNumber, std::string data) +{ + if (partNumber > AWS_MAX_PART_COUNT) { + throw Error("S3 multipart upload exceeded %d part limit", AWS_MAX_PART_COUNT); + } - request.SetBody(istream); + auto req = makeRequest(key); + req.method = HttpMethod::Put; + req.setupForS3(); - auto result = checkAws(fmt("AWS error uploading 
'%s'", path), s3Helper.client->PutObject(request)); + auto url = req.uri.parsed(); + url.query["partNumber"] = std::to_string(partNumber); + url.query["uploadId"] = uploadId; + req.uri = VerbatimURL(url); + StringSource payload{data}; + req.data = {payload}; + req.mimeType = "application/octet-stream"; - act.progress(size, size); - } + auto result = getFileTransfer()->enqueueFileTransfer(req).get(); - auto now2 = std::chrono::steady_clock::now(); + if (result.etag.empty()) { + throw Error("S3 UploadPart response missing ETag for part %d", partNumber); + } - auto duration = std::chrono::duration_cast(now2 - now1).count(); + debug("Part %d uploaded, ETag: %s", partNumber, result.etag); + return std::move(result.etag); +} - printInfo("uploaded 's3://%s/%s' (%d bytes) in %d ms", bucketName, path, size, duration); +void S3BinaryCacheStore::abortMultipartUpload(std::string_view key, std::string_view uploadId) noexcept +{ + try { + auto req = makeRequest(key); + req.setupForS3(); - stats.putTimeMs += duration; - stats.putBytes += std::max(size, (decltype(size)) 0); - stats.put++; - } + auto url = req.uri.parsed(); + url.query["uploadId"] = uploadId; + req.uri = VerbatimURL(url); + req.method = HttpMethod::Delete; - void upsertFile( - const std::string & path, - std::shared_ptr> istream, - const std::string & mimeType) override - { - auto compress = [&](std::string compression) { - auto compressed = nix::compress(compression, StreamToSourceAdapter(istream).drain()); - return std::make_shared(std::move(compressed)); - }; - - if (config->narinfoCompression != "" && hasSuffix(path, ".narinfo")) - uploadFile(path, compress(config->narinfoCompression), mimeType, config->narinfoCompression); - else if (config->lsCompression != "" && hasSuffix(path, ".ls")) - uploadFile(path, compress(config->lsCompression), mimeType, config->lsCompression); - else if (config->logCompression != "" && hasPrefix(path, "log/")) - uploadFile(path, compress(config->logCompression), mimeType, 
config->logCompression); - else - uploadFile(path, istream, mimeType, ""); + getFileTransfer()->enqueueFileTransfer(req).get(); + } catch (...) { + ignoreExceptionInDestructor(); } +} - void getFile(const std::string & path, Sink & sink) override - { - stats.get++; - - // FIXME: stream output to sink. - auto res = s3Helper.getObject(config->bucketName, path); - - stats.getBytes += res.data ? res.data->size() : 0; - stats.getTimeMs += res.durationMs; - - if (res.data) { - printTalkative( - "downloaded 's3://%s/%s' (%d bytes) in %d ms", - config->bucketName, - path, - res.data->size(), - res.durationMs); - - sink(*res.data); - } else - throw NoSuchBinaryCacheFile( - "file '%s' does not exist in binary cache '%s'", path, config->getHumanReadableURI()); +void S3BinaryCacheStore::completeMultipartUpload( + std::string_view key, std::string_view uploadId, std::span partEtags) +{ + auto req = makeRequest(key); + req.setupForS3(); + + auto url = req.uri.parsed(); + url.query["uploadId"] = uploadId; + req.uri = VerbatimURL(url); + req.method = HttpMethod::Post; + + std::string xml = ""; + for (const auto & [idx, etag] : enumerate(partEtags)) { + xml += ""; + // S3 part numbers are 1-indexed, but vector indices are 0-indexed + xml += "" + std::to_string(idx + 1) + ""; + xml += "" + etag + ""; + xml += ""; } + xml += ""; - StorePathSet queryAllValidPaths() override - { - StorePathSet paths; - std::string marker; + debug("S3 CompleteMultipartUpload XML (%d parts): %s", partEtags.size(), xml); - auto & bucketName = config->bucketName; + StringSource payload{xml}; + req.data = {payload}; + req.mimeType = "text/xml"; - do { - debug("listing bucket 's3://%s' from key '%s'...", bucketName, marker); + getFileTransfer()->enqueueFileTransfer(req).get(); - auto res = checkAws( - fmt("AWS error listing bucket '%s'", bucketName), - s3Helper.client->ListObjects( - Aws::S3::Model::ListObjectsRequest().WithBucket(bucketName).WithDelimiter("/").WithMarker(marker))); - - auto & contents = 
res.GetContents(); + debug("S3 multipart upload completed: %d parts uploaded for '%s'", partEtags.size(), key); +} - debug("got %d keys, next marker '%s'", contents.size(), res.GetNextMarker()); +StringSet S3BinaryCacheStoreConfig::uriSchemes() +{ + return {"s3"}; +} - for (const auto & object : contents) { - auto & key = object.GetKey(); - if (key.size() != 40 || !hasSuffix(key, ".narinfo")) - continue; - paths.insert(parseStorePath(storeDir + "/" + key.substr(0, key.size() - 8) + "-" + MissingName)); - } +S3BinaryCacheStoreConfig::S3BinaryCacheStoreConfig( + std::string_view scheme, std::string_view _cacheUri, const Params & params) + : StoreConfig(params) + , HttpBinaryCacheStoreConfig(scheme, _cacheUri, params) +{ + assert(cacheUri.query.empty()); + assert(cacheUri.scheme == "s3"); + + for (const auto & [key, value] : params) { + auto s3Params = + std::views::transform(s3UriSettings, [](const AbstractSetting * setting) { return setting->name; }); + if (std::ranges::contains(s3Params, key)) { + cacheUri.query[key] = value; + } + } - marker = res.GetNextMarker(); - } while (!marker.empty()); + if (multipartChunkSize < AWS_MIN_PART_SIZE) { + throw UsageError( + "multipart-chunk-size must be at least %s, got %s", + renderSize(AWS_MIN_PART_SIZE), + renderSize(multipartChunkSize.get())); + } - return paths; + if (multipartChunkSize > AWS_MAX_PART_SIZE) { + throw UsageError( + "multipart-chunk-size must be at most %s, got %s", + renderSize(AWS_MAX_PART_SIZE), + renderSize(multipartChunkSize.get())); } - /** - * For now, we conservatively say we don't know. - * - * \todo try to expose our S3 authentication status. 
- */ - std::optional isTrustedClient() override - { - return std::nullopt; + if (multipartUpload && multipartThreshold < multipartChunkSize) { + warn( + "multipart-threshold (%s) is less than multipart-chunk-size (%s), " + "which may result in single-part multipart uploads", + renderSize(multipartThreshold.get()), + renderSize(multipartChunkSize.get())); } -}; +} -ref S3BinaryCacheStoreImpl::Config::openStore() const +std::string S3BinaryCacheStoreConfig::getHumanReadableURI() const { - auto store = - make_ref(ref{// FIXME we shouldn't actually need a mutable config - std::const_pointer_cast(shared_from_this())}); - store->init(); - return store; + auto reference = getReference(); + reference.params = [&]() { + Params relevantParams; + for (auto & setting : s3UriSettings) + if (setting->overridden) + relevantParams.insert({setting->name, reference.params.at(setting->name)}); + return relevantParams; + }(); + return reference.render(); } -static RegisterStoreImplementation regS3BinaryCacheStore; +std::string S3BinaryCacheStoreConfig::doc() +{ + return +#include "s3-binary-cache-store.md" + ; +} -} // namespace nix +ref S3BinaryCacheStoreConfig::openStore() const +{ + auto sharedThis = std::const_pointer_cast( + std::static_pointer_cast(shared_from_this())); + return make_ref(ref{sharedThis}); +} -#endif +static RegisterStoreImplementation registerS3BinaryCacheStore; + +} // namespace nix diff --git a/src/libstore/s3-binary-cache-store.md b/src/libstore/s3-binary-cache-store.md index daa41defd82..0b0c2691929 100644 --- a/src/libstore/s3-binary-cache-store.md +++ b/src/libstore/s3-binary-cache-store.md @@ -27,7 +27,8 @@ like the following to be accessible: "Sid": "AllowDirectReads", "Action": [ "s3:GetObject", - "s3:GetBucketLocation" + "s3:GetBucketLocation", + "s3:ListBucket" ], "Effect": "Allow", "Resource": [ @@ -51,7 +52,7 @@ Consult the documentation linked above for further details. 
### Authenticated reads to your S3 binary cache -Your bucket will need a bucket policy allowing the desired users to perform the `s3:GetObject` and `s3:GetBucketLocation` action on all objects in the bucket. +Your bucket will need a bucket policy allowing the desired users to perform the `s3:GetObject`, `s3:GetBucketLocation`, and `s3:ListBucket` actions on all objects in the bucket. The [anonymous policy given above](#anonymous-reads-to-your-s3-compatible-binary-cache) can be updated to have a restricted `Principal` to support this. ### Authenticated writes to your S3-compatible binary cache diff --git a/src/libstore/s3-url.cc b/src/libstore/s3-url.cc index baefe5cba5e..503c0cd9105 100644 --- a/src/libstore/s3-url.cc +++ b/src/libstore/s3-url.cc @@ -1,13 +1,10 @@ #include "nix/store/s3-url.hh" +#include "nix/util/error.hh" +#include "nix/util/split.hh" +#include "nix/util/strings-inline.hh" -#if NIX_WITH_S3_SUPPORT || NIX_WITH_CURL_S3 - -# include "nix/util/error.hh" -# include "nix/util/split.hh" -# include "nix/util/strings-inline.hh" - -# include -# include +#include +#include using namespace std::string_view_literals; @@ -51,6 +48,7 @@ try { .profile = getOptionalParam("profile"), .region = getOptionalParam("region"), .scheme = getOptionalParam("scheme"), + .versionId = getOptionalParam("versionId"), .endpoint = [&]() -> decltype(ParsedS3URL::endpoint) { if (!endpoint) return std::monostate(); @@ -76,6 +74,12 @@ ParsedURL ParsedS3URL::toHttpsUrl() const auto regionStr = region.transform(toView).value_or("us-east-1"); auto schemeStr = scheme.transform(toView).value_or("https"); + // Build query parameters (e.g., versionId if present) + StringMap queryParams; + if (versionId) { + queryParams["versionId"] = *versionId; + } + // Handle endpoint configuration using std::visit return std::visit( overloaded{ @@ -88,6 +92,7 @@ ParsedURL ParsedS3URL::toHttpsUrl() const .scheme = std::string{schemeStr}, .authority = ParsedURL::Authority{.host = "s3." 
+ regionStr + ".amazonaws.com"}, .path = std::move(path), + .query = std::move(queryParams), }; }, [&](const ParsedURL::Authority & auth) { @@ -99,6 +104,7 @@ ParsedURL ParsedS3URL::toHttpsUrl() const .scheme = std::string{schemeStr}, .authority = auth, .path = std::move(path), + .query = std::move(queryParams), }; }, [&](const ParsedURL & endpointUrl) { @@ -110,6 +116,7 @@ ParsedURL ParsedS3URL::toHttpsUrl() const .scheme = endpointUrl.scheme, .authority = endpointUrl.authority, .path = std::move(path), + .query = std::move(queryParams), }; }, }, @@ -117,5 +124,3 @@ ParsedURL ParsedS3URL::toHttpsUrl() const } } // namespace nix - -#endif diff --git a/src/libstore/serve-protocol.cc b/src/libstore/serve-protocol.cc index 51b575fcd5a..e6c9eabd22c 100644 --- a/src/libstore/serve-protocol.cc +++ b/src/libstore/serve-protocol.cc @@ -78,7 +78,7 @@ UnkeyedValidPathInfo ServeProto::Serialise::read(const Sto { /* Hash should be set below unless very old `nix-store --serve`. Caller should assert that it did set it. */ - UnkeyedValidPathInfo info{Hash::dummy}; + UnkeyedValidPathInfo info{store, Hash::dummy}; auto deriver = readString(conn.from); if (deriver != "") diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc index a7e28017fad..dc70b4ba8de 100644 --- a/src/libstore/ssh-store.cc +++ b/src/libstore/ssh-store.cc @@ -37,7 +37,8 @@ StoreReference SSHStoreConfig::getReference() const }; } -struct SSHStore : virtual RemoteStore +struct alignas(8) /* Work around ASAN failures on i686-linux. 
*/ + SSHStore : virtual RemoteStore { using Config = SSHStoreConfig; @@ -143,7 +144,7 @@ struct MountedSSHStore : virtual SSHStore, virtual LocalFSStore void narFromPath(const StorePath & path, Sink & sink) override { - return LocalFSStore::narFromPath(path, sink); + return Store::narFromPath(path, sink); } ref getFSAccessor(bool requireValidPath) override diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 6d011d324d2..58a408ca155 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -304,6 +304,13 @@ ValidPathInfo Store::addToStoreSlow( return info; } +void Store::narFromPath(const StorePath & path, Sink & sink) +{ + auto accessor = requireStoreObjectAccessor(path); + SourcePath sourcePath{accessor}; + dumpPath(sourcePath, sink, FileSerialisationMethod::NixArchive); +} + StringSet Store::Config::getDefaultSystemFeatures() { auto res = settings.systemFeatures.get(); @@ -340,7 +347,7 @@ bool Store::PathInfoCacheValue::isKnownNow() void Store::invalidatePathInfoCacheFor(const StorePath & path) { - pathInfoCache->lock()->erase(path.to_string()); + pathInfoCache->lock()->erase(path); } std::map> Store::queryStaticPartialDerivationOutputMap(const StorePath & path) @@ -468,7 +475,7 @@ void Store::querySubstitutablePathInfos(const StorePathCAMap & paths, Substituta bool Store::isValidPath(const StorePath & storePath) { - auto res = pathInfoCache->lock()->get(storePath.to_string()); + auto res = pathInfoCache->lock()->get(storePath); if (res && res->isKnownNow()) { stats.narInfoReadAverted++; return res->didExist(); @@ -480,7 +487,7 @@ bool Store::isValidPath(const StorePath & storePath) if (res.first != NarInfoDiskCache::oUnknown) { stats.narInfoReadAverted++; pathInfoCache->lock()->upsert( - storePath.to_string(), + storePath, res.first == NarInfoDiskCache::oInvalid ? 
PathInfoCacheValue{} : PathInfoCacheValue{.value = res.second}); return res.first == NarInfoDiskCache::oValid; @@ -551,7 +558,7 @@ std::optional> Store::queryPathInfoFromClie { auto hashPart = std::string(storePath.hashPart()); - auto res = pathInfoCache->lock()->get(storePath.to_string()); + auto res = pathInfoCache->lock()->get(storePath); if (res && res->isKnownNow()) { stats.narInfoReadAverted++; if (res->didExist()) @@ -565,7 +572,7 @@ std::optional> Store::queryPathInfoFromClie if (res.first != NarInfoDiskCache::oUnknown) { stats.narInfoReadAverted++; pathInfoCache->lock()->upsert( - storePath.to_string(), + storePath, res.first == NarInfoDiskCache::oInvalid ? PathInfoCacheValue{} : PathInfoCacheValue{.value = res.second}); if (res.first == NarInfoDiskCache::oInvalid || !goodStorePath(storePath, res.second->path)) @@ -605,7 +612,7 @@ void Store::queryPathInfo(const StorePath & storePath, CallbackupsertNarInfo(config.getReference().render(/*FIXME withParams=*/false), hashPart, info); - pathInfoCache->lock()->upsert(storePath.to_string(), PathInfoCacheValue{.value = info}); + pathInfoCache->lock()->upsert(storePath, PathInfoCacheValue{.value = info}); if (!info || !goodStorePath(storePath, info->path)) { stats.narInfoMissing++; @@ -619,7 +626,8 @@ void Store::queryPathInfo(const StorePath & storePath, Callback> callback) noexcept +void Store::queryRealisation( + const DrvOutput & id, Callback> callback) noexcept { try { @@ -645,20 +653,20 @@ void Store::queryRealisation(const DrvOutput & id, Callback(std::move(callback)); - queryRealisationUncached(id, {[this, id, callbackPtr](std::future> fut) { + queryRealisationUncached(id, {[this, id, callbackPtr](std::future> fut) { try { auto info = fut.get(); if (diskCache) { if (info) diskCache->upsertRealisation( - config.getReference().render(/*FIXME withParams=*/false), *info); + config.getReference().render(/*FIXME withParams=*/false), {*info, id}); else diskCache->upsertAbsentRealisation( 
config.getReference().render(/*FIXME withParams=*/false), id); } - (*callbackPtr)(std::shared_ptr(info)); + (*callbackPtr)(std::shared_ptr(info)); } catch (...) { callbackPtr->rethrow(); @@ -666,9 +674,9 @@ void Store::queryRealisation(const DrvOutput & id, Callback Store::queryRealisation(const DrvOutput & id) +std::shared_ptr Store::queryRealisation(const DrvOutput & id) { - using RealPtr = std::shared_ptr; + using RealPtr = std::shared_ptr; std::promise promise; queryRealisation(id, {[&](std::future result) { @@ -931,11 +939,12 @@ std::map copyPaths( std::set toplevelRealisations; for (auto & path : paths) { storePaths.insert(path.path()); - if (auto realisation = std::get_if(&path.raw)) { + if (auto * realisation = std::get_if(&path.raw)) { experimentalFeatureSettings.require(Xp::CaDerivations); toplevelRealisations.insert(*realisation); } } + auto pathsMap = copyPaths(srcStore, dstStore, storePaths, repair, checkSigs, substitute); try { @@ -952,7 +961,7 @@ std::map copyPaths( "dependency of '%s' but isn't registered", drvOutput.to_string(), current.id.to_string()); - children.insert(*currentChild); + children.insert({*currentChild, drvOutput}); } return children; }, @@ -1107,7 +1116,7 @@ decodeValidPathInfo(const Store & store, std::istream & str, std::optionalhash); + ValidPathInfo info(store.parseStorePath(path), {store, hashGiven->hash}); info.narSize = hashGiven->numBytesDigested; std::string deriver; getline(str, deriver); @@ -1138,6 +1147,11 @@ std::string StoreDirConfig::showPaths(const StorePathSet & paths) const return s; } +std::string showPaths(const std::set paths) +{ + return concatStringsSep(", ", quoteFSPaths(paths)); +} + std::string showPaths(const PathSet & paths) { return concatStringsSep(", ", quoteStrings(paths)); @@ -1151,7 +1165,7 @@ Derivation Store::derivationFromPath(const StorePath & drvPath) static Derivation readDerivationCommon(Store & store, const StorePath & drvPath, bool requireValidPath) { - auto accessor = 
store.getFSAccessor(drvPath, requireValidPath); + auto accessor = store.requireStoreObjectAccessor(drvPath, requireValidPath); try { return parseDerivation(store, accessor->readFile(CanonPath::root), Derivation::nameFromPath(drvPath)); } catch (FormatError & e) { @@ -1182,7 +1196,7 @@ std::optional Store::getBuildDerivationPath(const StorePath & path) // resolved derivation, so we need to get it first auto resolvedDrv = drv.tryResolve(*this); if (resolvedDrv) - return writeDerivation(*this, *resolvedDrv, NoRepair, true); + return ::nix::writeDerivation(*this, *resolvedDrv, NoRepair, true); } return path; @@ -1220,7 +1234,7 @@ void Store::signRealisation(Realisation & realisation) for (auto & secretKeyFile : secretKeyFiles.get()) { SecretKey secretKey(readFile(secretKeyFile)); LocalSigner signer(std::move(secretKey)); - realisation.sign(signer); + realisation.sign(realisation.id, signer); } } diff --git a/src/libstore/unix/build/chroot-derivation-builder.cc b/src/libstore/unix/build/chroot-derivation-builder.cc index 8c93595334c..354a604f535 100644 --- a/src/libstore/unix/build/chroot-derivation-builder.cc +++ b/src/libstore/unix/build/chroot-derivation-builder.cc @@ -58,7 +58,7 @@ struct ChrootDerivationBuilder : virtual DerivationBuilderImpl environment using bind-mounts. We put it in the Nix store so that the build outputs can be moved efficiently from the chroot to their final location. */ - auto chrootParentDir = store.Store::toRealPath(drvPath) + ".chroot"; + auto chrootParentDir = store.toRealPath(drvPath) + ".chroot"; deletePath(chrootParentDir); /* Clean up the chroot directory automatically. 
*/ @@ -171,7 +171,7 @@ struct ChrootDerivationBuilder : virtual DerivationBuilderImpl continue; if (buildMode != bmCheck && status.known->isValid()) continue; - auto p = store.Store::toRealPath(status.known->path); + auto p = store.toRealPath(status.known->path); if (pathExists(chrootRootDir + p)) std::filesystem::rename((chrootRootDir + p), p); } @@ -181,11 +181,11 @@ struct ChrootDerivationBuilder : virtual DerivationBuilderImpl std::pair addDependencyPrep(const StorePath & path) { - DerivationBuilderImpl::addDependency(path); + DerivationBuilderImpl::addDependencyImpl(path); debug("materialising '%s' in the sandbox", store.printStorePath(path)); - Path source = store.Store::toRealPath(path); + Path source = store.toRealPath(path); Path target = chrootRootDir + store.printStorePath(path); if (pathExists(target)) { diff --git a/src/libstore/unix/build/darwin-derivation-builder.cc b/src/libstore/unix/build/darwin-derivation-builder.cc index 21b3c6cb93d..24329bffce1 100644 --- a/src/libstore/unix/build/darwin-derivation-builder.cc +++ b/src/libstore/unix/build/darwin-derivation-builder.cc @@ -3,11 +3,33 @@ # include # include # include +# include +# include +# include +# include /* This definition is undocumented but depended upon by all major browsers. 
*/ extern "C" int sandbox_init_with_parameters(const char * profile, uint64_t flags, const char * const parameters[], char ** errorbuf); +/* Darwin IPC structures and constants */ +# define IPCS_MAGIC 0x00000001 +# define IPCS_SHM_ITER 0x00000002 +# define IPCS_SEM_ITER 0x00000020 +# define IPCS_MSG_ITER 0x00000200 +# define IPCS_SHM_SYSCTL "kern.sysv.ipcs.shm" +# define IPCS_MSG_SYSCTL "kern.sysv.ipcs.msg" +# define IPCS_SEM_SYSCTL "kern.sysv.ipcs.sem" + +struct IpcsCommand +{ + uint32_t ipcs_magic; + uint32_t ipcs_op; + uint32_t ipcs_cursor; + uint32_t ipcs_datalen; + void * ipcs_data; +}; + namespace nix { struct DarwinDerivationBuilder : DerivationBuilderImpl @@ -152,7 +174,7 @@ struct DarwinDerivationBuilder : DerivationBuilderImpl /* The tmpDir in scope points at the temporary build directory for our derivation. Some packages try different mechanisms to find temporary directories, so we want to open up a broader place for them to put their files, if needed. */ - Path globalTmpDir = canonPath(defaultTempDir(), true); + Path globalTmpDir = canonPath(defaultTempDir().string(), true); /* They don't like trailing slashes on subpath directives */ while (!globalTmpDir.empty() && globalTmpDir.back() == '/') @@ -204,6 +226,119 @@ struct DarwinDerivationBuilder : DerivationBuilderImpl posix_spawn( NULL, drv.builder.c_str(), NULL, &attrp, stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data()); } + + /** + * Cleans up all System V IPC objects owned by the specified user. + * + * On Darwin, IPC objects (shared memory segments, message queues, and semaphore) + * can persist after the build user's processes are killed, since there are no IPC namespaces + * like on Linux. This can exhaust kernel IPC limits over time. + * + * Uses sysctl to enumerate and remove all IPC objects owned by the given UID. 
+ */ + void cleanupSysVIPCForUser(uid_t uid) + { + struct IpcsCommand ic; + size_t ic_size = sizeof(ic); + // IPC ids to cleanup + std::vector shm_ids, msg_ids, sem_ids; + + { + struct shmid_ds shm_ds; + ic.ipcs_magic = IPCS_MAGIC; + ic.ipcs_op = IPCS_SHM_ITER; + ic.ipcs_cursor = 0; + ic.ipcs_data = &shm_ds; + ic.ipcs_datalen = sizeof(shm_ds); + + while (true) { + memset(&shm_ds, 0, sizeof(shm_ds)); + + if (sysctlbyname(IPCS_SHM_SYSCTL, &ic, &ic_size, &ic, ic_size) != 0) { + break; + } + + if (shm_ds.shm_perm.uid == uid) { + int shmid = shmget(shm_ds.shm_perm._key, 0, 0); + if (shmid != -1) { + shm_ids.push_back(shmid); + } + } + } + } + + for (auto id : shm_ids) { + if (shmctl(id, IPC_RMID, NULL) == 0) + debug("removed shared memory segment with shmid %d", id); + } + + { + struct msqid_ds msg_ds; + ic.ipcs_magic = IPCS_MAGIC; + ic.ipcs_op = IPCS_MSG_ITER; + ic.ipcs_cursor = 0; + ic.ipcs_data = &msg_ds; + ic.ipcs_datalen = sizeof(msg_ds); + + while (true) { + memset(&msg_ds, 0, sizeof(msg_ds)); + + if (sysctlbyname(IPCS_MSG_SYSCTL, &ic, &ic_size, &ic, ic_size) != 0) { + break; + } + + if (msg_ds.msg_perm.uid == uid) { + int msgid = msgget(msg_ds.msg_perm._key, 0); + if (msgid != -1) { + msg_ids.push_back(msgid); + } + } + } + } + + for (auto id : msg_ids) { + if (msgctl(id, IPC_RMID, NULL) == 0) + debug("removed message queue with msgid %d", id); + } + + { + struct semid_ds sem_ds; + ic.ipcs_magic = IPCS_MAGIC; + ic.ipcs_op = IPCS_SEM_ITER; + ic.ipcs_cursor = 0; + ic.ipcs_data = &sem_ds; + ic.ipcs_datalen = sizeof(sem_ds); + + while (true) { + memset(&sem_ds, 0, sizeof(sem_ds)); + + if (sysctlbyname(IPCS_SEM_SYSCTL, &ic, &ic_size, &ic, ic_size) != 0) { + break; + } + + if (sem_ds.sem_perm.uid == uid) { + int semid = semget(sem_ds.sem_perm._key, 0, 0); + if (semid != -1) { + sem_ids.push_back(semid); + } + } + } + } + + for (auto id : sem_ids) { + if (semctl(id, 0, IPC_RMID) == 0) + debug("removed semaphore with semid %d", id); + } + } + + void killSandbox(bool 
getStats) override + { + DerivationBuilderImpl::killSandbox(getStats); + if (buildUser) { + auto uid = buildUser->getUID(); + cleanupSysVIPCForUser(uid); + } + } }; } // namespace nix diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index a78677f213f..5841b414680 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -47,6 +47,12 @@ #include "store-config-private.hh" #include "build/derivation-check.hh" +#if NIX_WITH_AWS_AUTH +# include "nix/store/aws-creds.hh" +# include "nix/store/s3-url.hh" +# include "nix/util/url.hh" +#endif + namespace nix { struct NotDeterministic : BuildError @@ -235,12 +241,6 @@ class DerivationBuilderImpl : public DerivationBuilder, public DerivationBuilder return acquireUserLock(1, false); } - /** - * Throw an exception if we can't do this derivation because of - * missing system features. - */ - virtual void checkSystem(); - /** * Construct the `ActiveBuild` object for `ActiveBuildsTracker`. */ @@ -307,6 +307,15 @@ class DerivationBuilderImpl : public DerivationBuilder, public DerivationBuilder */ virtual void startChild(); +#if NIX_WITH_AWS_AUTH + /** + * Pre-resolve AWS credentials for S3 URLs in builtin:fetchurl. + * This should be called before forking to ensure credentials are available in child. + * Returns the credentials if successfully resolved, or std::nullopt otherwise. + */ + std::optional preResolveAwsCredentials(); +#endif + private: /** @@ -356,10 +365,20 @@ class DerivationBuilderImpl : public DerivationBuilder, public DerivationBuilder */ void writeBuilderFile(const std::string & name, std::string_view contents); + /** + * Arguments passed to runChild(). + */ + struct RunChildArgs + { +#if NIX_WITH_AWS_AUTH + std::optional awsCredentials; +#endif + }; + /** * Run the builder's process. */ - void runChild(); + void runChild(RunChildArgs args); /** * Move the current process into the chroot, if any. 
Called early @@ -687,33 +706,6 @@ static bool checkNotWorldWritable(std::filesystem::path path) return true; } -void DerivationBuilderImpl::checkSystem() -{ - /* Right platform? */ - if (!drvOptions.canBuildLocally(store, drv)) { - auto msg = - fmt("Cannot build '%s'.\n" - "Reason: " ANSI_RED "required system or feature not available" ANSI_NORMAL - "\n" - "Required system: '%s' with features {%s}\n" - "Current system: '%s' with features {%s}", - Magenta(store.printStorePath(drvPath)), - Magenta(drv.platform), - concatStringsSep(", ", drvOptions.getRequiredSystemFeatures(drv)), - Magenta(settings.thisSystem), - concatStringsSep(", ", store.Store::config.systemFeatures)); - - // since aarch64-darwin has Rosetta 2, this user can actually run x86_64-darwin on their hardware - we should - // tell them to run the command to install Darwin 2 - if (drv.platform == "x86_64-darwin" && settings.thisSystem == "aarch64-darwin") - msg += - fmt("\nNote: run `%s` to run programs for x86_64-darwin", - Magenta("/usr/sbin/softwareupdate --install-rosetta && launchctl stop org.nixos.nix-daemon")); - - throw BuildError(BuildResult::Failure::InputRejected, msg); - } -} - std::optional DerivationBuilderImpl::startBuild() { if (useBuildUsers()) { @@ -724,8 +716,6 @@ std::optional DerivationBuilderImpl::startBuild() return std::nullopt; } - checkSystem(); - /* Make sure that no other processes are executing under the sandbox uids. This must be done before any chownToBuilder() calls. 
*/ @@ -992,11 +982,43 @@ void DerivationBuilderImpl::openSlave() throw SysError("cannot pipe standard error into log file"); } +#if NIX_WITH_AWS_AUTH +std::optional DerivationBuilderImpl::preResolveAwsCredentials() +{ + if (drv.isBuiltin() && drv.builder == "builtin:fetchurl") { + auto url = drv.env.find("url"); + if (url != drv.env.end()) { + try { + auto parsedUrl = parseURL(url->second); + if (parsedUrl.scheme == "s3") { + debug("Pre-resolving AWS credentials for S3 URL in builtin:fetchurl"); + auto s3Url = ParsedS3URL::parse(parsedUrl); + + // Use the preResolveAwsCredentials from aws-creds + auto credentials = getAwsCredentialsProvider()->getCredentials(s3Url); + debug("Successfully pre-resolved AWS credentials in parent process"); + return credentials; + } + } catch (const std::exception & e) { + debug("Error pre-resolving S3 credentials: %s", e.what()); + } + } + } + return std::nullopt; +} +#endif + void DerivationBuilderImpl::startChild() { - pid = startProcess([&]() { + RunChildArgs args{ +#if NIX_WITH_AWS_AUTH + .awsCredentials = preResolveAwsCredentials(), +#endif + }; + + pid = startProcess([this, args = std::move(args)]() { openSlave(); - runChild(); + runChild(std::move(args)); }); } @@ -1250,7 +1272,7 @@ void DerivationBuilderImpl::writeBuilderFile(const std::string & name, std::stri chownToBuilder(fd.get(), path); } -void DerivationBuilderImpl::runChild() +void DerivationBuilderImpl::runChild(RunChildArgs args) { /* Warning: in the child we should absolutely not make any SQLite calls! */ @@ -1267,6 +1289,9 @@ void DerivationBuilderImpl::runChild() BuiltinBuilderContext ctx{ .drv = drv, .tmpDirInSandbox = tmpDirInSandbox(), +#if NIX_WITH_AWS_AUTH + .awsCredentials = args.awsCredentials, +#endif }; if (drv.isBuiltin() && drv.builder == "builtin:fetchurl") { @@ -1408,8 +1433,18 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() struct PerhapsNeedToRegister { StorePathSet refs; + /** + * References to other outputs. 
Built by looking up in + * `scratchOutputsInverse`. + */ + StringSet otherOutputs; }; + /* inverse map of scratchOutputs for efficient lookup */ + std::map scratchOutputsInverse; + for (auto & [outputName, path] : scratchOutputs) + scratchOutputsInverse.insert_or_assign(path, outputName); + std::map> outputReferencesIfUnregistered; std::map outputStats; for (auto & [outputName, _] : drv.outputs) { @@ -1478,47 +1513,54 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() references = scanForReferences(blank, actualPath, referenceablePaths); } - outputReferencesIfUnregistered.insert_or_assign(outputName, PerhapsNeedToRegister{.refs = references}); + StringSet referencedOutputs; + for (auto & r : references) + if (auto * o = get(scratchOutputsInverse, r)) + referencedOutputs.insert(*o); + + outputReferencesIfUnregistered.insert_or_assign( + outputName, + PerhapsNeedToRegister{ + .refs = references, + .otherOutputs = referencedOutputs, + }); outputStats.insert_or_assign(outputName, std::move(st)); } - auto sortedOutputNames = topoSort( - outputsToSort, - {[&](const std::string & name) { - auto orifu = get(outputReferencesIfUnregistered, name); - if (!orifu) + StringSet emptySet; + + auto topoSortResult = topoSort(outputsToSort, [&](const std::string & name) -> const StringSet & { + auto * orifu = get(outputReferencesIfUnregistered, name); + if (!orifu) + throw BuildError( + BuildResult::Failure::OutputRejected, + "no output reference for '%s' in build of '%s'", + name, + store.printStorePath(drvPath)); + return std::visit( + overloaded{ + /* Since we'll use the already installed versions of these, we + can treat them as leaves and ignore any references they + have. 
*/ + [&](const AlreadyRegistered &) -> const StringSet & { return emptySet; }, + [&](const PerhapsNeedToRegister & refs) -> const StringSet & { return refs.otherOutputs; }, + }, + *orifu); + }); + + auto sortedOutputNames = std::visit( + overloaded{ + [&](Cycle & cycle) -> std::vector { + // TODO with more -vvvv also show the temporary paths for manual inspection. throw BuildError( BuildResult::Failure::OutputRejected, - "no output reference for '%s' in build of '%s'", - name, - store.printStorePath(drvPath)); - return std::visit( - overloaded{ - /* Since we'll use the already installed versions of these, we - can treat them as leaves and ignore any references they - have. */ - [&](const AlreadyRegistered &) { return StringSet{}; }, - [&](const PerhapsNeedToRegister & refs) { - StringSet referencedOutputs; - /* FIXME build inverted map up front so no quadratic waste here */ - for (auto & r : refs.refs) - for (auto & [o, p] : scratchOutputs) - if (r == p) - referencedOutputs.insert(o); - return referencedOutputs; - }, - }, - *orifu); - }}, - {[&](const std::string & path, const std::string & parent) { - // TODO with more -vvvv also show the temporary paths for manual inspection. 
- return BuildError( - BuildResult::Failure::OutputRejected, - "cycle detected in build of '%s' in the references of output '%s' from output '%s'", - store.printStorePath(drvPath), - path, - parent); - }}); + "cycle detected in build of '%s' in the references of output '%s' from output '%s'", + store.printStorePath(drvPath), + cycle.path, + cycle.parent); + }, + [](auto & sorted) { return sorted; }}, + topoSortResult); std::reverse(sortedOutputNames.begin(), sortedOutputNames.end()); @@ -1681,7 +1723,7 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() {getFSSourceAccessor(), CanonPath(actualPath)}, FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256); - ValidPathInfo newInfo0{requiredFinalPath, narHashAndSize.hash}; + ValidPathInfo newInfo0{requiredFinalPath, {store, narHashAndSize.hash}}; newInfo0.narSize = narHashAndSize.numBytesDigested; auto refs = rewriteRefs(); newInfo0.references = std::move(refs.others); @@ -1875,7 +1917,12 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() for (auto & [outputName, newInfo] : infos) { auto oldinfo = get(initialOutputs, outputName); assert(oldinfo); - auto thisRealisation = Realisation{.id = DrvOutput{oldinfo->outputHash, outputName}, .outPath = newInfo.path}; + auto thisRealisation = Realisation{ + { + .outPath = newInfo.path, + }, + DrvOutput{oldinfo->outputHash, outputName}, + }; if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations) && !drv.type().isImpure()) { store.signRealisation(thisRealisation); store.registerDrvOutput(thisRealisation); @@ -1891,7 +1938,7 @@ void DerivationBuilderImpl::cleanupBuild(bool force) if (force) { /* Delete unused redirected outputs (when doing hash rewriting). 
*/ for (auto & i : redirectedOutputs) - deletePath(store.Store::toRealPath(i.second)); + deletePath(store.toRealPath(i.second)); } if (topTmpDir != "") { @@ -1956,9 +2003,6 @@ namespace nix { std::unique_ptr makeDerivationBuilder( LocalStore & store, std::unique_ptr miscMethods, DerivationBuilderParams params) { - if (auto builder = ExternalDerivationBuilder::newIfSupported(store, miscMethods, params)) - return builder; - bool useSandbox = false; /* Are we doing a sandboxed build? */ diff --git a/src/libstore/unix/build/external-derivation-builder.cc b/src/libstore/unix/build/external-derivation-builder.cc index 71cfd1a62c0..7ddb6e093b1 100644 --- a/src/libstore/unix/build/external-derivation-builder.cc +++ b/src/libstore/unix/build/external-derivation-builder.cc @@ -2,31 +2,19 @@ namespace nix { struct ExternalDerivationBuilder : DerivationBuilderImpl { - Settings::ExternalBuilder externalBuilder; + ExternalBuilder externalBuilder; ExternalDerivationBuilder( LocalStore & store, std::unique_ptr miscMethods, DerivationBuilderParams params, - Settings::ExternalBuilder externalBuilder) + ExternalBuilder externalBuilder) : DerivationBuilderImpl(store, std::move(miscMethods), std::move(params)) , externalBuilder(std::move(externalBuilder)) { experimentalFeatureSettings.require(Xp::ExternalBuilders); } - static std::unique_ptr newIfSupported( - LocalStore & store, std::unique_ptr & miscMethods, DerivationBuilderParams & params) - { - for (auto & handler : settings.externalBuilders.get()) { - for (auto & system : handler.systems) - if (params.drv.platform == system) - return std::make_unique( - store, std::move(miscMethods), std::move(params), handler); - } - return {}; - } - Path tmpDirInSandbox() override { /* In a sandbox, for determinism, always use the same temporary @@ -40,8 +28,6 @@ struct ExternalDerivationBuilder : DerivationBuilderImpl createDir(tmpDir, 0700); } - void checkSystem() override {} - void startChild() override { if 
(drvOptions.getRequiredSystemFeatures(drv).count("recursive-nix")) @@ -120,4 +106,13 @@ struct ExternalDerivationBuilder : DerivationBuilderImpl } }; +std::unique_ptr makeExternalDerivationBuilder( + LocalStore & store, + std::unique_ptr miscMethods, + DerivationBuilderParams params, + const ExternalBuilder & handler) +{ + return std::make_unique(store, std::move(miscMethods), std::move(params), handler); +} + } // namespace nix diff --git a/src/libstore/unix/build/linux-derivation-builder.cc b/src/libstore/unix/build/linux-derivation-builder.cc index 7c6edca6567..fc2140817d7 100644 --- a/src/libstore/unix/build/linux-derivation-builder.cc +++ b/src/libstore/unix/build/linux-derivation-builder.cc @@ -276,6 +276,12 @@ struct ChrootLinuxDerivationBuilder : ChrootDerivationBuilder, LinuxDerivationBu void startChild() override { + RunChildArgs args{ +# if NIX_WITH_AWS_AUTH + .awsCredentials = preResolveAwsCredentials(), +# endif + }; + /* Set up private namespaces for the build: - The PID namespace causes the build to start as PID 1. @@ -343,7 +349,7 @@ struct ChrootLinuxDerivationBuilder : ChrootDerivationBuilder, LinuxDerivationBu if (usingUserNamespace) options.cloneFlags |= CLONE_NEWUSER; - pid_t child = startProcess([&]() { runChild(); }, options); + pid_t child = startProcess([this, args = std::move(args)]() { runChild(std::move(args)); }, options); writeFull(sendPid.writeSide.get(), fmt("%d\n", child)); _exit(0); diff --git a/src/libstore/unix/pathlocks.cc b/src/libstore/unix/pathlocks.cc index e3f411a5dbf..6117b82c892 100644 --- a/src/libstore/unix/pathlocks.cc +++ b/src/libstore/unix/pathlocks.cc @@ -13,18 +13,18 @@ namespace nix { -AutoCloseFD openLockFile(const Path & path, bool create) +AutoCloseFD openLockFile(const std::filesystem::path & path, bool create) { AutoCloseFD fd; fd = open(path.c_str(), O_CLOEXEC | O_RDWR | (create ? 
O_CREAT : 0), 0600); if (!fd && (create || errno != ENOENT)) - throw SysError("opening lock file '%1%'", path); + throw SysError("opening lock file %1%", path); return fd; } -void deleteLockFile(const Path & path, Descriptor desc) +void deleteLockFile(const std::filesystem::path & path, Descriptor desc) { /* Get rid of the lock file. Have to be careful not to introduce races. Write a (meaningless) token to the file to indicate to @@ -69,7 +69,7 @@ bool lockFile(Descriptor desc, LockType lockType, bool wait) return true; } -bool PathLocks::lockPaths(const PathSet & paths, const std::string & waitMsg, bool wait) +bool PathLocks::lockPaths(const std::set & paths, const std::string & waitMsg, bool wait) { assert(fds.empty()); @@ -81,9 +81,9 @@ bool PathLocks::lockPaths(const PathSet & paths, const std::string & waitMsg, bo preventing deadlocks. */ for (auto & path : paths) { checkInterrupt(); - Path lockPath = path + ".lock"; + std::filesystem::path lockPath = path + ".lock"; - debug("locking path '%1%'", path); + debug("locking path %1%", path); AutoCloseFD fd; @@ -106,19 +106,19 @@ bool PathLocks::lockPaths(const PathSet & paths, const std::string & waitMsg, bo } } - debug("lock acquired on '%1%'", lockPath); + debug("lock acquired on %1%", lockPath); /* Check that the lock file hasn't become stale (i.e., hasn't been unlinked). */ struct stat st; if (fstat(fd.get(), &st) == -1) - throw SysError("statting lock file '%1%'", lockPath); + throw SysError("statting lock file %1%", lockPath); if (st.st_size != 0) /* This lock file has been unlinked, so we're holding a lock on a deleted file. This means that other processes may create and acquire a lock on `lockPath', and proceed. So we must retry. 
*/ - debug("open lock file '%1%' has become stale", lockPath); + debug("open lock file %1% has become stale", lockPath); else break; } @@ -137,9 +137,9 @@ void PathLocks::unlock() deleteLockFile(i.second, i.first); if (close(i.first) == -1) - printError("error (ignored): cannot close lock file on '%1%'", i.second); + printError("error (ignored): cannot close lock file on %1%", i.second); - debug("lock released on '%1%'", i.second); + debug("lock released on %1%", i.second); } fds.clear(); diff --git a/src/libstore/windows/pathlocks.cc b/src/libstore/windows/pathlocks.cc index c4e3a3d3999..32d9e7c0fa8 100644 --- a/src/libstore/windows/pathlocks.cc +++ b/src/libstore/windows/pathlocks.cc @@ -13,10 +13,10 @@ namespace nix { using namespace nix::windows; -void deleteLockFile(const Path & path, Descriptor desc) +void deleteLockFile(const std::filesystem::path & path, Descriptor desc) { - int exit = DeleteFileA(path.c_str()); + int exit = DeleteFileW(path.c_str()); if (exit == 0) warn("%s: &s", path, std::to_string(GetLastError())); } @@ -28,17 +28,17 @@ void PathLocks::unlock() deleteLockFile(i.second, i.first); if (CloseHandle(i.first) == -1) - printError("error (ignored): cannot close lock file on '%1%'", i.second); + printError("error (ignored): cannot close lock file on %1%", i.second); - debug("lock released on '%1%'", i.second); + debug("lock released on %1%", i.second); } fds.clear(); } -AutoCloseFD openLockFile(const Path & path, bool create) +AutoCloseFD openLockFile(const std::filesystem::path & path, bool create) { - AutoCloseFD desc = CreateFileA( + AutoCloseFD desc = CreateFileW( path.c_str(), GENERIC_READ | GENERIC_WRITE, FILE_SHARE_READ | FILE_SHARE_WRITE, @@ -103,14 +103,15 @@ bool lockFile(Descriptor desc, LockType lockType, bool wait) } } -bool PathLocks::lockPaths(const PathSet & paths, const std::string & waitMsg, bool wait) +bool PathLocks::lockPaths(const std::set & paths, const std::string & waitMsg, bool wait) { assert(fds.empty()); for (auto & 
path : paths) { checkInterrupt(); - Path lockPath = path + ".lock"; - debug("locking path '%1%'", path); + std::filesystem::path lockPath = path; + lockPath += L".lock"; + debug("locking path %1%", path); AutoCloseFD fd; @@ -127,13 +128,13 @@ bool PathLocks::lockPaths(const PathSet & paths, const std::string & waitMsg, bo } } - debug("lock acquired on '%1%'", lockPath); + debug("lock acquired on %1%", lockPath); struct _stat st; if (_fstat(fromDescriptorReadOnly(fd.get()), &st) == -1) - throw SysError("statting lock file '%1%'", lockPath); + throw SysError("statting lock file %1%", lockPath); if (st.st_size != 0) - debug("open lock file '%1%' has become stale", lockPath); + debug("open lock file %1% has become stale", lockPath); else break; } diff --git a/src/libstore/worker-protocol.cc b/src/libstore/worker-protocol.cc index a17d2c02857..2788222c0d7 100644 --- a/src/libstore/worker-protocol.cc +++ b/src/libstore/worker-protocol.cc @@ -1,6 +1,7 @@ #include "nix/util/serialise.hh" #include "nix/store/path-with-outputs.hh" #include "nix/store/store-api.hh" +#include "nix/store/gc-store.hh" #include "nix/store/build-result.hh" #include "nix/store/worker-protocol.hh" #include "nix/store/worker-protocol-impl.hh" @@ -47,6 +48,46 @@ void WorkerProto::Serialise::write( }; } +GCAction WorkerProto::Serialise::read(const StoreDirConfig & store, WorkerProto::ReadConn conn) +{ + auto temp = readNum(conn.from); + using enum GCAction; + switch (temp) { + case 0: + return gcReturnLive; + case 1: + return gcReturnDead; + case 2: + return gcDeleteDead; + case 3: + return gcDeleteSpecific; + default: + throw Error("Invalid GC action"); + } +} + +void WorkerProto::Serialise::write( + const StoreDirConfig & store, WorkerProto::WriteConn conn, const GCAction & action) +{ + using enum GCAction; + switch (action) { + case gcReturnLive: + conn.to << unsigned{0}; + break; + case gcReturnDead: + conn.to << unsigned{1}; + break; + case gcDeleteDead: + conn.to << unsigned{2}; + break; + case 
gcDeleteSpecific: + conn.to << unsigned{3}; + break; + default: + assert(false); + } +} + std::optional WorkerProto::Serialise>::read(const StoreDirConfig & store, WorkerProto::ReadConn conn) { @@ -253,7 +294,7 @@ UnkeyedValidPathInfo WorkerProto::Serialise::read(const St { auto deriver = WorkerProto::Serialise>::read(store, conn); auto narHash = Hash::parseAny(readString(conn.from), HashAlgorithm::SHA256); - UnkeyedValidPathInfo info(narHash); + UnkeyedValidPathInfo info(store, narHash); info.deriver = std::move(deriver); info.references = WorkerProto::Serialise::read(store, conn); conn.from >> info.registrationTime >> info.narSize; diff --git a/src/libutil-c/meson.build b/src/libutil-c/meson.build index 54fd53c74f0..1806dbb6f9a 100644 --- a/src/libutil-c/meson.build +++ b/src/libutil-c/meson.build @@ -32,7 +32,6 @@ config_priv_h = configure_file( ) subdir('nix-meson-build-support/common') -subdir('nix-meson-build-support/asan-options') sources = files( 'nix_api_util.cc', diff --git a/src/libutil-c/nix_api_util.cc b/src/libutil-c/nix_api_util.cc index 3903823aa9b..f28a9168e30 100644 --- a/src/libutil-c/nix_api_util.cc +++ b/src/libutil-c/nix_api_util.cc @@ -13,7 +13,11 @@ extern "C" { nix_c_context * nix_c_context_create() { - return new nix_c_context(); + try { + return new nix_c_context(); + } catch (...) 
{ + return nullptr; + } } void nix_c_context_free(nix_c_context * context) @@ -36,7 +40,7 @@ nix_err nix_context_error(nix_c_context * context) const char * demangled = abi::__cxa_demangle(typeid(e).name(), 0, 0, &status); if (demangled) { context->name = demangled; - // todo: free(demangled); + free((void *) demangled); } else { context->name = typeid(e).name(); } @@ -153,9 +157,9 @@ nix_err nix_err_code(const nix_c_context * read_context) } // internal -nix_err call_nix_get_string_callback(const std::string str, nix_get_string_callback callback, void * user_data) +nix_err call_nix_get_string_callback(const std::string_view str, nix_get_string_callback callback, void * user_data) { - callback(str.c_str(), str.size(), user_data); + callback(str.data(), str.size(), user_data); return NIX_OK; } diff --git a/src/libutil-c/nix_api_util.h b/src/libutil-c/nix_api_util.h index 4d7f394fa01..d301e5743cf 100644 --- a/src/libutil-c/nix_api_util.h +++ b/src/libutil-c/nix_api_util.h @@ -155,6 +155,8 @@ typedef struct nix_c_context nix_c_context; /** * @brief Called to get the value of a string owned by Nix. * + * The `start` data is borrowed and the function must not assume that the buffer persists after it returns. + * * @param[in] start the string to copy. * @param[in] n the string length. * @param[in] user_data optional, arbitrary data, passed to the nix_get_string_callback when it's called. diff --git a/src/libutil-c/nix_api_util_internal.h b/src/libutil-c/nix_api_util_internal.h index 92bb9c1d298..e4c5e93bbef 100644 --- a/src/libutil-c/nix_api_util_internal.h +++ b/src/libutil-c/nix_api_util_internal.h @@ -32,7 +32,7 @@ nix_err nix_context_error(nix_c_context * context); * @return NIX_OK if there were no errors. 
* @see nix_get_string_callback */ -nix_err call_nix_get_string_callback(const std::string str, nix_get_string_callback callback, void * user_data); +nix_err call_nix_get_string_callback(const std::string_view str, nix_get_string_callback callback, void * user_data); #define NIXC_CATCH_ERRS \ catch (...) \ diff --git a/src/libutil-test-support/include/nix/util/tests/characterization.hh b/src/libutil-test-support/include/nix/util/tests/characterization.hh index 0434590f799..d8fad1df925 100644 --- a/src/libutil-test-support/include/nix/util/tests/characterization.hh +++ b/src/libutil-test-support/include/nix/util/tests/characterization.hh @@ -31,16 +31,14 @@ static inline bool testAccept() /** * Mixin class for writing characterization tests */ -class CharacterizationTest : public virtual ::testing::Test +struct CharacterizationTest : virtual ::testing::Test { -protected: /** * While the "golden master" for this characterization test is * located. It should not be shared with any other test. */ virtual std::filesystem::path goldenMaster(PathView testStem) const = 0; -public: /** * Golden test for reading * diff --git a/src/libutil-test-support/include/nix/util/tests/json-characterization.hh b/src/libutil-test-support/include/nix/util/tests/json-characterization.hh index 5a38b8e2c42..6db32c4b6c3 100644 --- a/src/libutil-test-support/include/nix/util/tests/json-characterization.hh +++ b/src/libutil-test-support/include/nix/util/tests/json-characterization.hh @@ -5,12 +5,84 @@ #include #include "nix/util/types.hh" +#include "nix/util/ref.hh" #include "nix/util/file-system.hh" #include "nix/util/tests/characterization.hh" namespace nix { +/** + * Golden test for JSON reading + */ +template +void readJsonTest(CharacterizationTest & test, PathView testStem, const T & expected, auto... 
args) +{ + using namespace nlohmann; + test.readTest(Path{testStem} + ".json", [&](const auto & encodedRaw) { + auto encoded = json::parse(encodedRaw); + T decoded = adl_serializer::from_json(encoded, args...); + ASSERT_EQ(decoded, expected); + }); +} + +/** + * Golden test for JSON writing + */ +template +void writeJsonTest(CharacterizationTest & test, PathView testStem, const T & value) +{ + using namespace nlohmann; + test.writeTest( + Path{testStem} + ".json", + [&]() -> json { return static_cast(value); }, + [](const auto & file) { return json::parse(readFile(file)); }, + [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); +} + +/** + * Specialization for when we need to do "JSON -> `ref`" in one + * direction, but "`const T &` -> JSON" in the other direction. + * + * We can't just return `const T &`, but it would be wasteful to + * requires a `const ref &` double indirection (and mandatory shared + * pointer), so we break the symmetry as the best remaining option. 
+ */ +template +void writeJsonTest(CharacterizationTest & test, PathView testStem, const ref & value) +{ + using namespace nlohmann; + test.writeTest( + Path{testStem} + ".json", + [&]() -> json { return static_cast(*value); }, + [](const auto & file) { return json::parse(readFile(file)); }, + [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); +} + +/** + * Golden test in the middle of something + */ +template +void checkpointJson(CharacterizationTest & test, PathView testStem, const T & got) +{ + using namespace nlohmann; + + auto file = test.goldenMaster(Path{testStem} + ".json"); + + json gotJson = static_cast(got); + + if (testAccept()) { + std::filesystem::create_directories(file.parent_path()); + writeFile(file, gotJson.dump(2) + "\n"); + ADD_FAILURE() << "Updating golden master " << file; + } else { + json expectedJson = json::parse(readFile(file)); + ASSERT_EQ(gotJson, expectedJson); + T expected = adl_serializer::from_json(expectedJson); + ASSERT_EQ(got, expected); + } +} + /** * Mixin class for writing characterization tests for `nlohmann::json` * conversions for a given type. @@ -24,14 +96,9 @@ struct JsonCharacterizationTest : virtual CharacterizationTest * @param test hook that takes the contents of the file and does the * actual work */ - void readJsonTest(PathView testStem, const T & expected) + void readJsonTest(PathView testStem, const T & expected, auto... 
args) { - using namespace nlohmann; - readTest(Path{testStem} + ".json", [&](const auto & encodedRaw) { - auto encoded = json::parse(encodedRaw); - T decoded = adl_serializer::from_json(encoded); - ASSERT_EQ(decoded, expected); - }); + nix::readJsonTest(*this, testStem, expected, args...); } /** @@ -42,12 +109,12 @@ struct JsonCharacterizationTest : virtual CharacterizationTest */ void writeJsonTest(PathView testStem, const T & value) { - using namespace nlohmann; - writeTest( - Path{testStem} + ".json", - [&]() -> json { return static_cast(value); }, - [](const auto & file) { return json::parse(readFile(file)); }, - [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); + nix::writeJsonTest(*this, testStem, value); + } + + void checkpointJson(PathView testStem, const T & value) + { + nix::checkpointJson(*this, testStem, value); } }; diff --git a/src/libutil-test-support/meson.build b/src/libutil-test-support/meson.build index 1ca251ce8dc..64231107eb6 100644 --- a/src/libutil-test-support/meson.build +++ b/src/libutil-test-support/meson.build @@ -27,7 +27,6 @@ rapidcheck = dependency('rapidcheck') deps_public += rapidcheck subdir('nix-meson-build-support/common') -subdir('nix-meson-build-support/asan-options') sources = files( 'hash.cc', diff --git a/src/libutil-tests/alignment.cc b/src/libutil-tests/alignment.cc new file mode 100644 index 00000000000..bef0c435dc0 --- /dev/null +++ b/src/libutil-tests/alignment.cc @@ -0,0 +1,18 @@ +#include "nix/util/alignment.hh" + +#include + +namespace nix { + +TEST(alignUp, value) +{ + for (uint64_t i = 1; i <= 8; ++i) + EXPECT_EQ(alignUp(i, 8), 8); +} + +TEST(alignUp, notAPowerOf2) +{ + ASSERT_DEATH({ alignUp(1u, 42); }, "alignment must be a power of 2"); +} + +} // namespace nix diff --git a/src/libutil-tests/archive.cc b/src/libutil-tests/archive.cc index 386f7b857ba..427b29d416b 100644 --- a/src/libutil-tests/archive.cc +++ b/src/libutil-tests/archive.cc @@ -42,6 +42,20 @@ 
INSTANTIATE_TEST_SUITE_P( NarTest, InvalidNarTest, ::testing::Values( - std::pair{"invalid-tag-instead-of-contents", "bad archive: expected tag 'contents', got 'AAAAAAAA'"})); + std::pair{"invalid-tag-instead-of-contents", "bad archive: expected tag 'contents', got 'AAAAAAAA'"}, + // Unpacking a NAR with a NUL character in a file name should fail. + std::pair{"nul-character", "bad archive: NAR contains invalid file name 'f"}, + // Likewise for a '.' filename. + std::pair{"dot", "bad archive: NAR contains invalid file name '.'"}, + // Likewise for a '..' filename. + std::pair{"dotdot", "bad archive: NAR contains invalid file name '..'"}, + // Likewise for a filename containing a slash. + std::pair{"slash", "bad archive: NAR contains invalid file name 'x/y'"}, + // Likewise for an empty filename. + std::pair{"empty", "bad archive: NAR contains invalid file name ''"}, + // Test that the 'executable' field cannot come before the 'contents' field. + std::pair{"executable-after-contents", "bad archive: expected tag ')', got 'executable'"}, + // Test that the 'name' field cannot come before the 'node' field in a directory entry. 
+ std::pair{"name-after-node", "bad archive: expected tag 'name'"})); } // namespace nix diff --git a/src/libutil-tests/canon-path.cc b/src/libutil-tests/canon-path.cc index 971a9cc967b..aae9285c4e3 100644 --- a/src/libutil-tests/canon-path.cc +++ b/src/libutil-tests/canon-path.cc @@ -42,6 +42,15 @@ TEST(CanonPath, basic) } } +TEST(CanonPath, nullBytes) +{ + std::string s = "/hello/world"; + s[8] = '\0'; + ASSERT_THROW(CanonPath("/").push(std::string(1, '\0')), BadCanonPath); + ASSERT_THROW(CanonPath(std::string_view(s)), BadCanonPath); + ASSERT_THROW(CanonPath(s, CanonPath::root), BadCanonPath); +} + TEST(CanonPath, from_existing) { CanonPath p0("foo//bar/"); diff --git a/src/libutil-tests/data/hash/blake3.json b/src/libutil-tests/data/hash/blake3.json new file mode 100644 index 00000000000..bdb9106703a --- /dev/null +++ b/src/libutil-tests/data/hash/blake3.json @@ -0,0 +1 @@ +"blake3-nnDuFEmWX7YtBJBAoe0G7Dd0MNpuwTFz58T//NKL6YA=" diff --git a/src/libutil-tests/data/hash/sha256.json b/src/libutil-tests/data/hash/sha256.json new file mode 100644 index 00000000000..5ef584bf1dc --- /dev/null +++ b/src/libutil-tests/data/hash/sha256.json @@ -0,0 +1 @@ +"sha256-8OTC92xYkW7CWPJGhRvqCR0U1CR6L8PhhpRGGxgW4Ts=" diff --git a/src/libutil-tests/data/hash/sha512.json b/src/libutil-tests/data/hash/sha512.json new file mode 100644 index 00000000000..5ef584bf1dc --- /dev/null +++ b/src/libutil-tests/data/hash/sha512.json @@ -0,0 +1 @@ +"sha256-8OTC92xYkW7CWPJGhRvqCR0U1CR6L8PhhpRGGxgW4Ts=" diff --git a/src/libutil-tests/data/memory-source-accessor/complex.json b/src/libutil-tests/data/memory-source-accessor/complex.json new file mode 100644 index 00000000000..924a39a5461 --- /dev/null +++ b/src/libutil-tests/data/memory-source-accessor/complex.json @@ -0,0 +1,24 @@ +{ + "entries": { + "bar": { + "entries": { + "baz": { + "contents": "good day,\n\u0000\n\tworld!", + "executable": true, + "type": "regular" + }, + "quux": { + "target": "/over/there", + "type": "symlink" + } + }, + 
"type": "directory" + }, + "foo": { + "contents": "hello\n\u0000\n\tworld!", + "executable": false, + "type": "regular" + } + }, + "type": "directory" +} diff --git a/src/libutil-tests/data/memory-source-accessor/simple.json b/src/libutil-tests/data/memory-source-accessor/simple.json new file mode 100644 index 00000000000..1ae328cc419 --- /dev/null +++ b/src/libutil-tests/data/memory-source-accessor/simple.json @@ -0,0 +1,5 @@ +{ + "contents": "asdf", + "executable": false, + "type": "regular" +} diff --git a/src/libutil-tests/data/nar-listing/deep.json b/src/libutil-tests/data/nar-listing/deep.json new file mode 100644 index 00000000000..a7ed47c4c03 --- /dev/null +++ b/src/libutil-tests/data/nar-listing/deep.json @@ -0,0 +1,23 @@ +{ + "entries": { + "bar": { + "entries": { + "baz": { + "executable": true, + "size": 19, + "type": "regular" + }, + "quux": { + "target": "/over/there", + "type": "symlink" + } + }, + "type": "directory" + }, + "foo": { + "size": 15, + "type": "regular" + } + }, + "type": "directory" +} diff --git a/src/libutil-tests/data/nar-listing/shallow.json b/src/libutil-tests/data/nar-listing/shallow.json new file mode 100644 index 00000000000..9826cd1a0bb --- /dev/null +++ b/src/libutil-tests/data/nar-listing/shallow.json @@ -0,0 +1,7 @@ +{ + "entries": { + "bar": {}, + "foo": {} + }, + "type": "directory" +} diff --git a/tests/functional/dot.nar b/src/libutil-tests/data/nars/dot.nar similarity index 100% rename from tests/functional/dot.nar rename to src/libutil-tests/data/nars/dot.nar diff --git a/tests/functional/dotdot.nar b/src/libutil-tests/data/nars/dotdot.nar similarity index 100% rename from tests/functional/dotdot.nar rename to src/libutil-tests/data/nars/dotdot.nar diff --git a/tests/functional/empty.nar b/src/libutil-tests/data/nars/empty.nar similarity index 100% rename from tests/functional/empty.nar rename to src/libutil-tests/data/nars/empty.nar diff --git a/tests/functional/executable-after-contents.nar 
b/src/libutil-tests/data/nars/executable-after-contents.nar similarity index 100% rename from tests/functional/executable-after-contents.nar rename to src/libutil-tests/data/nars/executable-after-contents.nar diff --git a/tests/functional/name-after-node.nar b/src/libutil-tests/data/nars/name-after-node.nar similarity index 100% rename from tests/functional/name-after-node.nar rename to src/libutil-tests/data/nars/name-after-node.nar diff --git a/tests/functional/nul-character.nar b/src/libutil-tests/data/nars/nul-character.nar similarity index 100% rename from tests/functional/nul-character.nar rename to src/libutil-tests/data/nars/nul-character.nar diff --git a/tests/functional/slash.nar b/src/libutil-tests/data/nars/slash.nar similarity index 100% rename from tests/functional/slash.nar rename to src/libutil-tests/data/nars/slash.nar diff --git a/src/libutil-tests/file-system.cc b/src/libutil-tests/file-system.cc index dfdd260887e..1551227cbd9 100644 --- a/src/libutil-tests/file-system.cc +++ b/src/libutil-tests/file-system.cc @@ -1,3 +1,4 @@ +#include "nix/util/fs-sink.hh" #include "nix/util/util.hh" #include "nix/util/types.hh" #include "nix/util/file-system.hh" @@ -306,10 +307,10 @@ TEST(DirectoryIterator, works) auto tmpDir = nix::createTempDir(); nix::AutoDelete delTmpDir(tmpDir, true); - nix::writeFile(tmpDir + "/somefile", ""); + nix::writeFile(tmpDir / "somefile", ""); for (auto path : DirectoryIterator(tmpDir)) { - ASSERT_EQ(path.path().string(), tmpDir + "/somefile"); + ASSERT_EQ(path.path(), tmpDir / "somefile"); } } @@ -318,4 +319,101 @@ TEST(DirectoryIterator, nonexistent) ASSERT_THROW(DirectoryIterator("/schnitzel/darmstadt/pommes"), SysError); } +/* ---------------------------------------------------------------------------- + * openFileEnsureBeneathNoSymlinks + * --------------------------------------------------------------------------*/ + +#ifndef _WIN32 + +TEST(openFileEnsureBeneathNoSymlinks, works) +{ + std::filesystem::path tmpDir = 
nix::createTempDir(); + nix::AutoDelete delTmpDir(tmpDir, /*recursive=*/true); + using namespace nix::unix; + + { + RestoreSink sink(/*startFsync=*/false); + sink.dstPath = tmpDir; + sink.dirFd = openDirectory(tmpDir); + sink.createDirectory(CanonPath("a")); + sink.createDirectory(CanonPath("c")); + sink.createDirectory(CanonPath("c/d")); + sink.createRegularFile(CanonPath("c/d/regular"), [](CreateRegularFileSink & crf) { crf("some contents"); }); + sink.createSymlink(CanonPath("a/absolute_symlink"), tmpDir.string()); + sink.createSymlink(CanonPath("a/relative_symlink"), "../."); + sink.createSymlink(CanonPath("a/broken_symlink"), "./nonexistent"); + sink.createDirectory(CanonPath("a/b"), [](FileSystemObjectSink & dirSink, const CanonPath & relPath) { + dirSink.createDirectory(CanonPath("d")); + dirSink.createSymlink(CanonPath("c"), "./d"); + }); + sink.createDirectory(CanonPath("a/b/c/e")); // FIXME: This still follows symlinks + ASSERT_THROW( + sink.createDirectory( + CanonPath("a/b/c/f"), [](FileSystemObjectSink & dirSink, const CanonPath & relPath) {}), + SymlinkNotAllowed); + ASSERT_THROW( + sink.createRegularFile( + CanonPath("a/b/c/regular"), [](CreateRegularFileSink & crf) { crf("some contents"); }), + SymlinkNotAllowed); + } + + AutoCloseFD dirFd = openDirectory(tmpDir); + + auto open = [&](std::string_view path, int flags, mode_t mode = 0) { + return openFileEnsureBeneathNoSymlinks(dirFd.get(), CanonPath(path), flags, mode); + }; + + EXPECT_THROW(open("a/absolute_symlink", O_RDONLY), SymlinkNotAllowed); + EXPECT_THROW(open("a/relative_symlink", O_RDONLY), SymlinkNotAllowed); + EXPECT_THROW(open("a/absolute_symlink/a", O_RDONLY), SymlinkNotAllowed); + EXPECT_THROW(open("a/absolute_symlink/c/d", O_RDONLY), SymlinkNotAllowed); + EXPECT_THROW(open("a/relative_symlink/c", O_RDONLY), SymlinkNotAllowed); + EXPECT_THROW(open("a/b/c/d", O_RDONLY), SymlinkNotAllowed); + EXPECT_EQ(open("a/broken_symlink", O_CREAT | O_WRONLY | O_EXCL, 0666), INVALID_DESCRIPTOR); + /* 
Sanity check, no symlink shenanigans and behaves the same as regular openat with O_EXCL | O_CREAT. */ + EXPECT_EQ(errno, EEXIST); + EXPECT_THROW(open("a/absolute_symlink/broken_symlink", O_CREAT | O_WRONLY | O_EXCL, 0666), SymlinkNotAllowed); + EXPECT_EQ(open("c/d/regular/a", O_RDONLY), INVALID_DESCRIPTOR); + EXPECT_EQ(open("c/d/regular", O_RDONLY | O_DIRECTORY), INVALID_DESCRIPTOR); + EXPECT_TRUE(AutoCloseFD{open("c/d/regular", O_RDONLY)}); + EXPECT_TRUE(AutoCloseFD{open("a/regular", O_CREAT | O_WRONLY | O_EXCL, 0666)}); +} + +#endif + +/* ---------------------------------------------------------------------------- + * createAnonymousTempFile + * --------------------------------------------------------------------------*/ + +TEST(createAnonymousTempFile, works) +{ + auto fd = createAnonymousTempFile(); + auto fd_ = fromDescriptorReadOnly(fd.get()); + writeFull(fd.get(), "test"); + lseek(fd_, 0, SEEK_SET); + FdSource source{fd.get()}; + EXPECT_EQ(source.drain(), "test"); + lseek(fd_, 0, SEEK_END); + writeFull(fd.get(), "test"); + lseek(fd_, 0, SEEK_SET); + EXPECT_EQ(source.drain(), "testtest"); +} + +/* ---------------------------------------------------------------------------- + * FdSource + * --------------------------------------------------------------------------*/ + +TEST(FdSource, restartWorks) +{ + auto fd = createAnonymousTempFile(); + auto fd_ = fromDescriptorReadOnly(fd.get()); + writeFull(fd.get(), "hello world"); + lseek(fd_, 0, SEEK_SET); + FdSource source{fd.get()}; + EXPECT_EQ(source.drain(), "hello world"); + source.restart(); + EXPECT_EQ(source.drain(), "hello world"); + EXPECT_EQ(source.drain(), ""); +} + } // namespace nix diff --git a/src/libutil-tests/git.cc b/src/libutil-tests/git.cc index 6180a4cfc7f..f761c443350 100644 --- a/src/libutil-tests/git.cc +++ b/src/libutil-tests/git.cc @@ -224,42 +224,15 @@ TEST_F(GitTest, tree_sha256_write) }); } +namespace memory_source_accessor { + +extern ref exampleComplex(); + +} + TEST_F(GitTest, 
both_roundrip) { - using File = MemorySourceAccessor::File; - - auto files = make_ref(); - files->root = File::Directory{ - .contents{ - { - "foo", - File::Regular{ - .contents = "hello\n\0\n\tworld!", - }, - }, - { - "bar", - File::Directory{ - .contents = - { - { - "baz", - File::Regular{ - .executable = true, - .contents = "good day,\n\0\n\tworld!", - }, - }, - { - "quux", - File::Symlink{ - .target = "/over/there", - }, - }, - }, - }, - }, - }, - }; + auto files = memory_source_accessor::exampleComplex(); for (const auto hashAlgo : {HashAlgorithm::SHA1, HashAlgorithm::SHA256}) { std::map cas; diff --git a/src/libutil-tests/hash.cc b/src/libutil-tests/hash.cc index 15e63918018..a6bb52d1953 100644 --- a/src/libutil-tests/hash.cc +++ b/src/libutil-tests/hash.cc @@ -4,30 +4,30 @@ #include #include "nix/util/hash.hh" -#include "nix/util/tests/characterization.hh" +#include "nix/util/tests/json-characterization.hh" namespace nix { -class HashTest : public CharacterizationTest +class HashTest : public virtual CharacterizationTest { std::filesystem::path unitTestData = getUnitTestData() / "hash"; public: - /** - * We set these in tests rather than the regular globals so we don't have - * to worry about race conditions if the tests run concurrently. - */ - ExperimentalFeatureSettings mockXpSettings; - std::filesystem::path goldenMaster(std::string_view testStem) const override { return unitTestData / testStem; } }; -class BLAKE3HashTest : public HashTest +struct BLAKE3HashTest : virtual HashTest { + /** + * We set these in tests rather than the regular globals so we don't have + * to worry about race conditions if the tests run concurrently. 
+ */ + ExperimentalFeatureSettings mockXpSettings; + void SetUp() override { mockXpSettings.set("experimental-features", "blake3-hashes"); @@ -203,4 +203,69 @@ TEST(hashFormat, testParseHashFormatOptException) { ASSERT_EQ(parseHashFormatOpt("sha0042"), std::nullopt); } + +/* ---------------------------------------------------------------------------- + * JSON + * --------------------------------------------------------------------------*/ + +using nlohmann::json; + +struct HashJsonTest : virtual HashTest, + JsonCharacterizationTest, + ::testing::WithParamInterface> +{}; + +struct BLAKE3HashJsonTest : virtual HashTest, + BLAKE3HashTest, + JsonCharacterizationTest, + ::testing::WithParamInterface> +{}; + +TEST_P(HashJsonTest, from_json) +{ + auto & [name, expected] = GetParam(); + readJsonTest(name, expected); +} + +TEST_P(HashJsonTest, to_json) +{ + auto & [name, value] = GetParam(); + writeJsonTest(name, value); +} + +TEST_P(BLAKE3HashJsonTest, from_json) +{ + auto & [name, expected] = GetParam(); + readJsonTest(name, expected, mockXpSettings); +} + +TEST_P(BLAKE3HashJsonTest, to_json) +{ + auto & [name, expected] = GetParam(); + writeJsonTest(name, expected); +} + +INSTANTIATE_TEST_SUITE_P( + HashJSON, + HashJsonTest, + ::testing::Values( + std::pair{ + "sha256", + hashString(HashAlgorithm::SHA256, "asdf"), + }, + std::pair{ + "sha512", + hashString(HashAlgorithm::SHA256, "asdf"), + })); + +INSTANTIATE_TEST_SUITE_P(BLAKE3HashJSON, BLAKE3HashJsonTest, ([] { + ExperimentalFeatureSettings mockXpSettings; + mockXpSettings.set("experimental-features", "blake3-hashes"); + return ::testing::Values( + std::pair{ + "blake3", + hashString(HashAlgorithm::BLAKE3, "asdf", mockXpSettings), + }); + }())); + } // namespace nix diff --git a/src/libutil-tests/json-utils.cc b/src/libutil-tests/json-utils.cc index 7d02894c614..b5c0113556b 100644 --- a/src/libutil-tests/json-utils.cc +++ b/src/libutil-tests/json-utils.cc @@ -70,7 +70,7 @@ TEST(valueAt, simpleObject) auto nested = R"({ 
"hello": { "world": "" } })"_json; - ASSERT_EQ(valueAt(valueAt(getObject(nested), "hello"), "world"), ""); + ASSERT_EQ(valueAt(getObject(valueAt(getObject(nested), "hello")), "world"), ""); } TEST(valueAt, missingKey) @@ -119,10 +119,12 @@ TEST(getArray, wrongAssertions) { auto json = R"({ "object": {}, "array": [], "string": "", "int": 0, "boolean": false })"_json; - ASSERT_THROW(getArray(valueAt(json, "object")), Error); - ASSERT_THROW(getArray(valueAt(json, "string")), Error); - ASSERT_THROW(getArray(valueAt(json, "int")), Error); - ASSERT_THROW(getArray(valueAt(json, "boolean")), Error); + auto & obj = getObject(json); + + ASSERT_THROW(getArray(valueAt(obj, "object")), Error); + ASSERT_THROW(getArray(valueAt(obj, "string")), Error); + ASSERT_THROW(getArray(valueAt(obj, "int")), Error); + ASSERT_THROW(getArray(valueAt(obj, "boolean")), Error); } TEST(getString, rightAssertions) @@ -136,10 +138,12 @@ TEST(getString, wrongAssertions) { auto json = R"({ "object": {}, "array": [], "string": "", "int": 0, "boolean": false })"_json; - ASSERT_THROW(getString(valueAt(json, "object")), Error); - ASSERT_THROW(getString(valueAt(json, "array")), Error); - ASSERT_THROW(getString(valueAt(json, "int")), Error); - ASSERT_THROW(getString(valueAt(json, "boolean")), Error); + auto & obj = getObject(json); + + ASSERT_THROW(getString(valueAt(obj, "object")), Error); + ASSERT_THROW(getString(valueAt(obj, "array")), Error); + ASSERT_THROW(getString(valueAt(obj, "int")), Error); + ASSERT_THROW(getString(valueAt(obj, "boolean")), Error); } TEST(getIntegralNumber, rightAssertions) @@ -156,18 +160,20 @@ TEST(getIntegralNumber, wrongAssertions) auto json = R"({ "object": {}, "array": [], "string": "", "int": 0, "signed": -256, "large": 128, "boolean": false })"_json; - ASSERT_THROW(getUnsigned(valueAt(json, "object")), Error); - ASSERT_THROW(getUnsigned(valueAt(json, "array")), Error); - ASSERT_THROW(getUnsigned(valueAt(json, "string")), Error); - ASSERT_THROW(getUnsigned(valueAt(json, 
"boolean")), Error); - ASSERT_THROW(getUnsigned(valueAt(json, "signed")), Error); + auto & obj = getObject(json); + + ASSERT_THROW(getUnsigned(valueAt(obj, "object")), Error); + ASSERT_THROW(getUnsigned(valueAt(obj, "array")), Error); + ASSERT_THROW(getUnsigned(valueAt(obj, "string")), Error); + ASSERT_THROW(getUnsigned(valueAt(obj, "boolean")), Error); + ASSERT_THROW(getUnsigned(valueAt(obj, "signed")), Error); - ASSERT_THROW(getInteger(valueAt(json, "object")), Error); - ASSERT_THROW(getInteger(valueAt(json, "array")), Error); - ASSERT_THROW(getInteger(valueAt(json, "string")), Error); - ASSERT_THROW(getInteger(valueAt(json, "boolean")), Error); - ASSERT_THROW(getInteger(valueAt(json, "large")), Error); - ASSERT_THROW(getInteger(valueAt(json, "signed")), Error); + ASSERT_THROW(getInteger(valueAt(obj, "object")), Error); + ASSERT_THROW(getInteger(valueAt(obj, "array")), Error); + ASSERT_THROW(getInteger(valueAt(obj, "string")), Error); + ASSERT_THROW(getInteger(valueAt(obj, "boolean")), Error); + ASSERT_THROW(getInteger(valueAt(obj, "large")), Error); + ASSERT_THROW(getInteger(valueAt(obj, "signed")), Error); } TEST(getBoolean, rightAssertions) @@ -181,24 +187,28 @@ TEST(getBoolean, wrongAssertions) { auto json = R"({ "object": {}, "array": [], "string": "", "int": 0, "boolean": false })"_json; - ASSERT_THROW(getBoolean(valueAt(json, "object")), Error); - ASSERT_THROW(getBoolean(valueAt(json, "array")), Error); - ASSERT_THROW(getBoolean(valueAt(json, "string")), Error); - ASSERT_THROW(getBoolean(valueAt(json, "int")), Error); + auto & obj = getObject(json); + + ASSERT_THROW(getBoolean(valueAt(obj, "object")), Error); + ASSERT_THROW(getBoolean(valueAt(obj, "array")), Error); + ASSERT_THROW(getBoolean(valueAt(obj, "string")), Error); + ASSERT_THROW(getBoolean(valueAt(obj, "int")), Error); } TEST(optionalValueAt, existing) { auto json = R"({ "string": "ssh-rsa" })"_json; - ASSERT_EQ(optionalValueAt(json, "string"), std::optional{"ssh-rsa"}); + auto * ptr = 
optionalValueAt(getObject(json), "string"); + ASSERT_TRUE(ptr); + ASSERT_EQ(*ptr, R"("ssh-rsa")"_json); } TEST(optionalValueAt, empty) { auto json = R"({})"_json; - ASSERT_EQ(optionalValueAt(json, "string"), std::nullopt); + ASSERT_EQ(optionalValueAt(getObject(json), "string"), nullptr); } TEST(getNullable, null) diff --git a/src/libutil-tests/memory-source-accessor.cc b/src/libutil-tests/memory-source-accessor.cc new file mode 100644 index 00000000000..6c7c9ce9e81 --- /dev/null +++ b/src/libutil-tests/memory-source-accessor.cc @@ -0,0 +1,116 @@ +#include + +#include "nix/util/memory-source-accessor.hh" +#include "nix/util/tests/json-characterization.hh" + +namespace nix { + +namespace memory_source_accessor { + +using namespace std::literals; +using File = MemorySourceAccessor::File; + +ref exampleSimple() +{ + auto sc = make_ref(); + sc->root = File{File::Regular{ + .executable = false, + .contents = "asdf", + }}; + return sc; +} + +ref exampleComplex() +{ + auto files = make_ref(); + files->root = File::Directory{ + .entries{ + { + "foo", + File::Regular{ + .contents = "hello\n\0\n\tworld!"s, + }, + }, + { + "bar", + File::Directory{ + .entries = + { + { + "baz", + File::Regular{ + .executable = true, + .contents = "good day,\n\0\n\tworld!"s, + }, + }, + { + "quux", + File::Symlink{ + .target = "/over/there", + }, + }, + }, + }, + }, + }, + }; + return files; +} + +} // namespace memory_source_accessor + +/* ---------------------------------------------------------------------------- + * JSON + * --------------------------------------------------------------------------*/ + +class MemorySourceAccessorTest : public virtual CharacterizationTest +{ + std::filesystem::path unitTestData = getUnitTestData() / "memory-source-accessor"; + +public: + + std::filesystem::path goldenMaster(std::string_view testStem) const override + { + return unitTestData / testStem; + } +}; + +using nlohmann::json; + +struct MemorySourceAccessorJsonTest : MemorySourceAccessorTest, + 
JsonCharacterizationTest, + ::testing::WithParamInterface> +{}; + +TEST_P(MemorySourceAccessorJsonTest, from_json) +{ + auto & [name, expected] = GetParam(); + /* Cannot use `readJsonTest` because need to compare `root` field of + the source accessors for equality. */ + readTest(Path{name} + ".json", [&](const auto & encodedRaw) { + auto encoded = json::parse(encodedRaw); + auto decoded = static_cast(encoded); + ASSERT_EQ(decoded.root, expected.root); + }); +} + +TEST_P(MemorySourceAccessorJsonTest, to_json) +{ + auto & [name, value] = GetParam(); + writeJsonTest(name, value); +} + +INSTANTIATE_TEST_SUITE_P( + MemorySourceAccessorJSON, + MemorySourceAccessorJsonTest, + ::testing::Values( + std::pair{ + "simple", + *memory_source_accessor::exampleSimple(), + }, + std::pair{ + "complex", + *memory_source_accessor::exampleComplex(), + })); + +} // namespace nix diff --git a/src/libutil-tests/meson.build b/src/libutil-tests/meson.build index 83245a73ded..019bdb6d2a3 100644 --- a/src/libutil-tests/meson.build +++ b/src/libutil-tests/meson.build @@ -42,9 +42,9 @@ config_priv_h = configure_file( ) subdir('nix-meson-build-support/common') -subdir('nix-meson-build-support/asan-options') sources = files( + 'alignment.cc', 'archive.cc', 'args.cc', 'base-n.cc', @@ -63,7 +63,9 @@ sources = files( 'json-utils.cc', 'logging.cc', 'lru-cache.cc', + 'memory-source-accessor.cc', 'monitorfdhup.cc', + 'nar-listing.cc', 'nix_api_util.cc', 'nix_api_util_internal.cc', 'pool.cc', @@ -74,6 +76,7 @@ sources = files( 'strings.cc', 'suggestions.cc', 'terminal.cc', + 'topo-sort.cc', 'url.cc', 'util.cc', 'xml-writer.cc', @@ -98,7 +101,7 @@ this_exe = executable( test( meson.project_name(), this_exe, - env : asan_test_options_env + { + env : { '_NIX_TEST_UNIT_DATA' : meson.current_source_dir() / 'data', }, protocol : 'gtest', diff --git a/src/libutil-tests/monitorfdhup.cc b/src/libutil-tests/monitorfdhup.cc index d591b2fed05..02cd7e22ca9 100644 --- a/src/libutil-tests/monitorfdhup.cc +++ 
b/src/libutil-tests/monitorfdhup.cc @@ -1,4 +1,5 @@ -#ifndef _WIN32 +// TODO: investigate why this is hanging on cygwin +#if !defined(_WIN32) && !defined(__CYGWIN__) # include "nix/util/util.hh" # include "nix/util/monitor-fd.hh" diff --git a/src/libutil-tests/nar-listing.cc b/src/libutil-tests/nar-listing.cc new file mode 100644 index 00000000000..a2b8650481c --- /dev/null +++ b/src/libutil-tests/nar-listing.cc @@ -0,0 +1,83 @@ +#include + +#include "nix/util/nar-accessor.hh" +#include "nix/util/tests/json-characterization.hh" + +namespace nix { + +// Forward declaration from memory-source-accessor.cc +namespace memory_source_accessor { +ref exampleComplex(); +} + +/* ---------------------------------------------------------------------------- + * JSON + * --------------------------------------------------------------------------*/ + +class NarListingTest : public virtual CharacterizationTest +{ + std::filesystem::path unitTestData = getUnitTestData() / "nar-listing"; + +public: + + std::filesystem::path goldenMaster(std::string_view testStem) const override + { + return unitTestData / testStem; + } +}; + +using nlohmann::json; + +struct NarListingJsonTest : NarListingTest, + JsonCharacterizationTest, + ::testing::WithParamInterface> +{}; + +TEST_P(NarListingJsonTest, from_json) +{ + auto & [name, expected] = GetParam(); + readJsonTest(name, expected); +} + +TEST_P(NarListingJsonTest, to_json) +{ + auto & [name, value] = GetParam(); + writeJsonTest(name, value); +} + +INSTANTIATE_TEST_SUITE_P( + NarListingJSON, + NarListingJsonTest, + ::testing::Values( + std::pair{ + "deep", + listNarDeep(*memory_source_accessor::exampleComplex(), CanonPath::root), + })); + +struct ShallowNarListingJsonTest : NarListingTest, + JsonCharacterizationTest, + ::testing::WithParamInterface> +{}; + +TEST_P(ShallowNarListingJsonTest, from_json) +{ + auto & [name, expected] = GetParam(); + readJsonTest(name, expected); +} + +TEST_P(ShallowNarListingJsonTest, to_json) +{ + auto & [name, 
value] = GetParam(); + writeJsonTest(name, value); +} + +INSTANTIATE_TEST_SUITE_P( + ShallowNarListingJSON, + ShallowNarListingJsonTest, + ::testing::Values( + std::pair{ + "shallow", + listNarShallow(*memory_source_accessor::exampleComplex(), CanonPath::root), + })); + +} // namespace nix diff --git a/src/libutil-tests/package.nix b/src/libutil-tests/package.nix index 077d36a4d82..c06de6894af 100644 --- a/src/libutil-tests/package.nix +++ b/src/libutil-tests/package.nix @@ -61,7 +61,6 @@ mkMesonExecutable (finalAttrs: { mkdir -p "$HOME" '' + '' - export ASAN_OPTIONS=abort_on_error=1:print_summary=1:detect_leaks=0 export _NIX_TEST_UNIT_DATA=${./data} ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} touch $out diff --git a/src/libutil-tests/strings.cc b/src/libutil-tests/strings.cc index bd740ce0cf4..dbbecd514e5 100644 --- a/src/libutil-tests/strings.cc +++ b/src/libutil-tests/strings.cc @@ -494,4 +494,63 @@ TEST(shellSplitString, testUnbalancedQuotes) ASSERT_THROW(shellSplitString("foo\"bar\\\""), Error); } +/* ---------------------------------------------------------------------------- + * optionalBracket + * --------------------------------------------------------------------------*/ + +TEST(optionalBracket, emptyContent) +{ + ASSERT_EQ(optionalBracket(" (", "", ")"), ""); +} + +TEST(optionalBracket, nonEmptyContent) +{ + ASSERT_EQ(optionalBracket(" (", "foo", ")"), " (foo)"); +} + +TEST(optionalBracket, emptyPrefixAndSuffix) +{ + ASSERT_EQ(optionalBracket("", "foo", ""), "foo"); +} + +TEST(optionalBracket, emptyContentEmptyBrackets) +{ + ASSERT_EQ(optionalBracket("", "", ""), ""); +} + +TEST(optionalBracket, complexBrackets) +{ + ASSERT_EQ(optionalBracket(" [[[", "content", "]]]"), " [[[content]]]"); +} + +TEST(optionalBracket, onlyPrefix) +{ + ASSERT_EQ(optionalBracket("prefix", "content", ""), "prefixcontent"); +} + +TEST(optionalBracket, onlySuffix) +{ + ASSERT_EQ(optionalBracket("", "content", "suffix"), "contentsuffix"); 
+} + +TEST(optionalBracket, optionalWithValue) +{ + ASSERT_EQ(optionalBracket(" (", std::optional("foo"), ")"), " (foo)"); +} + +TEST(optionalBracket, optionalNullopt) +{ + ASSERT_EQ(optionalBracket(" (", std::optional(std::nullopt), ")"), ""); +} + +TEST(optionalBracket, optionalEmptyString) +{ + ASSERT_EQ(optionalBracket(" (", std::optional(""), ")"), ""); +} + +TEST(optionalBracket, optionalStringViewWithValue) +{ + ASSERT_EQ(optionalBracket(" (", std::optional("bar"), ")"), " (bar)"); +} + } // namespace nix diff --git a/src/libutil-tests/topo-sort.cc b/src/libutil-tests/topo-sort.cc new file mode 100644 index 00000000000..91030247e67 --- /dev/null +++ b/src/libutil-tests/topo-sort.cc @@ -0,0 +1,318 @@ +#include +#include +#include +#include +#include + +#include + +#include "nix/util/topo-sort.hh" +#include "nix/util/util.hh" + +namespace nix { + +/** + * Helper function to create a graph and run topoSort + */ +TopoSortResult +runTopoSort(const std::set & nodes, const std::map> & edges) +{ + return topoSort( + nodes, + std::function(const std::string &)>( + [&](const std::string & node) -> std::set { + auto it = edges.find(node); + return it != edges.end() ? it->second : std::set{}; + })); +} + +/** + * Helper to check if a sorted result respects dependencies + * + * @note `topoSort` returns results in REVERSE topological order (see + * line 61 of topo-sort.hh). This means dependents come BEFORE their + * dependencies in the output. + * + * In the edges std::map, if parent -> child, it means parent depends on + * child, so parent must come BEFORE child in the output from topoSort. 
+ */ +bool isValidTopologicalOrder( + const std::vector & sorted, const std::map> & edges) +{ + std::map position; + for (size_t i = 0; i < sorted.size(); ++i) { + position[sorted[i]] = i; + } + + // For each edge parent -> children, parent depends on children + // topoSort reverses the output, so parent comes BEFORE children + for (const auto & [parent, children] : edges) { + for (const auto & child : children) { + if (position.count(parent) && position.count(child)) { + // parent should come before child (have a smaller index) + if (position[parent] > position[child]) { + return false; + } + } + } + } + return true; +} + +// ============================================================================ +// Parametrized Tests for Topological Sort +// ============================================================================ + +struct ExpectSuccess +{ + std::optional> order; // std::nullopt = any valid order is acceptable +}; + +struct ExpectCycle +{ + std::set involvedNodes; +}; + +using ExpectedResult = std::variant; + +struct TopoSortCase +{ + std::string name; + std::set nodes; + std::map> edges; + ExpectedResult expected; +}; + +class TopoSortTest : public ::testing::TestWithParam +{}; + +TEST_P(TopoSortTest, ProducesCorrectResult) +{ + const auto & testCase = GetParam(); + auto result = runTopoSort(testCase.nodes, testCase.edges); + + std::visit( + overloaded{ + [&](const ExpectSuccess & expect) { + // Success case + ASSERT_TRUE(holds_alternative>(result)) + << "Expected successful sort for: " << testCase.name; + + auto sorted = get>(result); + ASSERT_EQ(sorted.size(), testCase.nodes.size()) + << "Sorted output should contain all nodes for: " << testCase.name; + + ASSERT_TRUE(isValidTopologicalOrder(sorted, testCase.edges)) + << "Invalid topological order for: " << testCase.name; + + if (expect.order) { + ASSERT_EQ(sorted, *expect.order) << "Expected specific order for: " << testCase.name; + } + }, + [&](const ExpectCycle & expect) { + // Cycle detection case 
+ ASSERT_TRUE(holds_alternative>(result)) + << "Expected cycle detection for: " << testCase.name; + + auto cycle = get>(result); + + // Verify that the cycle involves expected nodes + ASSERT_TRUE(expect.involvedNodes.count(cycle.path) > 0) + << "Cycle path '" << cycle.path << "' not in expected cycle nodes for: " << testCase.name; + ASSERT_TRUE(expect.involvedNodes.count(cycle.parent) > 0) + << "Cycle parent '" << cycle.parent << "' not in expected cycle nodes for: " << testCase.name; + + // Verify that there's actually an edge in the cycle + auto it = testCase.edges.find(cycle.parent); + ASSERT_TRUE(it != testCase.edges.end()) << "Parent node should have edges for: " << testCase.name; + ASSERT_TRUE(it->second.count(cycle.path) > 0) + << "Should be an edge from parent to path for: " << testCase.name; + }}, + testCase.expected); +} + +INSTANTIATE_TEST_SUITE_P( + TopoSort, + TopoSortTest, + ::testing::Values( + // Success cases + TopoSortCase{ + .name = "EmptySet", + .nodes = {}, + .edges = {}, + .expected = ExpectSuccess{.order = std::vector{}}, + }, + TopoSortCase{ + .name = "SingleNode", + .nodes = {"A"}, + .edges = {}, + .expected = ExpectSuccess{.order = std::vector{"A"}}, + }, + TopoSortCase{ + .name = "TwoIndependentNodes", + .nodes = {"A", "B"}, + .edges = {}, + // Order between independent nodes is unspecified + .expected = ExpectSuccess{.order = std::nullopt}, + }, + TopoSortCase{ + .name = "SimpleChain", + .nodes = {"A", "B", "C"}, + .edges{ + {"A", {"B"}}, + {"B", {"C"}}, + }, + .expected = ExpectSuccess{.order = std::vector{"A", "B", "C"}}, + }, + TopoSortCase{ + .name = "SimpleDag", + .nodes = {"A", "B", "C", "D"}, + .edges{ + {"A", {"B", "C"}}, + {"B", {"D"}}, + {"C", {"D"}}, + }, + .expected = ExpectSuccess{.order = std::nullopt}, + }, + TopoSortCase{ + .name = "DiamondDependency", + .nodes = {"A", "B", "C", "D"}, + .edges{ + {"A", {"B", "C"}}, + {"B", {"D"}}, + {"C", {"D"}}, + }, + .expected = ExpectSuccess{.order = std::nullopt}, + }, + 
TopoSortCase{ + .name = "DisconnectedComponents", + .nodes = {"A", "B", "C", "D"}, + .edges{ + {"A", {"B"}}, + {"C", {"D"}}, + }, + .expected = ExpectSuccess{.order = std::nullopt}, + }, + TopoSortCase{ + .name = "NodeWithNoReferences", + .nodes = {"A", "B", "C"}, + .edges{ + {"A", {"B"}}, + // C has no dependencies + }, + .expected = ExpectSuccess{.order = std::nullopt}, + }, + TopoSortCase{ + .name = "MissingReferences", + .nodes = {"A", "B"}, + .edges{ + // Z doesn't exist in nodes, should be ignored + {"A", {"B", "Z"}}, + }, + .expected = ExpectSuccess{.order = std::vector{"A", "B"}}, + }, + TopoSortCase{ + .name = "ComplexDag", + .nodes = {"A", "B", "C", "D", "E", "F", "G", "H"}, + .edges{ + {"A", {"B", "C", "D"}}, + {"B", {"E", "F"}}, + {"C", {"E", "F"}}, + {"D", {"G"}}, + {"E", {"H"}}, + {"F", {"H"}}, + {"G", {"H"}}, + }, + .expected = ExpectSuccess{.order = std::nullopt}, + }, + TopoSortCase{ + .name = "LongChain", + .nodes = {"A", "B", "C", "D", "E", "F", "G", "H"}, + .edges{ + {"A", {"B"}}, + {"B", {"C"}}, + {"C", {"D"}}, + {"D", {"E"}}, + {"E", {"F"}}, + {"F", {"G"}}, + {"G", {"H"}}, + }, + .expected = ExpectSuccess{.order = std::vector{"A", "B", "C", "D", "E", "F", "G", "H"}}, + }, + TopoSortCase{ + .name = "SelfLoopIgnored", + .nodes = {"A"}, + .edges{ + // Self-reference should be ignored per line 41 of topo-sort.hh + {"A", {"A"}}, + }, + .expected = ExpectSuccess{.order = std::vector{"A"}}, + }, + TopoSortCase{ + .name = "SelfLoopInChainIgnored", + .nodes = {"A", "B", "C"}, + .edges{ + // B has self-reference that should be ignored + {"A", {"B"}}, + {"B", {"B", "C"}}, + }, + .expected = ExpectSuccess{.order = std::vector{"A", "B", "C"}}, + }, + // Cycle detection cases + TopoSortCase{ + .name = "TwoNodeCycle", + .nodes = {"A", "B"}, + .edges{ + {"A", {"B"}}, + {"B", {"A"}}, + }, + .expected = ExpectCycle{.involvedNodes = {"A", "B"}}, + }, + TopoSortCase{ + .name = "ThreeNodeCycle", + .nodes = {"A", "B", "C"}, + .edges{ + {"A", {"B"}}, + {"B", {"C"}}, 
+ {"C", {"A"}}, + }, + .expected = ExpectCycle{.involvedNodes = {"A", "B", "C"}}, + }, + TopoSortCase{ + .name = "CycleInLargerGraph", + .nodes = {"A", "B", "C", "D"}, + .edges{ + {"A", {"B"}}, + {"B", {"C"}}, + {"C", {"A"}}, + {"D", {"A"}}, + }, + .expected = ExpectCycle{.involvedNodes = {"A", "B", "C"}}, + }, + TopoSortCase{ + .name = "MultipleCycles", + .nodes = {"A", "B", "C", "D"}, + .edges{ + {"A", {"B"}}, + {"B", {"A"}}, + {"C", {"D"}}, + {"D", {"C"}}, + }, + // Either cycle is valid + .expected = ExpectCycle{.involvedNodes = {"A", "B", "C", "D"}}, + }, + TopoSortCase{ + .name = "ComplexCycleWithBranches", + .nodes = {"A", "B", "C", "D", "E"}, + .edges{ + // Cycle: B -> D -> E -> B + {"A", {"B", "C"}}, + {"B", {"D"}}, + {"C", {"D"}}, + {"D", {"E"}}, + {"E", {"B"}}, + }, + .expected = ExpectCycle{.involvedNodes = {"B", "D", "E"}}, + })); + +} // namespace nix diff --git a/src/libutil-tests/url.cc b/src/libutil-tests/url.cc index cd681609670..356a134dc9b 100644 --- a/src/libutil-tests/url.cc +++ b/src/libutil-tests/url.cc @@ -35,10 +35,10 @@ INSTANTIATE_TEST_SUITE_P( // Already proper URL with git+ssh FixGitURLParam{ .input = "git+ssh://user@domain:1234/path", - .expected = "git+ssh://user@domain:1234/path", + .expected = "ssh://user@domain:1234/path", .parsed = ParsedURL{ - .scheme = "git+ssh", + .scheme = "ssh", .authority = ParsedURL::Authority{ .host = "domain", diff --git a/src/libutil-tests/util.cc b/src/libutil-tests/util.cc index c48b97e8e72..a299cd97823 100644 --- a/src/libutil-tests/util.cc +++ b/src/libutil-tests/util.cc @@ -146,6 +146,59 @@ TEST(string2Int, trivialConversions) ASSERT_EQ(string2Int("-100"), -100); } +/* ---------------------------------------------------------------------------- + * getSizeUnit + * --------------------------------------------------------------------------*/ + +TEST(getSizeUnit, misc) +{ + ASSERT_EQ(getSizeUnit(0), SizeUnit::Base); + ASSERT_EQ(getSizeUnit(100), SizeUnit::Base); + ASSERT_EQ(getSizeUnit(100), 
SizeUnit::Base); + ASSERT_EQ(getSizeUnit(972), SizeUnit::Base); + ASSERT_EQ(getSizeUnit(973), SizeUnit::Base); // FIXME: should round down + ASSERT_EQ(getSizeUnit(1024), SizeUnit::Base); + ASSERT_EQ(getSizeUnit(-1024), SizeUnit::Base); + ASSERT_EQ(getSizeUnit(1024 * 1024), SizeUnit::Kilo); + ASSERT_EQ(getSizeUnit(1100 * 1024), SizeUnit::Mega); + ASSERT_EQ(getSizeUnit(2ULL * 1024 * 1024 * 1024), SizeUnit::Giga); + ASSERT_EQ(getSizeUnit(2100ULL * 1024 * 1024 * 1024), SizeUnit::Tera); +} + +/* ---------------------------------------------------------------------------- + * getCommonSizeUnit + * --------------------------------------------------------------------------*/ + +TEST(getCommonSizeUnit, misc) +{ + ASSERT_EQ(getCommonSizeUnit({0}), SizeUnit::Base); + ASSERT_EQ(getCommonSizeUnit({0, 100}), SizeUnit::Base); + ASSERT_EQ(getCommonSizeUnit({100, 0}), SizeUnit::Base); + ASSERT_EQ(getCommonSizeUnit({100, 1024 * 1024}), std::nullopt); + ASSERT_EQ(getCommonSizeUnit({1024 * 1024, 100}), std::nullopt); + ASSERT_EQ(getCommonSizeUnit({1024 * 1024, 1024 * 1024}), SizeUnit::Kilo); + ASSERT_EQ(getCommonSizeUnit({2100ULL * 1024 * 1024 * 1024, 2100ULL * 1024 * 1024 * 1024}), SizeUnit::Tera); +} + +/* ---------------------------------------------------------------------------- + * renderSizeWithoutUnit + * --------------------------------------------------------------------------*/ + +TEST(renderSizeWithoutUnit, misc) +{ + ASSERT_EQ(renderSizeWithoutUnit(0, SizeUnit::Base, true), " 0.0"); + ASSERT_EQ(renderSizeWithoutUnit(100, SizeUnit::Base, true), " 0.1"); + ASSERT_EQ(renderSizeWithoutUnit(100, SizeUnit::Base), "0.1"); + ASSERT_EQ(renderSizeWithoutUnit(972, SizeUnit::Base, true), " 0.9"); + ASSERT_EQ(renderSizeWithoutUnit(973, SizeUnit::Base, true), " 1.0"); // FIXME: should round down + ASSERT_EQ(renderSizeWithoutUnit(1024, SizeUnit::Base, true), " 1.0"); + ASSERT_EQ(renderSizeWithoutUnit(-1024, SizeUnit::Base, true), " -1.0"); + ASSERT_EQ(renderSizeWithoutUnit(1024 * 1024, 
SizeUnit::Kilo, true), "1024.0"); + ASSERT_EQ(renderSizeWithoutUnit(1100 * 1024, SizeUnit::Mega, true), " 1.1"); + ASSERT_EQ(renderSizeWithoutUnit(2ULL * 1024 * 1024 * 1024, SizeUnit::Giga, true), " 2.0"); + ASSERT_EQ(renderSizeWithoutUnit(2100ULL * 1024 * 1024 * 1024, SizeUnit::Tera, true), " 2.1"); +} + /* ---------------------------------------------------------------------------- * renderSize * --------------------------------------------------------------------------*/ @@ -158,6 +211,7 @@ TEST(renderSize, misc) ASSERT_EQ(renderSize(972, true), " 0.9 KiB"); ASSERT_EQ(renderSize(973, true), " 1.0 KiB"); // FIXME: should round down ASSERT_EQ(renderSize(1024, true), " 1.0 KiB"); + ASSERT_EQ(renderSize(-1024, true), " -1.0 KiB"); ASSERT_EQ(renderSize(1024 * 1024, true), "1024.0 KiB"); ASSERT_EQ(renderSize(1100 * 1024, true), " 1.1 MiB"); ASSERT_EQ(renderSize(2ULL * 1024 * 1024 * 1024, true), " 2.0 GiB"); diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index b8fef9ef3d7..0291d682729 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -6,6 +6,7 @@ #include // for strcasecmp #include "nix/util/archive.hh" +#include "nix/util/alignment.hh" #include "nix/util/config-global.hh" #include "nix/util/posix-source-accessor.hh" #include "nix/util/source-path.hh" @@ -46,12 +47,12 @@ void SourceAccessor::dumpPath(const CanonPath & path, Sink & sink, PathFilter & writePadding(*size, sink); }; - std::function dump; + sink << narVersionMagic1; - dump = [&](const CanonPath & path) { + [&, &this_(*this)](this const auto & dump, const CanonPath & path) -> void { checkInterrupt(); - auto st = lstat(path); + auto st = this_.lstat(path); sink << "("; @@ -68,7 +69,7 @@ void SourceAccessor::dumpPath(const CanonPath & path, Sink & sink, PathFilter & /* If we're on a case-insensitive system like macOS, undo the case hack applied by restorePath(). 
*/ StringMap unhacked; - for (auto & i : readDirectory(path)) + for (auto & i : this_.readDirectory(path)) if (archiveSettings.useCaseHack) { std::string name(i.first); size_t pos = i.first.find(caseHackSuffix); @@ -91,23 +92,20 @@ void SourceAccessor::dumpPath(const CanonPath & path, Sink & sink, PathFilter & } else if (st.type == tSymlink) - sink << "type" << "symlink" << "target" << readLink(path); + sink << "type" << "symlink" << "target" << this_.readLink(path); else throw Error("file '%s' has an unsupported type", path); sink << ")"; - }; - - sink << narVersionMagic1; - dump(path); + }(path); } time_t dumpPathAndGetMtime(const Path & path, Sink & sink, PathFilter & filter) { - auto path2 = PosixSourceAccessor::createAtRoot(path); + auto path2 = PosixSourceAccessor::createAtRoot(path, /*trackLastModified=*/true); path2.dumpPath(sink, filter); - return path2.accessor.dynamic_pointer_cast()->mtime; + return path2.accessor->getLastModified().value(); } void dumpPath(const Path & path, Sink & sink, PathFilter & filter) @@ -133,7 +131,7 @@ static void parseContents(CreateRegularFileSink & sink, Source & source) sink.preallocateContents(size); if (sink.skipContents) { - source.skip(size + (size % 8 ? 8 - (size % 8) : 0)); + source.skip(alignUp(size, 8)); return; } @@ -202,54 +200,54 @@ static void parse(FileSystemObjectSink & sink, Source & source, const CanonPath } else if (type == "directory") { - sink.createDirectory(path); + sink.createDirectory(path, [&](FileSystemObjectSink & dirSink, const CanonPath & relDirPath) { + std::map names; - std::map names; + std::string prevName; - std::string prevName; + while (1) { + auto tag = getString(); - while (1) { - auto tag = getString(); + if (tag == ")") + break; - if (tag == ")") - break; - - if (tag != "entry") - throw badArchive("expected tag 'entry' or ')', got '%s'", tag); - - expectTag("("); - - expectTag("name"); - - auto name = getString(); - if (name.empty() || name == "." || name == ".." 
|| name.find('/') != std::string::npos - || name.find((char) 0) != std::string::npos) - throw badArchive("NAR contains invalid file name '%1%'", name); - if (name <= prevName) - throw badArchive("NAR directory is not sorted"); - prevName = name; - if (archiveSettings.useCaseHack) { - auto i = names.find(name); - if (i != names.end()) { - debug("case collision between '%1%' and '%2%'", i->first, name); - name += caseHackSuffix; - name += std::to_string(++i->second); - auto j = names.find(name); - if (j != names.end()) - throw badArchive( - "NAR contains file name '%s' that collides with case-hacked file name '%s'", - prevName, - j->first); - } else - names[name] = 0; - } + if (tag != "entry") + throw badArchive("expected tag 'entry' or ')', got '%s'", tag); - expectTag("node"); + expectTag("("); - parse(sink, source, path / name); + expectTag("name"); - expectTag(")"); - } + auto name = getString(); + if (name.empty() || name == "." || name == ".." || name.find('/') != std::string::npos + || name.find((char) 0) != std::string::npos) + throw badArchive("NAR contains invalid file name '%1%'", name); + if (name <= prevName) + throw badArchive("NAR directory is not sorted"); + prevName = name; + if (archiveSettings.useCaseHack) { + auto i = names.find(name); + if (i != names.end()) { + debug("case collision between '%1%' and '%2%'", i->first, name); + name += caseHackSuffix; + name += std::to_string(++i->second); + auto j = names.find(name); + if (j != names.end()) + throw badArchive( + "NAR contains file name '%s' that collides with case-hacked file name '%s'", + prevName, + j->first); + } else + names[name] = 0; + } + + expectTag("node"); + + parse(dirSink, source, relDirPath / name); + + expectTag(")"); + } + }); } else if (type == "symlink") { diff --git a/src/libutil/args.cc b/src/libutil/args.cc index 5faf9dd43ea..bd3dc9c95df 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -371,13 +371,13 @@ void RootArgs::parseCmdline(const Strings & _cmdline, bool 
allowShebang) d.completer(*completions, d.n, d.prefix); } -Path Args::getCommandBaseDir() const +std::filesystem::path Args::getCommandBaseDir() const { assert(parent); return parent->getCommandBaseDir(); } -Path RootArgs::getCommandBaseDir() const +std::filesystem::path RootArgs::getCommandBaseDir() const { return commandBaseDir; } diff --git a/src/libutil/canon-path.cc b/src/libutil/canon-path.cc index 07a3a619386..22ca3e066a9 100644 --- a/src/libutil/canon-path.cc +++ b/src/libutil/canon-path.cc @@ -3,23 +3,41 @@ #include "nix/util/file-path-impl.hh" #include "nix/util/strings-inline.hh" +#include + namespace nix { -CanonPath CanonPath::root = CanonPath("/"); +const CanonPath CanonPath::root = CanonPath("/"); static std::string absPathPure(std::string_view path) { return canonPathInner(path, [](auto &, auto &) {}); } +static void ensureNoNullBytes(std::string_view s) +{ + if (std::memchr(s.data(), '\0', s.size())) [[unlikely]] { + using namespace std::string_view_literals; + auto str = replaceStrings(std::string(s), "\0"sv, "␀"sv); + throw BadCanonPath("path segment '%s' must not contain null (\\0) bytes", str); + } +} + CanonPath::CanonPath(std::string_view raw) : path(absPathPure(concatStrings("/", raw))) +{ + ensureNoNullBytes(raw); +} + +CanonPath::CanonPath(const char * raw) + : path(absPathPure(concatStrings("/", raw))) { } CanonPath::CanonPath(std::string_view raw, const CanonPath & root) : path(absPathPure(raw.size() > 0 && raw[0] == '/' ? raw : concatStrings(root.abs(), "/", raw))) { + ensureNoNullBytes(raw); } CanonPath::CanonPath(const std::vector & elems) @@ -80,6 +98,7 @@ void CanonPath::push(std::string_view c) { assert(c.find('/') == c.npos); assert(c != "." 
&& c != ".."); + ensureNoNullBytes(c); if (!isRoot()) path += '/'; path += c; diff --git a/src/libutil/config-global.cc b/src/libutil/config-global.cc index cd461ea4850..b63b4aaa1bb 100644 --- a/src/libutil/config-global.cc +++ b/src/libutil/config-global.cc @@ -4,6 +4,12 @@ namespace nix { +GlobalConfig::ConfigRegistrations & GlobalConfig::configRegistrations() +{ + static GlobalConfig::ConfigRegistrations configRegistrations; + return configRegistrations; +} + bool GlobalConfig::set(const std::string & name, const std::string & value) { for (auto & config : configRegistrations()) diff --git a/src/libutil/configuration.cc b/src/libutil/configuration.cc index ca3c08cd9b3..407320a6b51 100644 --- a/src/libutil/configuration.cc +++ b/src/libutil/configuration.cc @@ -330,12 +330,27 @@ void BaseSetting::convertToArg(Args & args, const std::string & category) }); } +template<> +std::list BaseSetting>::parse(const std::string & str) const +{ + auto tokens = tokenizeString>(str); + return {tokens.begin(), tokens.end()}; +} + template<> Strings BaseSetting::parse(const std::string & str) const { return tokenizeString(str); } +template<> +void BaseSetting>::appendOrSet(std::list newValue, bool append) +{ + if (!append) + value.clear(); + value.insert(value.end(), std::make_move_iterator(newValue.begin()), std::make_move_iterator(newValue.end())); +} + template<> void BaseSetting::appendOrSet(Strings newValue, bool append) { @@ -344,6 +359,14 @@ void BaseSetting::appendOrSet(Strings newValue, bool append) value.insert(value.end(), std::make_move_iterator(newValue.begin()), std::make_move_iterator(newValue.end())); } +template<> +std::string BaseSetting>::to_string() const +{ + return concatStringsSep(" ", value | std::views::transform([](const auto & p) { + return p.string(); + }) | std::ranges::to>()); +} + template<> std::string BaseSetting::to_string() const { @@ -433,6 +456,42 @@ std::string BaseSetting::to_string() const [](const auto & kvpair) { return kvpair.first + 
"=" + kvpair.second; }); } +static Path parsePath(const AbstractSetting & s, const std::string & str) +{ + if (str == "") + throw UsageError("setting '%s' is a path and paths cannot be empty", s.name); + else + return canonPath(str); +} + +template<> +std::filesystem::path BaseSetting::parse(const std::string & str) const +{ + return parsePath(*this, str); +} + +template<> +std::string BaseSetting::to_string() const +{ + return value.string(); +} + +template<> +std::optional +BaseSetting>::parse(const std::string & str) const +{ + if (str == "") + return std::nullopt; + else + return parsePath(*this, str); +} + +template<> +std::string BaseSetting>::to_string() const +{ + return value ? value->string() : ""; +} + template class BaseSetting; template class BaseSetting; template class BaseSetting; @@ -441,18 +500,13 @@ template class BaseSetting; template class BaseSetting; template class BaseSetting; template class BaseSetting; +template class BaseSetting>; template class BaseSetting; template class BaseSetting; template class BaseSetting; template class BaseSetting>; - -static Path parsePath(const AbstractSetting & s, const std::string & str) -{ - if (str == "") - throw UsageError("setting '%s' is a path and paths cannot be empty", s.name); - else - return canonPath(str); -} +template class BaseSetting; +template class BaseSetting>; PathSetting::PathSetting( Config * options, @@ -500,10 +554,10 @@ bool ExperimentalFeatureSettings::isEnabled(const ExperimentalFeature & feature) return std::find(f.begin(), f.end(), feature) != f.end(); } -void ExperimentalFeatureSettings::require(const ExperimentalFeature & feature) const +void ExperimentalFeatureSettings::require(const ExperimentalFeature & feature, std::string reason) const { if (!isEnabled(feature)) - throw MissingExperimentalFeature(feature); + throw MissingExperimentalFeature(feature, std::move(reason)); } bool ExperimentalFeatureSettings::isEnabled(const std::optional & feature) const diff --git 
a/src/libutil/current-process.cc b/src/libutil/current-process.cc index c7d3b78d0a8..5c48a4f7770 100644 --- a/src/libutil/current-process.cc +++ b/src/libutil/current-process.cc @@ -7,6 +7,7 @@ #include "nix/util/file-system.hh" #include "nix/util/processes.hh" #include "nix/util/signals.hh" +#include "nix/util/environment-variables.hh" #include #ifdef __APPLE__ @@ -65,13 +66,27 @@ void setStackSize(size_t stackSize) struct rlimit limit; if (getrlimit(RLIMIT_STACK, &limit) == 0 && static_cast(limit.rlim_cur) < stackSize) { savedStackSize = limit.rlim_cur; - limit.rlim_cur = std::min(static_cast(stackSize), limit.rlim_max); + if (limit.rlim_max < static_cast(stackSize)) { + if (getEnv("_NIX_TEST_NO_ENVIRONMENT_WARNINGS") != "1") { + logger->log( + lvlWarn, + HintFmt( + "Stack size hard limit is %1%, which is less than the desired %2%. If possible, increase the hard limit, e.g. with 'ulimit -Hs %3%'.", + limit.rlim_max, + stackSize, + stackSize / 1024) + .str()); + } + } + auto requestedSize = std::min(static_cast(stackSize), limit.rlim_max); + limit.rlim_cur = requestedSize; if (setrlimit(RLIMIT_STACK, &limit) != 0) { logger->log( lvlError, HintFmt( - "Failed to increase stack size from %1% to %2% (maximum allowed stack size: %3%): %4%", + "Failed to increase stack size from %1% to %2% (desired: %3%, maximum allowed: %4%): %5%", savedStackSize, + requestedSize, stackSize, limit.rlim_max, std::strerror(errno)) @@ -109,7 +124,7 @@ std::optional getSelfExe() { static auto cached = []() -> std::optional { #if defined(__linux__) || defined(__GNU__) - return readLink("/proc/self/exe"); + return readLink(std::filesystem::path{"/proc/self/exe"}); #elif defined(__APPLE__) char buf[1024]; uint32_t size = sizeof(buf); @@ -134,6 +149,11 @@ std::optional getSelfExe() return std::nullopt; } + // FreeBSD's sysctl(KERN_PROC_PATHNAME) includes the null terminator in + // pathLen. 
Strip it to prevent Nix evaluation errors when the path is + // serialized to JSON and evaluated as a Nix string. + path.pop_back(); + return Path(path.begin(), path.end()); #else return std::nullopt; diff --git a/src/libutil/experimental-features.cc b/src/libutil/experimental-features.cc index b9034821733..757578cff1c 100644 --- a/src/libutil/experimental-features.cc +++ b/src/libutil/experimental-features.cc @@ -1,5 +1,6 @@ #include "nix/util/experimental-features.hh" #include "nix/util/fmt.hh" +#include "nix/util/strings.hh" #include "nix/util/util.hh" #include @@ -79,7 +80,6 @@ constexpr std::array xpFeatureDetails Enable the use of the [`fetchTree`](@docroot@/language/builtins.md#builtins-fetchTree) built-in function in the Nix language. `fetchTree` exposes a generic interface for fetching remote file system trees from different types of remote sources. - The [`flakes`](#xp-feature-flakes) feature flag always enables `fetch-tree`. This built-in was previously guarded by the `flakes` experimental feature because of that overlap. Enabling just this feature serves as a "release candidate", allowing users to try it out in isolation. @@ -252,7 +252,7 @@ constexpr std::array xpFeatureDetails .tag = Xp::LocalOverlayStore, .name = "local-overlay-store", .description = R"( - Allow the use of [local overlay store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-overlay-store). + Allow the use of [local overlay store](@docroot@/command-ref/new-cli/nix3-help-stores.md#experimental-local-overlay-store). 
)", .trackingUrl = "https://github.com/NixOS/nix/milestone/50", }, @@ -382,11 +382,13 @@ std::set parseFeatures(const StringSet & rawFeatures) return res; } -MissingExperimentalFeature::MissingExperimentalFeature(ExperimentalFeature feature) +MissingExperimentalFeature::MissingExperimentalFeature(ExperimentalFeature feature, std::string reason) : Error( - "experimental Nix feature '%1%' is disabled; add '--extra-experimental-features %1%' to enable it", - showExperimentalFeature(feature)) + "experimental Nix feature '%1%' is disabled%2%; add '--extra-experimental-features %1%' to enable it", + showExperimentalFeature(feature), + Uncolored(optionalBracket(" (", reason, ")"))) , missingFeature(feature) + , reason{reason} { } diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc index fba92dc8ec5..4851d8cfb57 100644 --- a/src/libutil/file-system.cc +++ b/src/libutil/file-system.cc @@ -101,9 +101,11 @@ Path absPath(PathView path, std::optional dir, bool resolveSymlinks) return canonPath(path, resolveSymlinks); } -std::filesystem::path absPath(const std::filesystem::path & path, bool resolveSymlinks) +std::filesystem::path +absPath(const std::filesystem::path & path, const std::filesystem::path * dir_, bool resolveSymlinks) { - return absPath(path.string(), std::nullopt, resolveSymlinks); + std::optional dir = dir_ ? 
std::optional{dir_->string()} : std::nullopt; + return absPath(PathView{path.string()}, dir.transform([](auto & p) { return PathView(p); }), resolveSymlinks); } Path canonPath(PathView path, bool resolveSymlinks) @@ -242,10 +244,15 @@ bool pathAccessible(const std::filesystem::path & path) } } -Path readLink(const Path & path) +std::filesystem::path readLink(const std::filesystem::path & path) { checkInterrupt(); - return std::filesystem::read_symlink(path).string(); + return std::filesystem::read_symlink(path); +} + +Path readLink(const Path & path) +{ + return readLink(std::filesystem::path{path}).string(); } std::string readFile(const Path & path) @@ -253,8 +260,7 @@ std::string readFile(const Path & path) AutoCloseFD fd = toDescriptor(open( path.c_str(), O_RDONLY -// TODO -#ifndef _WIN32 +#ifdef O_CLOEXEC | O_CLOEXEC #endif )); @@ -287,8 +293,7 @@ void readFile(const Path & path, Sink & sink, bool memory_map) AutoCloseFD fd = toDescriptor(open( path.c_str(), O_RDONLY -// TODO -#ifndef _WIN32 +#ifdef O_CLOEXEC | O_CLOEXEC #endif )); @@ -302,8 +307,7 @@ void writeFile(const Path & path, std::string_view s, mode_t mode, FsSync sync) AutoCloseFD fd = toDescriptor(open( path.c_str(), O_WRONLY | O_TRUNC | O_CREAT -// TODO -#ifndef _WIN32 +#ifdef O_CLOEXEC | O_CLOEXEC #endif , @@ -337,8 +341,7 @@ void writeFile(const Path & path, Source & source, mode_t mode, FsSync sync) AutoCloseFD fd = toDescriptor(open( path.c_str(), O_WRONLY | O_TRUNC | O_CREAT -// TODO -#ifndef _WIN32 +#ifdef O_CLOEXEC | O_CLOEXEC #endif , @@ -669,18 +672,18 @@ void AutoUnmount::cancel() ////////////////////////////////////////////////////////////////////// -std::string defaultTempDir() +std::filesystem::path defaultTempDir() { return getEnvNonEmpty("TMPDIR").value_or("/tmp"); } -Path createTempDir(const Path & tmpRoot, const Path & prefix, mode_t mode) +std::filesystem::path createTempDir(const std::filesystem::path & tmpRoot, const std::string & prefix, mode_t mode) { while (1) { 
checkInterrupt(); - Path tmpDir = makeTempPath(tmpRoot, prefix); + std::filesystem::path tmpDir = makeTempPath(tmpRoot, prefix); if (mkdir( - tmpDir.c_str() + tmpDir.string().c_str() #ifndef _WIN32 // TODO abstract mkdir perms for Windows , mode @@ -706,11 +709,31 @@ Path createTempDir(const Path & tmpRoot, const Path & prefix, mode_t mode) } } +AutoCloseFD createAnonymousTempFile() +{ + AutoCloseFD fd; +#ifdef O_TMPFILE + fd = ::open(defaultTempDir().c_str(), O_TMPFILE | O_CLOEXEC | O_RDWR, S_IWUSR | S_IRUSR); + if (!fd) + throw SysError("creating anonymous temporary file"); +#else + auto [fd2, path] = createTempFile("nix-anonymous"); + if (!fd2) + throw SysError("creating temporary file '%s'", path); + fd = std::move(fd2); +# ifndef _WIN32 + unlink(requireCString(path)); /* We only care about the file descriptor. */ +# endif +#endif + return fd; +} + std::pair createTempFile(const Path & prefix) { - Path tmpl(defaultTempDir() + "/" + prefix + ".XXXXXX"); + Path tmpl(defaultTempDir().string() + "/" + prefix + ".XXXXXX"); // Strictly speaking, this is UB, but who cares... // FIXME: use O_TMPFILE. + // FIXME: Windows should use FILE_ATTRIBUTE_TEMPORARY | FILE_FLAG_DELETE_ON_CLOSE AutoCloseFD fd = toDescriptor(mkstemp((char *) tmpl.c_str())); if (!fd) throw SysError("creating temporary file '%s'", tmpl); @@ -720,21 +743,20 @@ std::pair createTempFile(const Path & prefix) return {std::move(fd), tmpl}; } -Path makeTempPath(const Path & root, const Path & suffix) +std::filesystem::path makeTempPath(const std::filesystem::path & root, const std::string & suffix) { // start the counter at a random value to minimize issues with preexisting temp paths static std::atomic counter(std::random_device{}()); - auto tmpRoot = canonPath(root.empty() ? defaultTempDir() : root, true); + auto tmpRoot = canonPath(root.empty() ? 
defaultTempDir().string() : root.string(), true); return fmt("%1%/%2%-%3%-%4%", tmpRoot, suffix, getpid(), counter.fetch_add(1, std::memory_order_relaxed)); } void createSymlink(const Path & target, const Path & link) { - try { - std::filesystem::create_symlink(target, link); - } catch (std::filesystem::filesystem_error & e) { - throw SysError("creating symlink '%1%' -> '%2%'", link, target); - } + std::error_code ec; + std::filesystem::create_symlink(target, link, ec); + if (ec) + throw SysError(ec.value(), "creating symlink '%1%' -> '%2%'", link, target); } void replaceSymlink(const std::filesystem::path & target, const std::filesystem::path & link) diff --git a/src/libutil/fs-sink.cc b/src/libutil/fs-sink.cc index 45ef57a9f5b..521a10c9a77 100644 --- a/src/libutil/fs-sink.cc +++ b/src/libutil/fs-sink.cc @@ -34,11 +34,11 @@ void copyRecursive(SourceAccessor & accessor, const CanonPath & from, FileSystem } case SourceAccessor::tDirectory: { - sink.createDirectory(to); - for (auto & [name, _] : accessor.readDirectory(from)) { - copyRecursive(accessor, from / name, sink, to / name); - break; - } + sink.createDirectory(to, [&](FileSystemObjectSink & dirSink, const CanonPath & relDirPath) { + for (auto & [name, _] : accessor.readDirectory(from)) { + copyRecursive(accessor, from / name, dirSink, relDirPath / name); + } + }); break; } @@ -70,11 +70,61 @@ static std::filesystem::path append(const std::filesystem::path & src, const Can return dst; } +#ifndef _WIN32 +void RestoreSink::createDirectory(const CanonPath & path, DirectoryCreatedCallback callback) +{ + if (path.isRoot()) { + createDirectory(path); + callback(*this, path); + return; + } + + createDirectory(path); + assert(dirFd); // If that's not true the above call must have thrown an exception. 
+ + RestoreSink dirSink{startFsync}; + dirSink.dstPath = append(dstPath, path); + dirSink.dirFd = + unix::openFileEnsureBeneathNoSymlinks(dirFd.get(), path, O_RDONLY | O_DIRECTORY | O_NOFOLLOW | O_CLOEXEC); + + if (!dirSink.dirFd) + throw SysError("opening directory '%s'", dirSink.dstPath.string()); + + callback(dirSink, CanonPath::root); +} +#endif + void RestoreSink::createDirectory(const CanonPath & path) { auto p = append(dstPath, path); + +#ifndef _WIN32 + if (dirFd) { + if (path.isRoot()) + /* Trying to create a directory that we already have a file descriptor for. */ + throw Error("path '%s' already exists", p.string()); + + if (::mkdirat(dirFd.get(), path.rel_c_str(), 0777) == -1) + throw SysError("creating directory '%s'", p.string()); + + return; + } +#endif + if (!std::filesystem::create_directory(p)) throw Error("path '%s' already exists", p.string()); + +#ifndef _WIN32 + if (path.isRoot()) { + assert(!dirFd); // Handled above + + /* Open directory for further *at operations relative to the sink root + directory. */ + dirFd = open(p.c_str(), O_RDONLY | O_DIRECTORY | O_NOFOLLOW | O_CLOEXEC); + if (!dirFd) + throw SysError("creating directory '%1%'", p.string()); + } +#endif }; struct RestoreRegularFile : CreateRegularFileSink @@ -114,7 +164,14 @@ void RestoreSink::createRegularFile(const CanonPath & path, std::function '%2%'", p.string(), target); + return; + } +#endif nix::createSymlink(target, p.string()); } diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index d306360ef79..e3d4bb4a8a2 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -13,6 +13,7 @@ #include "nix/util/split.hh" #include "nix/util/base-n.hh" #include "nix/util/base-nix-32.hh" +#include "nix/util/json-utils.hh" #include #include @@ -143,9 +144,13 @@ static HashFormat baseFromSize(std::string_view rest, HashAlgorithm algo) * * @param rest the string view to parse. Must not include any `(:|-)` prefix. 
*/ -static Hash parseLowLevel(std::string_view rest, HashAlgorithm algo, DecodeNamePair pair) +static Hash parseLowLevel( + std::string_view rest, + HashAlgorithm algo, + DecodeNamePair pair, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings) { - Hash res{algo}; + Hash res{algo, xpSettings}; std::string d; try { d = pair.decode(rest); @@ -161,7 +166,7 @@ static Hash parseLowLevel(std::string_view rest, HashAlgorithm algo, DecodeNameP return res; } -Hash Hash::parseSRI(std::string_view original) +Hash Hash::parseSRI(std::string_view original, const ExperimentalFeatureSettings & xpSettings) { auto rest = original; @@ -169,9 +174,9 @@ Hash Hash::parseSRI(std::string_view original) auto hashRaw = splitPrefixTo(rest, '-'); if (!hashRaw) throw BadHash("hash '%s' is not SRI", original); - HashAlgorithm parsedType = parseHashAlgo(*hashRaw); + HashAlgorithm parsedType = parseHashAlgo(*hashRaw, xpSettings); - return parseLowLevel(rest, parsedType, {base64::decode, "SRI"}); + return parseLowLevel(rest, parsedType, {base64::decode, "SRI"}, xpSettings); } /** @@ -179,8 +184,10 @@ Hash Hash::parseSRI(std::string_view original) * * @param resolveAlgo resolves the parsed type (or throws an error when it is not * possible.) + * + * @return the parsed hash and the format it was parsed from */ -static Hash parseAnyHelper(std::string_view rest, auto resolveAlgo) +static std::pair parseAnyHelper(std::string_view rest, auto resolveAlgo) { bool isSRI = false; @@ -200,34 +207,45 @@ static Hash parseAnyHelper(std::string_view rest, auto resolveAlgo) HashAlgorithm algo = resolveAlgo(std::move(optParsedAlgo)); - auto [decode, formatName] = [&]() -> DecodeNamePair { + auto [decode, formatName, format] = [&]() -> std::tuple { if (isSRI) { /* In the SRI case, we always are using Base64. If the length is wrong, get an error later. 
*/ - return {base64::decode, "SRI"}; + return {base64::decode, "SRI", HashFormat::SRI}; } else { /* Otherwise, decide via the length of the hash (for the given algorithm) what base encoding it is. */ - return baseExplicit(baseFromSize(rest, algo)); + auto format = baseFromSize(rest, algo); + auto [decode, formatName] = baseExplicit(format); + return {decode, formatName, format}; } }(); - return parseLowLevel(rest, algo, {decode, formatName}); + return {parseLowLevel(rest, algo, {decode, formatName}), format}; } Hash Hash::parseAnyPrefixed(std::string_view original) { - return parseAnyHelper(original, [&](std::optional optParsedAlgo) { - // Either the string or user must provide the type, if they both do they - // must agree. - if (!optParsedAlgo) - throw BadHash("hash '%s' does not include a type", original); + return parseAnyHelper( + original, + [&](std::optional optParsedAlgo) { + // Either the string or user must provide the type, if they both do they + // must agree. + if (!optParsedAlgo) + throw BadHash("hash '%s' does not include a type", original); - return *optParsedAlgo; - }); + return *optParsedAlgo; + }) + .first; } Hash Hash::parseAny(std::string_view original, std::optional optAlgo) +{ + return parseAnyReturningFormat(original, optAlgo).first; +} + +std::pair +Hash::parseAnyReturningFormat(std::string_view original, std::optional optAlgo) { return parseAnyHelper(original, [&](std::optional optParsedAlgo) { // Either the string or user must provide the type, if they both do they @@ -246,9 +264,10 @@ Hash Hash::parseNonSRIUnprefixed(std::string_view s, HashAlgorithm algo) return parseExplicitFormatUnprefixed(s, algo, baseFromSize(s, algo)); } -Hash Hash::parseExplicitFormatUnprefixed(std::string_view s, HashAlgorithm algo, HashFormat format) +Hash Hash::parseExplicitFormatUnprefixed( + std::string_view s, HashAlgorithm algo, HashFormat format, const ExperimentalFeatureSettings & xpSettings) { - return parseLowLevel(s, algo, baseExplicit(format)); + 
return parseLowLevel(s, algo, baseExplicit(format), xpSettings); } Hash Hash::random(HashAlgorithm algo) @@ -448,10 +467,12 @@ std::string_view printHashFormat(HashFormat HashFormat) } } -std::optional parseHashAlgoOpt(std::string_view s) +std::optional parseHashAlgoOpt(std::string_view s, const ExperimentalFeatureSettings & xpSettings) { - if (s == "blake3") + if (s == "blake3") { + xpSettings.require(Xp::BLAKE3Hashes); return HashAlgorithm::BLAKE3; + } if (s == "md5") return HashAlgorithm::MD5; if (s == "sha1") @@ -463,9 +484,9 @@ std::optional parseHashAlgoOpt(std::string_view s) return std::nullopt; } -HashAlgorithm parseHashAlgo(std::string_view s) +HashAlgorithm parseHashAlgo(std::string_view s, const ExperimentalFeatureSettings & xpSettings) { - auto opt_h = parseHashAlgoOpt(s); + auto opt_h = parseHashAlgoOpt(s, xpSettings); if (opt_h) return *opt_h; else @@ -501,3 +522,20 @@ void to_json(nlohmann::json & json, const Hash & hash) } } // namespace nix + +namespace nlohmann { + +using namespace nix; + +Hash adl_serializer::from_json(const json & json, const ExperimentalFeatureSettings & xpSettings) +{ + auto & s = getString(json); + return Hash::parseSRI(s, xpSettings); +} + +void adl_serializer::to_json(json & json, const Hash & hash) +{ + json = hash.to_string(HashFormat::SRI, true); +} + +} // namespace nlohmann diff --git a/src/libutil/include/nix/util/alignment.hh b/src/libutil/include/nix/util/alignment.hh new file mode 100644 index 00000000000..a4e5af4d6c0 --- /dev/null +++ b/src/libutil/include/nix/util/alignment.hh @@ -0,0 +1,23 @@ +#pragma once +///@file + +#include +#include +#include +#include + +namespace nix { + +/// Aligns val upwards to be a multiple of alignment. +/// +/// @pre alignment must be a power of 2. 
+template + requires std::is_unsigned_v +constexpr T alignUp(T val, unsigned alignment) +{ + assert(std::has_single_bit(alignment) && "alignment must be a power of 2"); + T mask = ~(T{alignment} - 1u); + return (val + alignment - 1) & mask; +} + +} // namespace nix diff --git a/src/libutil/include/nix/util/args.hh b/src/libutil/include/nix/util/args.hh index 99f6e23e8e9..d793411247b 100644 --- a/src/libutil/include/nix/util/args.hh +++ b/src/libutil/include/nix/util/args.hh @@ -59,7 +59,7 @@ public: * * This only returns the correct value after parseCmdline() has run. */ - virtual Path getCommandBaseDir() const; + virtual std::filesystem::path getCommandBaseDir() const; protected: diff --git a/src/libutil/include/nix/util/args/root.hh b/src/libutil/include/nix/util/args/root.hh index 86b677be4e7..15919a7ac00 100644 --- a/src/libutil/include/nix/util/args/root.hh +++ b/src/libutil/include/nix/util/args/root.hh @@ -38,7 +38,7 @@ protected: * * @see getCommandBaseDir() */ - Path commandBaseDir = "."; + std::filesystem::path commandBaseDir = "."; public: /** Parse the command line, throwing a UsageError if something goes @@ -48,7 +48,7 @@ public: std::shared_ptr completions; - Path getCommandBaseDir() const override; + std::filesystem::path getCommandBaseDir() const override; protected: diff --git a/src/libutil/include/nix/util/canon-path.hh b/src/libutil/include/nix/util/canon-path.hh index dd07929b4f4..2156b02fc41 100644 --- a/src/libutil/include/nix/util/canon-path.hh +++ b/src/libutil/include/nix/util/canon-path.hh @@ -1,17 +1,21 @@ #pragma once ///@file +#include "nix/util/error.hh" #include #include #include #include #include #include +#include #include namespace nix { +MakeError(BadCanonPath, Error); + /** * A canonical representation of a path. It ensures the following: * @@ -23,6 +27,8 @@ namespace nix { * * - There are no components equal to '.' or '..'. * + * - It does not contain NUL bytes. 
+ * * `CanonPath` are "virtual" Nix paths for abstract file system objects; * they are always Unix-style paths, regardless of what OS Nix is * running on. The `/` root doesn't denote the ambient host file system @@ -51,10 +57,7 @@ public: */ CanonPath(std::string_view raw); - explicit CanonPath(const char * raw) - : CanonPath(std::string_view(raw)) - { - } + explicit CanonPath(const char * raw); struct unchecked_t {}; @@ -69,7 +72,7 @@ public: */ CanonPath(const std::vector & elems); - static CanonPath root; + static const CanonPath root; /** * If `raw` starts with a slash, return @@ -120,33 +123,70 @@ public: return &cs[1]; } - struct Iterator + class Iterator { + /** + * Helper class with overloaded operator-> for "drill-down" behavior. + * This was a "temporary" string_view doesn't have to be stored anywhere. + */ + class PointerProxy + { + std::string_view segment; + + public: + PointerProxy(std::string_view segment_) + : segment(segment_) + { + } + + const std::string_view * operator->() const + { + return &segment; + } + }; + + public: + using value_type = std::string_view; + using reference_type = const std::string_view; + using pointer_type = PointerProxy; + using difference_type = std::ptrdiff_t; + using iterator_category = std::forward_iterator_tag; + std::string_view remaining; size_t slash; + /** + * Dummy default constructor required for forward iterators. Doesn't return + * a usable iterator. 
+ */ + Iterator() + : remaining() + , slash(0) + { + } + Iterator(std::string_view remaining) : remaining(remaining) , slash(remaining.find('/')) { } - bool operator!=(const Iterator & x) const + bool operator==(const Iterator & x) const { - return remaining.data() != x.remaining.data(); + return remaining.data() == x.remaining.data(); } - bool operator==(const Iterator & x) const + reference_type operator*() const { - return !(*this != x); + return remaining.substr(0, slash); } - const std::string_view operator*() const + pointer_type operator->() const { - return remaining.substr(0, slash); + return PointerProxy(**this); } - void operator++() + Iterator & operator++() { if (slash == remaining.npos) remaining = remaining.substr(remaining.size()); @@ -154,9 +194,19 @@ public: remaining = remaining.substr(slash + 1); slash = remaining.find('/'); } + return *this; + } + + Iterator operator++(int) + { + auto tmp = *this; + ++*this; + return tmp; } }; + static_assert(std::forward_iterator); + Iterator begin() const { return Iterator(rel()); @@ -263,6 +313,8 @@ public: friend std::size_t hash_value(const CanonPath &); }; +static_assert(std::ranges::forward_range); + std::ostream & operator<<(std::ostream & stream, const CanonPath & path); inline std::size_t hash_value(const CanonPath & path) diff --git a/src/libutil/include/nix/util/closure.hh b/src/libutil/include/nix/util/closure.hh index d55d52c879c..9e37b4cfb02 100644 --- a/src/libutil/include/nix/util/closure.hh +++ b/src/libutil/include/nix/util/closure.hh @@ -24,11 +24,9 @@ void computeClosure(const set startElts, set & res, GetEdgesAsync getEd Sync state_(State{0, res, 0}); - std::function enqueue; - std::condition_variable done; - enqueue = [&](const T & current) -> void { + auto enqueue = [&](this auto & enqueue, const T & current) -> void { { auto state(state_.lock()); if (state->exc) diff --git a/src/libutil/include/nix/util/config-global.hh b/src/libutil/include/nix/util/config-global.hh index 
0e6f43ec4e9..5074351e046 100644 --- a/src/libutil/include/nix/util/config-global.hh +++ b/src/libutil/include/nix/util/config-global.hh @@ -9,11 +9,7 @@ struct GlobalConfig : public AbstractConfig { typedef std::vector ConfigRegistrations; - static ConfigRegistrations & configRegistrations() - { - static ConfigRegistrations configRegistrations; - return configRegistrations; - } + static ConfigRegistrations & configRegistrations(); bool set(const std::string & name, const std::string & value) override; diff --git a/src/libutil/include/nix/util/config-impl.hh b/src/libutil/include/nix/util/config-impl.hh index f407bc86244..8f6f9a358a4 100644 --- a/src/libutil/include/nix/util/config-impl.hh +++ b/src/libutil/include/nix/util/config-impl.hh @@ -16,6 +16,7 @@ #include "nix/util/configuration.hh" #include "nix/util/args.hh" #include "nix/util/logging.hh" +#include "nix/util/file-path.hh" namespace nix { @@ -134,6 +135,8 @@ DECLARE_CONFIG_SERIALISER(Strings) DECLARE_CONFIG_SERIALISER(StringSet) DECLARE_CONFIG_SERIALISER(StringMap) DECLARE_CONFIG_SERIALISER(std::set) +DECLARE_CONFIG_SERIALISER(std::filesystem::path) +DECLARE_CONFIG_SERIALISER(std::optional) template T BaseSetting::parse(const std::string & str) const diff --git a/src/libutil/include/nix/util/configuration.hh b/src/libutil/include/nix/util/configuration.hh index 73e3fb81a52..6b9f2d6f5d0 100644 --- a/src/libutil/include/nix/util/configuration.hh +++ b/src/libutil/include/nix/util/configuration.hh @@ -463,7 +463,20 @@ struct ExperimentalFeatureSettings : Config * Require an experimental feature be enabled, throwing an error if it is * not. */ - void require(const ExperimentalFeature &) const; + void require(const ExperimentalFeature &, std::string reason = "") const; + + /** + * Require an experimental feature be enabled, throwing an error if it is + * not. The reason is lazily evaluated only if the feature is disabled. 
+ */ + template + requires std::invocable && std::convertible_to, std::string> + void require(const ExperimentalFeature & feature, GetReason && getReason) const + { + if (isEnabled(feature)) + return; + require(feature, getReason()); + } /** * `std::nullopt` pointer means no feature, which means there is nothing that could be diff --git a/src/libutil/include/nix/util/error.hh b/src/libutil/include/nix/util/error.hh index e564ca5b9cc..cc8460592a2 100644 --- a/src/libutil/include/nix/util/error.hh +++ b/src/libutil/include/nix/util/error.hh @@ -192,13 +192,27 @@ public: err.traces.push_front(trace); } + /** + * Prepends an item to the error trace, as is usual for extra context. + * + * @param pos Nullable source position to put in trace item + * @param fs Format string, see `HintFmt` + * @param args... Format string arguments. + */ template - void addTrace(std::shared_ptr && e, std::string_view fs, const Args &... args) + void addTrace(std::shared_ptr && pos, std::string_view fs, const Args &... args) { - addTrace(std::move(e), HintFmt(std::string(fs), args...)); + addTrace(std::move(pos), HintFmt(std::string(fs), args...)); } - void addTrace(std::shared_ptr && e, HintFmt hint, TracePrint print = TracePrint::Default); + /** + * Prepends an item to the error trace, as is usual for extra context. 
+ * + * @param pos Nullable source position to put in trace item + * @param hint Formatted error message + * @param print Optional, whether to always print (used by `addErrorContext`) + */ + void addTrace(std::shared_ptr && pos, HintFmt hint, TracePrint print = TracePrint::Default); bool hasTrace() const { diff --git a/src/libutil/include/nix/util/experimental-features.hh b/src/libutil/include/nix/util/experimental-features.hh index 7b0592b8ccb..20a4610a390 100644 --- a/src/libutil/include/nix/util/experimental-features.hh +++ b/src/libutil/include/nix/util/experimental-features.hh @@ -90,7 +90,9 @@ public: */ ExperimentalFeature missingFeature; - MissingExperimentalFeature(ExperimentalFeature missingFeature); + std::string reason; + + MissingExperimentalFeature(ExperimentalFeature missingFeature, std::string reason = ""); }; /** diff --git a/src/libutil/include/nix/util/file-descriptor.hh b/src/libutil/include/nix/util/file-descriptor.hh index 3dd2dd8e69b..d049845883c 100644 --- a/src/libutil/include/nix/util/file-descriptor.hh +++ b/src/libutil/include/nix/util/file-descriptor.hh @@ -1,6 +1,7 @@ #pragma once ///@file +#include "nix/util/canon-path.hh" #include "nix/util/types.hh" #include "nix/util/error.hh" @@ -203,6 +204,26 @@ void closeOnExec(Descriptor fd); } // namespace unix #endif +#ifdef __linux__ +namespace linux { + +/** + * Wrapper around Linux's openat2 syscall introduced in Linux 5.6. + * + * @see https://man7.org/linux/man-pages/man2/openat2.2.html + * @see https://man7.org/linux/man-pages/man2/open_how.2type.html +v* + * @param flags O_* flags + * @param mode Mode for O_{CREAT,TMPFILE} + * @param resolve RESOLVE_* flags + * + * @return nullopt if openat2 is not supported by the kernel. 
+ */ +std::optional openat2(Descriptor dirFd, const char * path, uint64_t flags, uint64_t mode, uint64_t resolve); + +} // namespace linux +#endif + #if defined(_WIN32) && _WIN32_WINNT >= 0x0600 namespace windows { @@ -212,6 +233,45 @@ std::wstring handleToFileName(Descriptor handle); } // namespace windows #endif +#ifndef _WIN32 +namespace unix { + +struct SymlinkNotAllowed : public Error +{ + CanonPath path; + + SymlinkNotAllowed(CanonPath path) + /* Can't provide better error message, since the parent directory is only known to the caller. */ + : Error("relative path '%s' points to a symlink, which is not allowed", path.rel()) + , path(std::move(path)) + { + } +}; + +/** + * Safe(r) function to open \param path file relative to \param dirFd, while + * disallowing escaping from a directory and resolving any symlinks in the + * process. + * + * @note When not on Linux or when openat2 is not available this is implemented + * via openat single path component traversal. Uses RESOLVE_BENEATH with openat2 + * or O_RESOLVE_BENEATH. + * + * @note Since this is Unix-only path is specified as CanonPath, which models + * Unix-style paths and ensures that there are no .. or . components. 
+ * + * @param flags O_* flags + * @param mode Mode for O_{CREAT,TMPFILE} + * + * @pre path.isRoot() is false + * + * @throws SymlinkNotAllowed if any path components + */ +Descriptor openFileEnsureBeneathNoSymlinks(Descriptor dirFd, const CanonPath & path, int flags, mode_t mode = 0); + +} // namespace unix +#endif + MakeError(EndOfFile, Error); } // namespace nix diff --git a/src/libutil/include/nix/util/file-path.hh b/src/libutil/include/nix/util/file-path.hh index 25349eaf730..52dae32ff35 100644 --- a/src/libutil/include/nix/util/file-path.hh +++ b/src/libutil/include/nix/util/file-path.hh @@ -5,6 +5,7 @@ #include "nix/util/types.hh" #include "nix/util/os-string.hh" +#include "nix/util/json-non-null.hh" namespace nix { @@ -53,4 +54,8 @@ std::optional maybePath(PathView path); std::filesystem::path pathNG(PathView path); +template<> +struct json_avoids_null : std::true_type +{}; + } // namespace nix diff --git a/src/libutil/include/nix/util/file-system.hh b/src/libutil/include/nix/util/file-system.hh index 67d4ba0250a..7d88939e32e 100644 --- a/src/libutil/include/nix/util/file-system.hh +++ b/src/libutil/include/nix/util/file-system.hh @@ -55,7 +55,8 @@ inline Path absPath(const Path & path, std::optional dir = {}, bool re return absPath(PathView{path}, dir, resolveSymlinks); } -std::filesystem::path absPath(const std::filesystem::path & path, bool resolveSymlinks = false); +std::filesystem::path +absPath(const std::filesystem::path & path, const std::filesystem::path * dir = nullptr, bool resolveSymlinks = false); /** * Canonicalise a path by removing all `.` or `..` components and @@ -152,6 +153,12 @@ bool pathAccessible(const std::filesystem::path & path); */ Path readLink(const Path & path); +/** + * Read the contents (target) of a symbolic link. The result is not + * in any way canonicalised. + */ +std::filesystem::path readLink(const std::filesystem::path & path); + /** * Open a `Descriptor` with read-only access to the given directory. 
*/ @@ -287,7 +294,7 @@ class AutoDelete public: AutoDelete(); - AutoDelete(AutoDelete && x) + AutoDelete(AutoDelete && x) noexcept { _path = std::move(x._path); del = x.del; @@ -296,6 +303,9 @@ public: } AutoDelete(const std::filesystem::path & p, bool recursive = true); + AutoDelete(const AutoDelete &) = delete; + AutoDelete & operator=(AutoDelete &&) = delete; + AutoDelete & operator=(const AutoDelete &) = delete; ~AutoDelete(); void cancel(); @@ -336,7 +346,14 @@ typedef std::unique_ptr AutoCloseDir; /** * Create a temporary directory. */ -Path createTempDir(const Path & tmpRoot = "", const Path & prefix = "nix", mode_t mode = 0755); +std::filesystem::path +createTempDir(const std::filesystem::path & tmpRoot = "", const std::string & prefix = "nix", mode_t mode = 0755); + +/** + * Create an anonymous readable/writable temporary file, returning a file handle. + * On UNIX there resulting file isn't linked to any path on the filesystem. + */ +AutoCloseFD createAnonymousTempFile(); /** * Create a temporary file, returning a file handle and its path. @@ -346,7 +363,7 @@ std::pair createTempFile(const Path & prefix = "nix"); /** * Return `TMPDIR`, or the default temporary directory if unset or empty. */ -Path defaultTempDir(); +std::filesystem::path defaultTempDir(); /** * Interpret `exe` as a location in the ambient file system and return @@ -360,7 +377,7 @@ bool isExecutableFileAmbient(const std::filesystem::path & exe); * The constructed path looks like `--`. To create a * path nested in a directory, provide a suffix starting with `/`. */ -Path makeTempPath(const Path & root, const Path & suffix = ".tmp"); +std::filesystem::path makeTempPath(const std::filesystem::path & root, const std::string & suffix = ".tmp"); /** * Used in various places. 
diff --git a/src/libutil/include/nix/util/fs-sink.hh b/src/libutil/include/nix/util/fs-sink.hh index bd2db7f53e6..bd9c7205fa8 100644 --- a/src/libutil/include/nix/util/fs-sink.hh +++ b/src/libutil/include/nix/util/fs-sink.hh @@ -12,7 +12,7 @@ namespace nix { * * See `FileSystemObjectSink::createRegularFile`. */ -struct CreateRegularFileSink : Sink +struct CreateRegularFileSink : virtual Sink { /** * If set to true, the sink will not be called with the contents @@ -36,6 +36,23 @@ struct FileSystemObjectSink virtual void createDirectory(const CanonPath & path) = 0; + using DirectoryCreatedCallback = std::function; + + /** + * Create a directory and invoke a callback with a pair of sink + CanonPath + * of the created subdirectory relative to dirSink. + * + * @note This allows for UNIX RestoreSink implementations to implement + * *at-style accessors that always keep an open file descriptor for the + * freshly created directory. Use this when it's important to disallow any + * intermediate path components from being symlinks. + */ + virtual void createDirectory(const CanonPath & path, DirectoryCreatedCallback callback) + { + createDirectory(path); + callback(*this, path); + } + /** * This function in general is no re-entrant. Only one file can be * written at a time. @@ -82,6 +99,18 @@ struct NullFileSystemObjectSink : FileSystemObjectSink struct RestoreSink : FileSystemObjectSink { std::filesystem::path dstPath; +#ifndef _WIN32 + /** + * File descriptor for the directory located at dstPath. Used for *at + * operations relative to this file descriptor. This sink must *never* + * follow intermediate symlinks (starting from dstPath) in case a file + * collision is encountered for various reasons like case-insensitivity or + * other types of normalization. Using appropriate *at system calls and traversing + * only one path component at a time ensures that writing is race-free and + * is not susceptible to symlink replacement. 
+ */ + AutoCloseFD dirFd; +#endif bool startFsync = false; explicit RestoreSink(bool startFsync) @@ -91,6 +120,10 @@ struct RestoreSink : FileSystemObjectSink void createDirectory(const CanonPath & path) override; +#ifndef _WIN32 + void createDirectory(const CanonPath & path, DirectoryCreatedCallback callback) override; +#endif + void createRegularFile(const CanonPath & path, std::function) override; void createSymlink(const CanonPath & path, const std::string & target) override; diff --git a/src/libutil/include/nix/util/hash.hh b/src/libutil/include/nix/util/hash.hh index 81e622b2217..427d49a42b7 100644 --- a/src/libutil/include/nix/util/hash.hh +++ b/src/libutil/include/nix/util/hash.hh @@ -5,6 +5,7 @@ #include "nix/util/types.hh" #include "nix/util/serialise.hh" #include "nix/util/file-system.hh" +#include "nix/util/json-impls.hh" #include @@ -80,6 +81,12 @@ struct Hash */ static Hash parseAny(std::string_view s, std::optional optAlgo); + /** + * Like `parseAny`, but also returns the format the hash was parsed from. + */ + static std::pair + parseAnyReturningFormat(std::string_view s, std::optional optAlgo); + /** * Parse a hash from a string representation like the above, except the * type prefix is mandatory is there is no separate argument. @@ -99,9 +106,14 @@ struct Hash * @param explicitFormat cannot be SRI, but must be one of the * "bases". 
*/ - static Hash parseExplicitFormatUnprefixed(std::string_view s, HashAlgorithm algo, HashFormat explicitFormat); + static Hash parseExplicitFormatUnprefixed( + std::string_view s, + HashAlgorithm algo, + HashFormat explicitFormat, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); - static Hash parseSRI(std::string_view original); + static Hash + parseSRI(std::string_view original, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); public: /** @@ -190,12 +202,14 @@ std::string_view printHashFormat(HashFormat hashFormat); /** * Parse a string representing a hash algorithm. */ -HashAlgorithm parseHashAlgo(std::string_view s); +HashAlgorithm +parseHashAlgo(std::string_view s, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); /** * Will return nothing on parse error */ -std::optional parseHashAlgoOpt(std::string_view s); +std::optional +parseHashAlgoOpt(std::string_view s, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); /** * And the reverse. 
@@ -228,4 +242,29 @@ public: HashResult currentHash(); }; +template<> +struct json_avoids_null : std::true_type +{}; + } // namespace nix + +template<> +struct std::hash +{ + std::size_t operator()(const nix::Hash & hash) const noexcept + { + assert(hash.hashSize > sizeof(size_t)); + return *reinterpret_cast(&hash.hash); + } +}; + +namespace nix { + +inline std::size_t hash_value(const Hash & hash) +{ + return std::hash{}(hash); +} + +} // namespace nix + +JSON_IMPL_WITH_XP_FEATURES(Hash) diff --git a/src/libutil/include/nix/util/json-impls.hh b/src/libutil/include/nix/util/json-impls.hh index 751fc410f56..26a94472f25 100644 --- a/src/libutil/include/nix/util/json-impls.hh +++ b/src/libutil/include/nix/util/json-impls.hh @@ -3,14 +3,43 @@ #include +#include "nix/util/experimental-features.hh" + // Following https://github.com/nlohmann/json#how-can-i-use-get-for-non-default-constructiblenon-copyable-types -#define JSON_IMPL(TYPE) \ - namespace nlohmann { \ - using namespace nix; \ - template<> \ +#define JSON_IMPL_INNER_TO(TYPE) \ + struct adl_serializer \ + { \ + static void to_json(json & json, const TYPE & t); \ + } + +#define JSON_IMPL_INNER_FROM(TYPE) \ + struct adl_serializer \ + { \ + static TYPE from_json(const json & json); \ + } + +#define JSON_IMPL_INNER(TYPE) \ struct adl_serializer \ { \ static TYPE from_json(const json & json); \ static void to_json(json & json, const TYPE & t); \ - }; \ + } + +#define JSON_IMPL(TYPE) \ + namespace nlohmann { \ + using namespace nix; \ + template<> \ + JSON_IMPL_INNER(TYPE); \ + } + +#define JSON_IMPL_WITH_XP_FEATURES(TYPE) \ + namespace nlohmann { \ + using namespace nix; \ + template<> \ + struct adl_serializer \ + { \ + static TYPE \ + from_json(const json & json, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); \ + static void to_json(json & json, const TYPE & t); \ + }; \ } diff --git a/src/libutil/include/nix/util/json-utils.hh b/src/libutil/include/nix/util/json-utils.hh index 
4b5fb4b21be..ec513ca25d6 100644 --- a/src/libutil/include/nix/util/json-utils.hh +++ b/src/libutil/include/nix/util/json-utils.hh @@ -2,7 +2,6 @@ ///@file #include -#include #include "nix/util/error.hh" #include "nix/util/types.hh" @@ -12,20 +11,25 @@ namespace nix { enum struct ExperimentalFeature; -const nlohmann::json * get(const nlohmann::json & map, const std::string & key); - -nlohmann::json * get(nlohmann::json & map, const std::string & key); - /** * Get the value of a json object at a key safely, failing with a nice * error if the key does not exist. * * Use instead of nlohmann::json::at() to avoid ugly exceptions. */ -const nlohmann::json & valueAt(const nlohmann::json::object_t & map, const std::string & key); +const nlohmann::json & valueAt(const nlohmann::json::object_t & map, std::string_view key); + +/** + * @return A pointer to the value associated with `key` if `value` + * contains `key`, otherwise return `nullptr` (not JSON `null`!). + */ +const nlohmann::json * optionalValueAt(const nlohmann::json::object_t & value, std::string_view key); -std::optional optionalValueAt(const nlohmann::json::object_t & value, const std::string & key); -std::optional nullableValueAt(const nlohmann::json::object_t & value, const std::string & key); +/** + * Prevents bugs; see `get` for the same trick. + */ +const nlohmann::json & valueAt(nlohmann::json::object_t && map, std::string_view key) = delete; +const nlohmann::json * optionalValueAt(nlohmann::json::object_t && value, std::string_view key) = delete; /** * Downcast the json object, failing with a nice error if the conversion fails. 
@@ -55,6 +59,17 @@ auto getInteger(const nlohmann::json & value) -> std::enable_if_t +std::map getMap(const nlohmann::json::object_t & jsonObject, auto && f) +{ + std::map map; + + for (const auto & [key, value] : jsonObject) + map.insert_or_assign(key, f(value)); + + return map; +} + const nlohmann::json::boolean_t & getBoolean(const nlohmann::json & value); Strings getStringList(const nlohmann::json & value); StringMap getStringMap(const nlohmann::json & value); @@ -99,4 +114,13 @@ struct adl_serializer> } }; +template +static inline std::optional ptrToOwned(const json * ptr) +{ + if (ptr) + return std::optional{*ptr}; + else + return std::nullopt; +} + } // namespace nlohmann diff --git a/src/libutil/include/nix/util/logging.hh b/src/libutil/include/nix/util/logging.hh index 5e211703daa..de2c3f683df 100644 --- a/src/libutil/include/nix/util/logging.hh +++ b/src/libutil/include/nix/util/logging.hh @@ -56,9 +56,9 @@ struct LoggerSettings : Config expression evaluation errors. )"}; - Setting jsonLogPath{ + Setting> jsonLogPath{ this, - "", + {}, "json-log-path", R"( A file or Unix domain socket to which JSON records of Nix's log output are diff --git a/src/libutil/include/nix/util/memory-source-accessor.hh b/src/libutil/include/nix/util/memory-source-accessor.hh index eba282fe1c1..fc00f34d9c0 100644 --- a/src/libutil/include/nix/util/memory-source-accessor.hh +++ b/src/libutil/include/nix/util/memory-source-accessor.hh @@ -4,59 +4,111 @@ #include "nix/util/source-path.hh" #include "nix/util/fs-sink.hh" #include "nix/util/variant-wrapper.hh" +#include "nix/util/json-impls.hh" namespace nix { /** - * An source accessor for an in-memory file system. 
+ * File System Object definitions + * + * @see https://nix.dev/manual/nix/latest/store/file-system-object.html */ -struct MemorySourceAccessor : virtual SourceAccessor +namespace fso { + +template +struct Regular +{ + bool executable = false; + RegularContents contents; + + auto operator<=>(const Regular &) const = default; +}; + +/** + * Child parameter because sometimes we want "shallow" directories without + * full file children. + */ +template +struct DirectoryT +{ + using Name = std::string; + + std::map> entries; + + inline bool operator==(const DirectoryT &) const noexcept; + inline std::strong_ordering operator<=>(const DirectoryT &) const noexcept; +}; + +struct Symlink { + std::string target; + + auto operator<=>(const Symlink &) const = default; +}; + +/** + * For when we know there is a child, but don't know anything about it. + * + * This is not part of the core File System Object data model --- this + * represents not knowing, not an additional type of file. + */ +struct Opaque +{ + auto operator<=>(const Opaque &) const = default; +}; + +/** + * `File` nicely defining what a "file system object" + * is in Nix. + * + * With a different type argument, it can also be a "skeletal" + * version that is abstract syntax for a "NAR listing". + */ +template +struct VariantT +{ + bool operator==(const VariantT &) const noexcept; + std::strong_ordering operator<=>(const VariantT &) const noexcept; + + using Regular = nix::fso::Regular; + /** - * In addition to being part of the implementation of - * `MemorySourceAccessor`, this has a side benefit of nicely - * defining what a "file system object" is in Nix. + * In the default case, we do want full file children for our directory. 
*/ - struct File - { - bool operator==(const File &) const noexcept; - std::strong_ordering operator<=>(const File &) const noexcept; + using Directory = nix::fso::DirectoryT>; - struct Regular - { - bool executable = false; - std::string contents; + using Symlink = nix::fso::Symlink; - bool operator==(const Regular &) const = default; - auto operator<=>(const Regular &) const = default; - }; + using Raw = std::variant; + Raw raw; - struct Directory - { - using Name = std::string; + MAKE_WRAPPER_CONSTRUCTOR(VariantT); - std::map> contents; + SourceAccessor::Stat lstat() const; +}; - bool operator==(const Directory &) const noexcept; - // TODO libc++ 16 (used by darwin) missing `std::map::operator <=>`, can't do yet. - bool operator<(const Directory &) const noexcept; - }; +template +inline bool DirectoryT::operator==(const DirectoryT &) const noexcept = default; - struct Symlink - { - std::string target; +template +inline std::strong_ordering DirectoryT::operator<=>(const DirectoryT &) const noexcept = default; - bool operator==(const Symlink &) const = default; - auto operator<=>(const Symlink &) const = default; - }; +template +inline bool +VariantT::operator==(const VariantT &) const noexcept = default; - using Raw = std::variant; - Raw raw; +template +inline std::strong_ordering +VariantT::operator<=>(const VariantT &) const noexcept = default; - MAKE_WRAPPER_CONSTRUCTOR(File); +} // namespace fso - Stat lstat() const; - }; +/** + * An source accessor for an in-memory file system. 
+ */ +struct MemorySourceAccessor : virtual SourceAccessor +{ + using File = fso::VariantT; std::optional root; @@ -89,19 +141,6 @@ struct MemorySourceAccessor : virtual SourceAccessor SourcePath addFile(CanonPath path, std::string && contents); }; -inline bool MemorySourceAccessor::File::Directory::operator==( - const MemorySourceAccessor::File::Directory &) const noexcept = default; - -inline bool -MemorySourceAccessor::File::Directory::operator<(const MemorySourceAccessor::File::Directory & other) const noexcept -{ - return contents < other.contents; -} - -inline bool MemorySourceAccessor::File::operator==(const MemorySourceAccessor::File &) const noexcept = default; -inline std::strong_ordering -MemorySourceAccessor::File::operator<=>(const MemorySourceAccessor::File &) const noexcept = default; - /** * Write to a `MemorySourceAccessor` at the given path */ @@ -121,4 +160,53 @@ struct MemorySink : FileSystemObjectSink void createSymlink(const CanonPath & path, const std::string & target) override; }; +template<> +struct json_avoids_null : std::true_type +{}; + +template<> +struct json_avoids_null : std::true_type +{}; + +template<> +struct json_avoids_null : std::true_type +{}; + +template<> +struct json_avoids_null : std::true_type +{}; + +template<> +struct json_avoids_null : std::true_type +{}; + } // namespace nix + +namespace nlohmann { + +using namespace nix; + +#define ARG fso::Regular +template +JSON_IMPL_INNER(ARG); +#undef ARG + +#define ARG fso::DirectoryT +template +JSON_IMPL_INNER(ARG); +#undef ARG + +template<> +JSON_IMPL_INNER(fso::Symlink); + +template<> +JSON_IMPL_INNER(fso::Opaque); + +#define ARG fso::VariantT +template +JSON_IMPL_INNER(ARG); +#undef ARG + +} // namespace nlohmann + +JSON_IMPL(MemorySourceAccessor) diff --git a/src/libutil/include/nix/util/meson.build b/src/libutil/include/nix/util/meson.build index 8ec06b2c45c..5dd569d9146 100644 --- a/src/libutil/include/nix/util/meson.build +++ b/src/libutil/include/nix/util/meson.build @@ 
-4,6 +4,7 @@ include_dirs = [ include_directories('../..') ] headers = files( 'abstract-setting-to-json.hh', + 'alignment.hh', 'ansicolor.hh', 'archive.hh', 'args.hh', @@ -50,6 +51,7 @@ headers = files( 'memory-source-accessor.hh', 'mounted-source-accessor.hh', 'muxable-pipe.hh', + 'nar-accessor.hh', 'os-string.hh', 'pool.hh', 'pos-idx.hh', diff --git a/src/libutil/include/nix/util/nar-accessor.hh b/src/libutil/include/nix/util/nar-accessor.hh new file mode 100644 index 00000000000..745c79f607b --- /dev/null +++ b/src/libutil/include/nix/util/nar-accessor.hh @@ -0,0 +1,88 @@ +#pragma once +///@file + +#include "nix/util/memory-source-accessor.hh" + +#include + +#include + +namespace nix { + +struct Source; + +/** + * Return an object that provides access to the contents of a NAR + * file. + */ +ref makeNarAccessor(std::string && nar); + +ref makeNarAccessor(Source & source); + +/** + * Create a NAR accessor from a NAR listing (in the format produced by + * listNar()). The callback getNarBytes(offset, length) is used by the + * readFile() method of the accessor to get the contents of files + * inside the NAR. + */ +using GetNarBytes = std::function; + +/** + * The canonical GetNarBytes function for a seekable Source. + */ +GetNarBytes seekableGetNarBytes(const Path & path); + +GetNarBytes seekableGetNarBytes(Descriptor fd); + +ref makeLazyNarAccessor(const nlohmann::json & listing, GetNarBytes getNarBytes); + +/** + * Creates a NAR accessor from a given stream and a GetNarBytes getter. + * @param source Consumed eagerly. References to it are not persisted in the resulting SourceAccessor. + */ +ref makeLazyNarAccessor(Source & source, GetNarBytes getNarBytes); + +struct NarListingRegularFile +{ + /** + * @see `SourceAccessor::Stat::fileSize` + */ + std::optional fileSize; + + /** + * @see `SourceAccessor::Stat::narOffset` + * + * We only set to non-`std::nullopt` if it is also non-zero. 
+ */ + std::optional narOffset; + + auto operator<=>(const NarListingRegularFile &) const = default; +}; + +/** + * Abstract syntax for a "NAR listing". + */ +using NarListing = fso::VariantT; + +/** + * Shallow NAR listing where directory children are not recursively expanded. + * Uses a variant that can hold Regular/Symlink fully, but Directory children + * are just unit types indicating presence without content. + */ +using ShallowNarListing = fso::VariantT; + +/** + * Return a deep structured representation of the contents of a NAR (except file + * contents), recursively listing all children. + */ +NarListing listNarDeep(SourceAccessor & accessor, const CanonPath & path); + +/** + * Return a shallow structured representation of the contents of a NAR (except file + * contents), only listing immediate children without recursing. + */ +ShallowNarListing listNarShallow(SourceAccessor & accessor, const CanonPath & path); + +// All json_avoids_null and JSON_IMPL covered by generic templates in memory-source-accessor.hh + +} // namespace nix diff --git a/src/libutil/include/nix/util/pos-table.hh b/src/libutil/include/nix/util/pos-table.hh index 4ef4b9af4cb..954138afbc8 100644 --- a/src/libutil/include/nix/util/pos-table.hh +++ b/src/libutil/include/nix/util/pos-table.hh @@ -121,6 +121,16 @@ public: return o->origin; return std::monostate{}; } + + /** + * Remove all origins from the table. + */ + void clear() + { + auto lines = linesCache.lock(); + lines->clear(); + state_.lock()->origins.clear(); + } }; } // namespace nix diff --git a/src/libutil/include/nix/util/posix-source-accessor.hh b/src/libutil/include/nix/util/posix-source-accessor.hh index 895e2e1c180..29561a3daaf 100644 --- a/src/libutil/include/nix/util/posix-source-accessor.hh +++ b/src/libutil/include/nix/util/posix-source-accessor.hh @@ -9,7 +9,7 @@ struct SourcePath; /** * A source accessor that uses the Unix filesystem. 
 */ -struct PosixSourceAccessor : virtual SourceAccessor +class PosixSourceAccessor : virtual public SourceAccessor { /** * Optional root path to prefix all operations into the native file @@ -18,8 +18,12 @@ struct PosixSourceAccessor : virtual SourceAccessor */ const std::filesystem::path root; + const bool trackLastModified = false; + +public: + PosixSourceAccessor(); - PosixSourceAccessor(std::filesystem::path && root); + PosixSourceAccessor(std::filesystem::path && root, bool trackLastModified = false); /** * The most recent mtime seen by lstat(). This is a hack to @@ -43,6 +47,9 @@ struct PosixSourceAccessor : virtual SourceAccessor * Create a `PosixSourceAccessor` and `SourcePath` corresponding to * some native path. * + * @param trackLastModified Whether the accessor should return a non-null getLastModified. + * When true the accessor must be used only by a single thread. + * * The `PosixSourceAccessor` is rooted as far up the tree as * possible, (e.g. on Windows it could scoped to a drive like * `C:\`). This allows more `..` parent accessing to work. @@ -64,7 +71,12 @@ struct PosixSourceAccessor * and * [`std::filesystem::path::relative_path`](https://en.cppreference.com/w/cpp/filesystem/path/relative_path). */ - static SourcePath createAtRoot(const std::filesystem::path & path); + static SourcePath createAtRoot(const std::filesystem::path & path, bool trackLastModified = false); + + std::optional getLastModified() override + { + return trackLastModified ? 
std::optional{mtime} : std::nullopt; + } private: diff --git a/src/libutil/include/nix/util/ref.hh b/src/libutil/include/nix/util/ref.hh index 7cf5ef25ebc..7ba5349a60b 100644 --- a/src/libutil/include/nix/util/ref.hh +++ b/src/libutil/include/nix/util/ref.hh @@ -17,6 +17,12 @@ private: std::shared_ptr p; + void assertNonNull() + { + if (!p) + throw std::invalid_argument("null pointer cast to ref"); + } + public: using element_type = T; @@ -24,15 +30,19 @@ public: explicit ref(const std::shared_ptr & p) : p(p) { - if (!p) - throw std::invalid_argument("null pointer cast to ref"); + assertNonNull(); + } + + explicit ref(std::shared_ptr && p) + : p(std::move(p)) + { + assertNonNull(); } explicit ref(T * p) : p(p) { - if (!p) - throw std::invalid_argument("null pointer cast to ref"); + assertNonNull(); } T * operator->() const @@ -45,14 +55,22 @@ public: return *p; } - operator std::shared_ptr() const + std::shared_ptr get_ptr() const & { return p; } - std::shared_ptr get_ptr() const + std::shared_ptr get_ptr() && { - return p; + return std::move(p); + } + + /** + * Convenience to avoid explicit `get_ptr()` call in some cases. + */ + operator std::shared_ptr(this auto && self) + { + return std::forward(self).get_ptr(); } template diff --git a/src/libutil/include/nix/util/serialise.hh b/src/libutil/include/nix/util/serialise.hh index d6845a494dc..6322156aa00 100644 --- a/src/libutil/include/nix/util/serialise.hh +++ b/src/libutil/include/nix/util/serialise.hh @@ -105,7 +105,7 @@ struct Source * A buffered abstract source. Warning: a BufferedSource should not be * used from multiple threads concurrently. */ -struct BufferedSource : Source +struct BufferedSource : virtual Source { size_t bufSize, bufPosIn, bufPosOut; std::unique_ptr buffer; @@ -132,6 +132,14 @@ protected: virtual size_t readUnbuffered(char * data, size_t len) = 0; }; +/** + * Source type that can be restarted. 
+ */ +struct RestartableSource : virtual Source +{ + virtual void restart() = 0; +}; + /** * A sink that writes data to a file descriptor. */ @@ -174,7 +182,7 @@ private: /** * A source that reads data from a file descriptor. */ -struct FdSource : BufferedSource +struct FdSource : BufferedSource, RestartableSource { Descriptor fd; size_t read = 0; @@ -196,6 +204,7 @@ struct FdSource : BufferedSource FdSource & operator=(FdSource && s) = default; bool good() override; + void restart() override; /** * Return true if the buffer is not empty after a non-blocking @@ -233,7 +242,7 @@ struct StringSink : Sink /** * A source that reads data from a string. */ -struct StringSource : Source +struct StringSource : RestartableSource { std::string_view s; size_t pos; @@ -257,6 +266,55 @@ struct StringSource : Source size_t read(char * data, size_t len) override; void skip(size_t len) override; + + void restart() override + { + pos = 0; + } +}; + +/** + * Compresses a RestartableSource using the specified compression method. + * + * @note currently this buffers the entire compressed data stream in memory. In the future it may instead compress data + * on demand, lazily pulling from the original `RestartableSource`. In that case, the `size()` method would go away + * because we would not in fact know the compressed size in advance. + */ +struct CompressedSource : RestartableSource +{ +private: + std::string compressedData; + std::string compressionMethod; + StringSource stringSource; + +public: + /** + * Compress a RestartableSource using the specified compression method. 
+ * + * @param source The source data to compress + * @param compressionMethod The compression method to use (e.g., "xz", "br") + */ + CompressedSource(RestartableSource & source, const std::string & compressionMethod); + + size_t read(char * data, size_t len) override + { + return stringSource.read(data, len); + } + + void restart() override + { + stringSource.restart(); + } + + uint64_t size() const + { + return compressedData.size(); + } + + std::string_view getCompressionMethod() const + { + return compressionMethod; + } }; /** @@ -381,18 +439,27 @@ struct LengthSource : Source */ struct LambdaSink : Sink { - typedef std::function lambda_t; + typedef std::function data_t; + typedef std::function cleanup_t; - lambda_t lambda; + data_t dataFun; + cleanup_t cleanupFun; - LambdaSink(const lambda_t & lambda) - : lambda(lambda) + LambdaSink( + const data_t & dataFun, const cleanup_t & cleanupFun = []() {}) + : dataFun(dataFun) + , cleanupFun(cleanupFun) + { + } + + ~LambdaSink() { + cleanupFun(); } void operator()(std::string_view data) override { - lambda(data); + dataFun(data); } }; diff --git a/src/libutil/include/nix/util/signals.hh b/src/libutil/include/nix/util/signals.hh index 8facec37f6c..ff26975ad60 100644 --- a/src/libutil/include/nix/util/signals.hh +++ b/src/libutil/include/nix/util/signals.hh @@ -21,11 +21,6 @@ static inline void setInterrupted(bool isInterrupted); */ static inline bool getInterrupted(); -/** - * @note Does nothing on Windows - */ -void setInterruptThrown(); - /** * @note Does nothing on Windows */ diff --git a/src/libutil/include/nix/util/source-accessor.hh b/src/libutil/include/nix/util/source-accessor.hh index 671444e6f37..1006895b33c 100644 --- a/src/libutil/include/nix/util/source-accessor.hh +++ b/src/libutil/include/nix/util/source-accessor.hh @@ -241,10 +241,4 @@ ref makeFSSourceAccessor(std::filesystem::path root); */ ref makeUnionSourceAccessor(std::vector> && accessors); -/** - * Creates a new source accessor which is confined 
to the subdirectory - * of the given source accessor. - */ -ref projectSubdirSourceAccessor(ref, CanonPath subdirectory); - } // namespace nix diff --git a/src/libutil/include/nix/util/strings.hh b/src/libutil/include/nix/util/strings.hh index ba37ce79f63..da8409e6a22 100644 --- a/src/libutil/include/nix/util/strings.hh +++ b/src/libutil/include/nix/util/strings.hh @@ -3,6 +3,7 @@ #include "nix/util/types.hh" #include +#include #include #include #include @@ -93,6 +94,44 @@ extern template std::string dropEmptyInitThenConcatStringsSep(std::string_view, */ std::list shellSplitString(std::string_view s); +/** + * Conditionally wrap a string with prefix and suffix brackets. + * + * If `content` is empty, returns an empty string. + * Otherwise, returns `prefix + content + suffix`. + * + * Example: + * optionalBracket(" (", "foo", ")") == " (foo)" + * optionalBracket(" (", "", ")") == "" + * + * Design note: this would have been called `optionalParentheses`, except this + * function is more general and more explicit. Parentheses typically *also* need + * to be prefixed with a space in order to fit nicely in a piece of natural + * language. + */ +std::string optionalBracket(std::string_view prefix, std::string_view content, std::string_view suffix); + +/** + * Overload for optional content. + * + * If `content` is nullopt or contains an empty string, returns an empty string. + * Otherwise, returns `prefix + *content + suffix`. 
+ * + * Example: + * optionalBracket(" (", std::optional("foo"), ")") == " (foo)" + * optionalBracket(" (", std::nullopt, ")") == "" + * optionalBracket(" (", std::optional(""), ")") == "" + */ +template + requires std::convertible_to +std::string optionalBracket(std::string_view prefix, const std::optional & content, std::string_view suffix) +{ + if (!content || std::string_view(*content).empty()) { + return ""; + } + return optionalBracket(prefix, std::string_view(*content), suffix); +} + /** * Hash implementation that can be used for zero-copy heterogenous lookup from * P1690R1[1] in unordered containers. @@ -127,4 +166,10 @@ public: } }; +/** + * Check that the string does not contain any NUL bytes and return c_str(). + * @throws Error if str contains '\0' bytes. + */ +const char * requireCString(const std::string & str); + } // namespace nix diff --git a/src/libutil/include/nix/util/terminal.hh b/src/libutil/include/nix/util/terminal.hh index a09b71c5277..c70006bc51e 100644 --- a/src/libutil/include/nix/util/terminal.hh +++ b/src/libutil/include/nix/util/terminal.hh @@ -4,7 +4,15 @@ #include #include +#include "nix/util/file-descriptor.hh" + namespace nix { + +/** + * Determine whether \param fd is a terminal. + */ +bool isTTY(Descriptor fd); + /** * Determine whether ANSI escape sequences are appropriate for the * present output. diff --git a/src/libutil/include/nix/util/thread-pool.hh b/src/libutil/include/nix/util/thread-pool.hh index ce34516ef4b..6080ec6098a 100644 --- a/src/libutil/include/nix/util/thread-pool.hh +++ b/src/libutil/include/nix/util/thread-pool.hh @@ -36,7 +36,7 @@ public: /** * Enqueue a function to be executed by the thread pool. */ - void enqueue(const work_t & t); + void enqueue(work_t t); /** * Execute work items until the queue is empty. 
diff --git a/src/libutil/include/nix/util/topo-sort.hh b/src/libutil/include/nix/util/topo-sort.hh index 9f403e2e6b9..fb918117bda 100644 --- a/src/libutil/include/nix/util/topo-sort.hh +++ b/src/libutil/include/nix/util/topo-sort.hh @@ -2,41 +2,63 @@ ///@file #include "nix/util/error.hh" +#include +#include namespace nix { -template -std::vector topoSort( - std::set items, - std::function(const T &)> getChildren, - std::function makeCycleError) +template +struct Cycle +{ + T path; + T parent; +}; + +template +using TopoSortResult = std::variant, Cycle>; + +template F> + requires std::same_as>, std::set> +TopoSortResult topoSort(std::set items, F && getChildren) { std::vector sorted; decltype(items) visited, parents; - std::function dfsVisit; + std::function>(const T & path, const T * parent)> dfsVisit; - dfsVisit = [&](const T & path, const T * parent) { - if (parents.count(path)) - throw makeCycleError(path, *parent); + dfsVisit = [&](const T & path, const T * parent) -> std::optional> { + if (parents.count(path)) { + return Cycle{path, *parent}; + } - if (!visited.insert(path).second) - return; + if (!visited.insert(path).second) { + return std::nullopt; + } parents.insert(path); - auto references = getChildren(path); + auto && references = std::invoke(getChildren, path); for (auto & i : references) /* Don't traverse into items that don't exist in our starting set. 
*/ - if (i != path && items.count(i)) - dfsVisit(i, &path); + if (i != path && items.count(i)) { + auto result = dfsVisit(i, &path); + if (result.has_value()) { + return result; + } + } sorted.push_back(path); parents.erase(path); + + return std::nullopt; }; - for (auto & i : items) - dfsVisit(i, nullptr); + for (auto & i : items) { + auto cycle = dfsVisit(i, nullptr); + if (cycle.has_value()) { + return *cycle; + } + } std::reverse(sorted.begin(), sorted.end()); diff --git a/src/libutil/include/nix/util/url.hh b/src/libutil/include/nix/util/url.hh index 4ed80feb3a2..55c475df651 100644 --- a/src/libutil/include/nix/util/url.hh +++ b/src/libutil/include/nix/util/url.hh @@ -330,10 +330,13 @@ struct ParsedUrlScheme ParsedUrlScheme parseUrlScheme(std::string_view scheme); -/* Detects scp-style uris (e.g. git@github.com:NixOS/nix) and fixes - them by removing the `:` and assuming a scheme of `ssh://`. Also - changes absolute paths into file:// URLs. */ -ParsedURL fixGitURL(const std::string & url); +/** + * Detects scp-style uris (e.g. `git@github.com:NixOS/nix`) and fixes + * them by removing the `:` and assuming a scheme of `ssh://`. Also + * drops `git+` from the scheme (e.g. `git+https://` to `https://`) + * and changes absolute paths into `file://` URLs. + */ +ParsedURL fixGitURL(std::string url); /** * Whether a string is valid as RFC 3986 scheme name. @@ -408,6 +411,17 @@ struct VerbatimURL [](const ParsedURL & url) -> std::string_view { return url.scheme; }}, raw); } + + /** + * Get the last non-empty path segment from the URL. + * + * This is useful for extracting filenames from URLs. + * For example, "https://example.com/path/to/file.txt?query=value" + * returns "file.txt". + * + * @return The last non-empty path segment, or std::nullopt if no such segment exists. 
+ */ + std::optional lastPathSegment() const; }; std::ostream & operator<<(std::ostream & os, const VerbatimURL & url); diff --git a/src/libutil/include/nix/util/users.hh b/src/libutil/include/nix/util/users.hh index f2c6caecfcd..7a556fa8b7b 100644 --- a/src/libutil/include/nix/util/users.hh +++ b/src/libutil/include/nix/util/users.hh @@ -1,6 +1,8 @@ #pragma once ///@file +#include + #include "nix/util/types.hh" #ifndef _WIN32 @@ -15,43 +17,43 @@ std::string getUserName(); /** * @return the given user's home directory from /etc/passwd. */ -Path getHomeOf(uid_t userId); +std::filesystem::path getHomeOf(uid_t userId); #endif /** * @return $HOME or the user's home directory from /etc/passwd. */ -Path getHome(); +std::filesystem::path getHome(); /** * @return $NIX_CACHE_HOME or $XDG_CACHE_HOME/nix or $HOME/.cache/nix. */ -Path getCacheDir(); +std::filesystem::path getCacheDir(); /** * @return $NIX_CONFIG_HOME or $XDG_CONFIG_HOME/nix or $HOME/.config/nix. */ -Path getConfigDir(); +std::filesystem::path getConfigDir(); /** * @return the directories to search for user configuration files */ -std::vector getConfigDirs(); +std::vector getConfigDirs(); /** * @return $NIX_DATA_HOME or $XDG_DATA_HOME/nix or $HOME/.local/share/nix. */ -Path getDataDir(); +std::filesystem::path getDataDir(); /** * @return $NIX_STATE_HOME or $XDG_STATE_HOME/nix or $HOME/.local/state/nix. */ -Path getStateDir(); +std::filesystem::path getStateDir(); /** * Create the Nix state directory and return the path to it. 
*/ -Path createNixStateDir(); +std::filesystem::path createNixStateDir(); /** * Perform tilde expansion on a path, replacing tilde with the user's diff --git a/src/libutil/include/nix/util/util.hh b/src/libutil/include/nix/util/util.hh index 0bf9efcb5a5..8130c52ed27 100644 --- a/src/libutil/include/nix/util/util.hh +++ b/src/libutil/include/nix/util/util.hh @@ -6,6 +6,7 @@ #include "nix/util/logging.hh" #include "nix/util/strings.hh" +#include #include #include #include @@ -33,18 +34,37 @@ auto concatStrings(Parts &&... parts) return concatStringsSep({}, views); } +/** + * Add quotes around a string. + */ +inline std::string quoteString(std::string_view s, char quote = '\'') +{ + std::string result; + result.reserve(s.size() + 2); + result += quote; + result += s; + result += quote; + return result; +} + /** * Add quotes around a collection of strings. */ template -Strings quoteStrings(const C & c) +Strings quoteStrings(const C & c, char quote = '\'') { Strings res; for (auto & s : c) - res.push_back("'" + s + "'"); + res.push_back(quoteString(s, quote)); return res; } +inline Strings quoteFSPaths(const std::set & paths, char quote = '\'') +{ + return paths | std::views::transform([&](const auto & p) { return quoteString(p.string(), quote); }) + | std::ranges::to(); +} + /** * Remove trailing whitespace from a string. 
* @@ -99,12 +119,48 @@ N string2IntWithUnitPrefix(std::string_view s) throw UsageError("'%s' is not an integer", s); } +// Base also uses 'K', because it should also displayed as KiB => 100 Bytes => 0.1 KiB +#define NIX_UTIL_SIZE_UNITS \ + NIX_UTIL_DEFINE_SIZE_UNIT(Base, 'K') \ + NIX_UTIL_DEFINE_SIZE_UNIT(Kilo, 'K') \ + NIX_UTIL_DEFINE_SIZE_UNIT(Mega, 'M') \ + NIX_UTIL_DEFINE_SIZE_UNIT(Giga, 'G') \ + NIX_UTIL_DEFINE_SIZE_UNIT(Tera, 'T') \ + NIX_UTIL_DEFINE_SIZE_UNIT(Peta, 'P') \ + NIX_UTIL_DEFINE_SIZE_UNIT(Exa, 'E') \ + NIX_UTIL_DEFINE_SIZE_UNIT(Zetta, 'Z') \ + NIX_UTIL_DEFINE_SIZE_UNIT(Yotta, 'Y') + +enum class SizeUnit { +#define NIX_UTIL_DEFINE_SIZE_UNIT(name, suffix) name, + NIX_UTIL_SIZE_UNITS +#undef NIX_UTIL_DEFINE_SIZE_UNIT +}; + +constexpr inline auto sizeUnits = std::to_array({ +#define NIX_UTIL_DEFINE_SIZE_UNIT(name, suffix) SizeUnit::name, + NIX_UTIL_SIZE_UNITS +#undef NIX_UTIL_DEFINE_SIZE_UNIT +}); + +SizeUnit getSizeUnit(int64_t value); + +/** + * Returns the unit if all values would be rendered using the same unit + * otherwise returns `std::nullopt`. + */ +std::optional getCommonSizeUnit(std::initializer_list values); + +std::string renderSizeWithoutUnit(int64_t value, SizeUnit unit, bool align = false); + +char getSizeUnitSuffix(SizeUnit unit); + /** * Pretty-print a byte value, e.g. 12433615056 is rendered as `11.6 * GiB`. If `align` is set, the number will be right-justified by * padding with spaces on the left. */ -std::string renderSize(uint64_t value, bool align = false); +std::string renderSize(int64_t value, bool align = false); /** * Parse a string into a float. @@ -193,6 +249,28 @@ std::string stripIndentation(std::string_view s); */ std::pair getLine(std::string_view s); +/** + * Get a pointer to the contents of a `std::optional` if it is set, or a + * null pointer otherise. + * + * Const version. + */ +template +const T * get(const std::optional & opt) +{ + return opt ? 
&*opt : nullptr; +} + +/** + * Non-const counterpart of `const T * get(const std::optional)`. + * Takes a mutable reference, but returns a mutable pointer. + */ +template +T * get(std::optional & opt) +{ + return opt ? &*opt : nullptr; +} + /** * Get a value for the specified key from an associate container. */ @@ -339,8 +417,6 @@ struct overloaded : Ts... template overloaded(Ts...) -> overloaded; -std::string showBytes(uint64_t bytes); - /** * Provide an addition operator between strings and string_views * inexplicably omitted from the standard library. diff --git a/src/libutil/include/nix/util/variant-wrapper.hh b/src/libutil/include/nix/util/variant-wrapper.hh index 146ae07b635..14c9a37389c 100644 --- a/src/libutil/include/nix/util/variant-wrapper.hh +++ b/src/libutil/include/nix/util/variant-wrapper.hh @@ -22,10 +22,12 @@ * * The moral equivalent of `using Raw::Raw;` */ -#define MAKE_WRAPPER_CONSTRUCTOR(CLASS_NAME) \ - FORCE_DEFAULT_CONSTRUCTORS(CLASS_NAME) \ - \ - CLASS_NAME(auto &&... arg) \ - : raw(std::forward(arg)...) \ - { \ +#define MAKE_WRAPPER_CONSTRUCTOR(CLASS_NAME) \ + FORCE_DEFAULT_CONSTRUCTORS(CLASS_NAME) \ + \ + template \ + requires(!(sizeof...(Args) == 1 && (std::is_same_v, CLASS_NAME> && ...))) \ + CLASS_NAME(Args &&... arg) \ + : raw(std::forward(arg)...) 
\ + { \ } diff --git a/src/libutil/json-utils.cc b/src/libutil/json-utils.cc index 74b3b27cc4e..80779541e1d 100644 --- a/src/libutil/json-utils.cc +++ b/src/libutil/json-utils.cc @@ -1,52 +1,21 @@ #include "nix/util/json-utils.hh" #include "nix/util/error.hh" #include "nix/util/types.hh" -#include -#include -#include +#include "nix/util/util.hh" namespace nix { -const nlohmann::json * get(const nlohmann::json & map, const std::string & key) +const nlohmann::json & valueAt(const nlohmann::json::object_t & map, std::string_view key) { - auto i = map.find(key); - if (i == map.end()) - return nullptr; - return &*i; -} - -nlohmann::json * get(nlohmann::json & map, const std::string & key) -{ - auto i = map.find(key); - if (i == map.end()) - return nullptr; - return &*i; -} - -const nlohmann::json & valueAt(const nlohmann::json::object_t & map, const std::string & key) -{ - if (!map.contains(key)) + if (auto * p = optionalValueAt(map, key)) + return *p; + else throw Error("Expected JSON object to contain key '%s' but it doesn't: %s", key, nlohmann::json(map).dump()); - - return map.at(key); -} - -std::optional optionalValueAt(const nlohmann::json::object_t & map, const std::string & key) -{ - if (!map.contains(key)) - return std::nullopt; - - return std::optional{map.at(key)}; } -std::optional nullableValueAt(const nlohmann::json::object_t & map, const std::string & key) +const nlohmann::json * optionalValueAt(const nlohmann::json::object_t & map, std::string_view key) { - auto value = valueAt(map, key); - - if (value.is_null()) - return std::nullopt; - - return std::optional{std::move(value)}; + return get(map, key); } const nlohmann::json * getNullable(const nlohmann::json & value) @@ -122,14 +91,7 @@ Strings getStringList(const nlohmann::json & value) StringMap getStringMap(const nlohmann::json & value) { - auto & jsonObject = getObject(value); - - StringMap stringMap; - - for (const auto & [key, value] : jsonObject) - stringMap[getString(key)] = getString(value); - - 
return stringMap; + return getMap>(getObject(value), getString); } StringSet getStringSet(const nlohmann::json & value) diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index b3e69405eb6..842381acf66 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -377,10 +377,10 @@ std::unique_ptr makeJSONLogger(const std::filesystem::path & path, bool void applyJSONLogger() { - if (!loggerSettings.jsonLogPath.get().empty()) { + if (auto & opt = loggerSettings.jsonLogPath.get()) { try { std::vector> loggers; - loggers.push_back(makeJSONLogger(std::filesystem::path(loggerSettings.jsonLogPath.get()), false)); + loggers.push_back(makeJSONLogger(*opt, false)); try { logger = makeTeeLogger(std::move(logger), std::move(loggers)); } catch (...) { diff --git a/src/libutil/memory-source-accessor.cc b/src/libutil/memory-source-accessor.cc index a9ffb77469c..ec21c846dde 100644 --- a/src/libutil/memory-source-accessor.cc +++ b/src/libutil/memory-source-accessor.cc @@ -1,4 +1,5 @@ #include "nix/util/memory-source-accessor.hh" +#include "nix/util/json-utils.hh" namespace nix { @@ -29,13 +30,13 @@ MemorySourceAccessor::File * MemorySourceAccessor::open(const CanonPath & path, return nullptr; auto & curDir = *curDirP; - auto i = curDir.contents.find(name); - if (i == curDir.contents.end()) { + auto i = curDir.entries.find(name); + if (i == curDir.entries.end()) { if (!create) return nullptr; else { newF = true; - i = curDir.contents.insert( + i = curDir.entries.insert( i, { std::string{name}, @@ -68,25 +69,26 @@ bool MemorySourceAccessor::pathExists(const CanonPath & path) return open(path, std::nullopt); } -MemorySourceAccessor::Stat MemorySourceAccessor::File::lstat() const +template<> +SourceAccessor::Stat MemorySourceAccessor::File::lstat() const { return std::visit( overloaded{ [](const Regular & r) { - return Stat{ - .type = tRegular, + return SourceAccessor::Stat{ + .type = SourceAccessor::tRegular, .fileSize = r.contents.size(), .isExecutable = 
r.executable, }; }, [](const Directory &) { - return Stat{ - .type = tDirectory, + return SourceAccessor::Stat{ + .type = SourceAccessor::tDirectory, }; }, [](const Symlink &) { - return Stat{ - .type = tSymlink, + return SourceAccessor::Stat{ + .type = SourceAccessor::tSymlink, }; }, }, @@ -106,7 +108,7 @@ MemorySourceAccessor::DirEntries MemorySourceAccessor::readDirectory(const Canon throw Error("file '%s' does not exist", path); if (auto * d = std::get_if(&f->raw)) { DirEntries res; - for (auto & [name, file] : d->contents) + for (auto & [name, file] : d->entries) res.insert_or_assign(name, file.lstat().type); return res; } else diff --git a/src/libutil/memory-source-accessor/json.cc b/src/libutil/memory-source-accessor/json.cc new file mode 100644 index 00000000000..84fbb71bb2e --- /dev/null +++ b/src/libutil/memory-source-accessor/json.cc @@ -0,0 +1,162 @@ +#include "nix/util/memory-source-accessor.hh" +#include "nix/util/nar-accessor.hh" +#include "nix/util/json-utils.hh" + +#include + +namespace nlohmann { + +using namespace nix; + +// fso::Regular +template<> +MemorySourceAccessor::File::Regular adl_serializer::from_json(const json & json) +{ + auto & obj = getObject(json); + return MemorySourceAccessor::File::Regular{ + .executable = getBoolean(valueAt(obj, "executable")), + .contents = getString(valueAt(obj, "contents")), + }; +} + +template<> +void adl_serializer::to_json( + json & json, const MemorySourceAccessor::File::Regular & r) +{ + json = { + {"executable", r.executable}, + {"contents", r.contents}, + }; +} + +template<> +NarListing::Regular adl_serializer::from_json(const json & json) +{ + auto & obj = getObject(json); + auto * execPtr = optionalValueAt(obj, "executable"); + auto * sizePtr = optionalValueAt(obj, "size"); + auto * offsetPtr = optionalValueAt(obj, "narOffset"); + return NarListing::Regular{ + .executable = execPtr ? 
getBoolean(*execPtr) : false, + .contents{ + .fileSize = ptrToOwned(sizePtr), + .narOffset = ptrToOwned(offsetPtr).and_then( + [](auto v) { return v != 0 ? std::optional{v} : std::nullopt; }), + }, + }; +} + +template<> +void adl_serializer::to_json(json & j, const NarListing::Regular & r) +{ + if (r.contents.fileSize) + j["size"] = *r.contents.fileSize; + if (r.executable) + j["executable"] = true; + if (r.contents.narOffset) + j["narOffset"] = *r.contents.narOffset; +} + +template +void adl_serializer>::to_json(json & j, const fso::DirectoryT & d) +{ + j["entries"] = d.entries; +} + +template +fso::DirectoryT adl_serializer>::from_json(const json & json) +{ + auto & obj = getObject(json); + return fso::DirectoryT{ + .entries = valueAt(obj, "entries"), + }; +} + +// fso::Symlink +fso::Symlink adl_serializer::from_json(const json & json) +{ + auto & obj = getObject(json); + return fso::Symlink{ + .target = getString(valueAt(obj, "target")), + }; +} + +void adl_serializer::to_json(json & json, const fso::Symlink & s) +{ + json = { + {"target", s.target}, + }; +} + +// fso::Opaque +fso::Opaque adl_serializer::from_json(const json &) +{ + return fso::Opaque{}; +} + +void adl_serializer::to_json(json & j, const fso::Opaque &) +{ + j = nlohmann::json::object(); +} + +// fso::VariantT - generic implementation +template +void adl_serializer>::to_json( + json & j, const fso::VariantT & val) +{ + using Variant = fso::VariantT; + j = nlohmann::json::object(); + std::visit( + overloaded{ + [&](const typename Variant::Regular & r) { + j = r; + j["type"] = "regular"; + }, + [&](const typename Variant::Directory & d) { + j = d; + j["type"] = "directory"; + }, + [&](const typename Variant::Symlink & s) { + j = s; + j["type"] = "symlink"; + }, + }, + val.raw); +} + +template +fso::VariantT +adl_serializer>::from_json(const json & json) +{ + using Variant = fso::VariantT; + auto & obj = getObject(json); + auto type = getString(valueAt(obj, "type")); + if (type == "regular") + 
return static_cast(json); + if (type == "directory") + return static_cast(json); + if (type == "symlink") + return static_cast(json); + else + throw Error("unknown type of file '%s'", type); +} + +// Explicit instantiations for VariantT types we use +template struct adl_serializer; +template struct adl_serializer; +template struct adl_serializer; + +// MemorySourceAccessor +MemorySourceAccessor adl_serializer::from_json(const json & json) +{ + MemorySourceAccessor res; + res.root = json; + return res; +} + +void adl_serializer::to_json(json & json, const MemorySourceAccessor & val) +{ + json = val.root; +} + +} // namespace nlohmann diff --git a/src/libutil/meson.build b/src/libutil/meson.build index 0eab48bcbf2..0b4a0841f90 100644 --- a/src/libutil/meson.build +++ b/src/libutil/meson.build @@ -118,7 +118,6 @@ config_priv_h = configure_file( ) subdir('nix-meson-build-support/common') -subdir('nix-meson-build-support/asan-options') sources = [ config_priv_h ] + files( 'archive.cc', @@ -147,7 +146,9 @@ sources = [ config_priv_h ] + files( 'json-utils.cc', 'logging.cc', 'memory-source-accessor.cc', + 'memory-source-accessor/json.cc', 'mounted-source-accessor.cc', + 'nar-accessor.cc', 'pos-table.cc', 'position.cc', 'posix-source-accessor.cc', @@ -157,7 +158,6 @@ sources = [ config_priv_h ] + files( 'source-accessor.cc', 'source-path.cc', 'strings.cc', - 'subdir-source-accessor.cc', 'suggestions.cc', 'table.cc', 'tarfile.cc', diff --git a/src/libstore/nar-accessor.cc b/src/libutil/nar-accessor.cc similarity index 70% rename from src/libstore/nar-accessor.cc rename to src/libutil/nar-accessor.cc index 63fe774c978..5644ca4081b 100644 --- a/src/libstore/nar-accessor.cc +++ b/src/libutil/nar-accessor.cc @@ -1,4 +1,4 @@ -#include "nix/store/nar-accessor.hh" +#include "nix/util/nar-accessor.hh" #include "nix/util/archive.hh" #include @@ -111,6 +111,7 @@ struct NarAccessor : public SourceAccessor path, NarMember{.stat = {.type = Type::tRegular, .fileSize = 0, .isExecutable = 
false, .narOffset = 0}}); NarMemberConstructor nmc{nm, pos}; + nmc.skipContents = true; /* Don't care about contents. */ func(nmc); } @@ -141,14 +142,17 @@ struct NarAccessor : public SourceAccessor parseDump(indexer, indexer); } - NarAccessor(const std::string & listing, GetNarBytes getNarBytes) - : getNarBytes(getNarBytes) + NarAccessor(Source & source, GetNarBytes getNarBytes) + : getNarBytes(std::move(getNarBytes)) { - using json = nlohmann::json; - - std::function recurse; + NarIndexer indexer(*this, source); + parseDump(indexer, indexer); + } - recurse = [&](NarMember & member, json & v) { + NarAccessor(const nlohmann::json & listing, GetNarBytes getNarBytes) + : getNarBytes(getNarBytes) + { + [&](this const auto & recurse, NarMember & member, const nlohmann::json & v) -> void { std::string type = v["type"]; if (type == "directory") { @@ -167,10 +171,7 @@ struct NarAccessor : public SourceAccessor member.target = v.value("target", ""); } else return; - }; - - json v = json::parse(listing); - recurse(root, v); + }(root, listing); } NarMember * find(const CanonPath & path) @@ -251,46 +252,78 @@ ref makeNarAccessor(Source & source) return make_ref(source); } -ref makeLazyNarAccessor(const std::string & listing, GetNarBytes getNarBytes) +ref makeLazyNarAccessor(const nlohmann::json & listing, GetNarBytes getNarBytes) { return make_ref(listing, getNarBytes); } -using nlohmann::json; +ref makeLazyNarAccessor(Source & source, GetNarBytes getNarBytes) +{ + return make_ref(source, getNarBytes); +} + +GetNarBytes seekableGetNarBytes(const Path & path) +{ + AutoCloseFD fd = toDescriptor(open( + path.c_str(), + O_RDONLY +#ifdef O_CLOEXEC + | O_CLOEXEC +#endif + )); + if (!fd) + throw SysError("opening NAR cache file '%s'", path); + + return [inner = seekableGetNarBytes(fd.get()), fd = make_ref(std::move(fd))]( + uint64_t offset, uint64_t length) { return inner(offset, length); }; +} -json listNar(ref accessor, const CanonPath & path, bool recurse) +GetNarBytes 
seekableGetNarBytes(Descriptor fd) { - auto st = accessor->lstat(path); + return [fd](uint64_t offset, uint64_t length) { + if (::lseek(fromDescriptorReadOnly(fd), offset, SEEK_SET) == -1) + throw SysError("seeking in file"); - json obj = json::object(); + std::string buf(length, 0); + readFull(fd, buf.data(), length); + + return buf; + }; +} + +template +using ListNarResult = std::conditional_t; + +template +static ListNarResult listNarImpl(SourceAccessor & accessor, const CanonPath & path) +{ + auto st = accessor.lstat(path); switch (st.type) { case SourceAccessor::Type::tRegular: - obj["type"] = "regular"; - if (st.fileSize) - obj["size"] = *st.fileSize; - if (st.isExecutable) - obj["executable"] = true; - if (st.narOffset && *st.narOffset) - obj["narOffset"] = *st.narOffset; - break; - case SourceAccessor::Type::tDirectory: - obj["type"] = "directory"; - { - obj["entries"] = json::object(); - json & res2 = obj["entries"]; - for (const auto & [name, type] : accessor->readDirectory(path)) { - if (recurse) { - res2[name] = listNar(accessor, path / name, true); - } else - res2[name] = json::object(); + return typename ListNarResult::Regular{ + .executable = st.isExecutable, + .contents = + NarListingRegularFile{ + .fileSize = st.fileSize, + .narOffset = st.narOffset && *st.narOffset ? 
st.narOffset : std::nullopt, + }, + }; + case SourceAccessor::Type::tDirectory: { + typename ListNarResult::Directory dir; + for (const auto & [name, type] : accessor.readDirectory(path)) { + if constexpr (deep) { + dir.entries.emplace(name, listNarImpl(accessor, path / name)); + } else { + dir.entries.emplace(name, fso::Opaque{}); } } - break; + return dir; + } case SourceAccessor::Type::tSymlink: - obj["type"] = "symlink"; - obj["target"] = accessor->readLink(path); - break; + return typename ListNarResult::Symlink{ + .target = accessor.readLink(path), + }; case SourceAccessor::Type::tBlock: case SourceAccessor::Type::tChar: case SourceAccessor::Type::tSocket: @@ -298,7 +331,16 @@ json listNar(ref accessor, const CanonPath & path, bool recurse) case SourceAccessor::Type::tUnknown: assert(false); // cannot happen for NARs } - return obj; +} + +NarListing listNarDeep(SourceAccessor & accessor, const CanonPath & path) +{ + return listNarImpl(accessor, path); +} + +ShallowNarListing listNarShallow(SourceAccessor & accessor, const CanonPath & path) +{ + return listNarImpl(accessor, path); } } // namespace nix diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc index fe3bcb1c1c7..abbab45db21 100644 --- a/src/libutil/posix-source-accessor.cc +++ b/src/libutil/posix-source-accessor.cc @@ -7,8 +7,9 @@ namespace nix { -PosixSourceAccessor::PosixSourceAccessor(std::filesystem::path && argRoot) +PosixSourceAccessor::PosixSourceAccessor(std::filesystem::path && argRoot, bool trackLastModified) : root(std::move(argRoot)) + , trackLastModified(trackLastModified) { assert(root.empty() || root.is_absolute()); displayPrefix = root.string(); @@ -19,11 +20,11 @@ PosixSourceAccessor::PosixSourceAccessor() { } -SourcePath PosixSourceAccessor::createAtRoot(const std::filesystem::path & path) +SourcePath PosixSourceAccessor::createAtRoot(const std::filesystem::path & path, bool trackLastModified) { std::filesystem::path path2 = absPath(path); return 
{ - make_ref(path2.root_path()), + make_ref(path2.root_path(), trackLastModified), CanonPath{path2.relative_path().string()}, }; } @@ -114,9 +115,12 @@ std::optional PosixSourceAccessor::maybeLstat(const CanonP auto st = cachedLstat(path); if (!st) return std::nullopt; - // This makes the accessor thread-unsafe, but we only seem to use the actual value in a single threaded context in - // `src/libfetchers/path.cc`. - mtime = std::max(mtime, st->st_mtime); + + /* The contract is that trackLastModified implies that the caller uses the accessor + from a single thread. Thus this is not a CAS loop. */ + if (trackLastModified) + mtime = std::max(mtime, st->st_mtime); + return Stat{ .type = S_ISREG(st->st_mode) ? tRegular : S_ISDIR(st->st_mode) ? tDirectory diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index ba153625ee9..9791b4fed8d 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -1,4 +1,5 @@ #include "nix/util/serialise.hh" +#include "nix/util/compression.hh" #include "nix/util/signals.hh" #include "nix/util/util.hh" @@ -200,6 +201,17 @@ bool FdSource::hasData() } } +void FdSource::restart() +{ + if (!isSeekable) + throw Error("can't seek to the start of a file"); + buffer.reset(); + read = bufPosIn = bufPosOut = 0; + int fd_ = fromDescriptorReadOnly(fd); + if (lseek(fd_, 0, SEEK_SET) == -1) + throw SysError("seeking to the start of a file"); +} + void FdSource::skip(size_t len) { /* Discard data in the buffer. 
*/ @@ -252,6 +264,19 @@ void StringSource::skip(size_t len) pos += len; } +CompressedSource::CompressedSource(RestartableSource & source, const std::string & compressionMethod) + : compressedData([&]() { + StringSink sink; + auto compressionSink = makeCompressionSink(compressionMethod, sink); + source.drainInto(*compressionSink); + compressionSink->finish(); + return std::move(sink.s); + }()) + , compressionMethod(compressionMethod) + , stringSource(compressedData) +{ +} + std::unique_ptr sourceToSink(std::function fun) { struct SourceToSink : FinishSink diff --git a/src/libutil/strings.cc b/src/libutil/strings.cc index a87567cefaf..91a0f73ec1d 100644 --- a/src/libutil/strings.cc +++ b/src/libutil/strings.cc @@ -5,6 +5,7 @@ #include "nix/util/strings-inline.hh" #include "nix/util/os-string.hh" #include "nix/util/error.hh" +#include "nix/util/util.hh" namespace nix { @@ -138,4 +139,28 @@ std::list shellSplitString(std::string_view s) return result; } + +std::string optionalBracket(std::string_view prefix, std::string_view content, std::string_view suffix) +{ + if (content.empty()) { + return ""; + } + std::string result; + result.reserve(prefix.size() + content.size() + suffix.size()); + result.append(prefix); + result.append(content); + result.append(suffix); + return result; +} + +const char * requireCString(const std::string & s) +{ + if (std::memchr(s.data(), '\0', s.size())) [[unlikely]] { + using namespace std::string_view_literals; + auto str = replaceStrings(s, "\0"sv, "␀"sv); + throw Error("string '%s' with null (\\0) bytes used where it's not allowed", str); + } + return s.c_str(); +} + } // namespace nix diff --git a/src/libutil/subdir-source-accessor.cc b/src/libutil/subdir-source-accessor.cc deleted file mode 100644 index d4f57e2f793..00000000000 --- a/src/libutil/subdir-source-accessor.cc +++ /dev/null @@ -1,59 +0,0 @@ -#include "nix/util/source-accessor.hh" - -namespace nix { - -struct SubdirSourceAccessor : SourceAccessor -{ - ref parent; - - 
CanonPath subdirectory; - - SubdirSourceAccessor(ref && parent, CanonPath && subdirectory) - : parent(std::move(parent)) - , subdirectory(std::move(subdirectory)) - { - displayPrefix.clear(); - } - - std::string readFile(const CanonPath & path) override - { - return parent->readFile(subdirectory / path); - } - - void readFile(const CanonPath & path, Sink & sink, std::function sizeCallback) override - { - return parent->readFile(subdirectory / path, sink, sizeCallback); - } - - bool pathExists(const CanonPath & path) override - { - return parent->pathExists(subdirectory / path); - } - - std::optional maybeLstat(const CanonPath & path) override - { - return parent->maybeLstat(subdirectory / path); - } - - DirEntries readDirectory(const CanonPath & path) override - { - return parent->readDirectory(subdirectory / path); - } - - std::string readLink(const CanonPath & path) override - { - return parent->readLink(subdirectory / path); - } - - std::string showPath(const CanonPath & path) override - { - return displayPrefix + parent->showPath(subdirectory / path) + displaySuffix; - } -}; - -ref projectSubdirSourceAccessor(ref parent, CanonPath subdirectory) -{ - return make_ref(std::move(parent), std::move(subdirectory)); -} - -} // namespace nix diff --git a/src/libutil/terminal.cc b/src/libutil/terminal.cc index a00892ac047..c52cc14975b 100644 --- a/src/libutil/terminal.cc +++ b/src/libutil/terminal.cc @@ -64,6 +64,16 @@ inline std::pair charWidthUTF8Helper(std::string_view s) namespace nix { +bool isTTY(Descriptor fd) +{ +#ifndef _WIN32 + return isatty(fd); +#else + DWORD mode; + return GetConsoleMode(fd, &mode); +#endif +} + bool isTTY() { static const bool tty = isatty(STDERR_FILENO) && getEnv("TERM").value_or("dumb") != "dumb" diff --git a/src/libutil/thread-pool.cc b/src/libutil/thread-pool.cc index b7740bc3e3b..24bdeef8670 100644 --- a/src/libutil/thread-pool.cc +++ b/src/libutil/thread-pool.cc @@ -41,12 +41,12 @@ void ThreadPool::shutdown() thr.join(); } -void 
ThreadPool::enqueue(const work_t & t) +void ThreadPool::enqueue(work_t t) { auto state(state_.lock()); if (quit) throw ThreadPoolShutDown("cannot enqueue a work item while the thread pool is shutting down"); - state->pending.push(t); + state->pending.push(std::move(t)); /* Note: process() also executes items, so count it as a worker. */ if (state->pending.size() > state->workers.size() + 1 && state->workers.size() + 1 < maxThreads) state->workers.emplace_back(&ThreadPool::doWork, this, false); diff --git a/src/libutil/unix/file-descriptor.cc b/src/libutil/unix/file-descriptor.cc index 2b612e85488..d90342ff0ec 100644 --- a/src/libutil/unix/file-descriptor.cc +++ b/src/libutil/unix/file-descriptor.cc @@ -1,3 +1,4 @@ +#include "nix/util/canon-path.hh" #include "nix/util/file-system.hh" #include "nix/util/signals.hh" #include "nix/util/finally.hh" @@ -7,6 +8,14 @@ #include #include +#if defined(__linux__) && defined(__NR_openat2) +# define HAVE_OPENAT2 1 +# include +# include +#else +# define HAVE_OPENAT2 0 +#endif + #include "util-config-private.hh" #include "util-unix-config-private.hh" @@ -223,4 +232,107 @@ void unix::closeOnExec(int fd) throw SysError("setting close-on-exec flag"); } +#ifdef __linux__ + +namespace linux { + +std::optional openat2(Descriptor dirFd, const char * path, uint64_t flags, uint64_t mode, uint64_t resolve) +{ +# if HAVE_OPENAT2 + /* Cache the result of whether openat2 is not supported. */ + static std::atomic_flag unsupported{}; + + if (!unsupported.test()) { + /* No glibc wrapper yet, but there's a patch: + * https://patchwork.sourceware.org/project/glibc/patch/20251029200519.3203914-1-adhemerval.zanella@linaro.org/ + */ + auto how = ::open_how{.flags = flags, .mode = mode, .resolve = resolve}; + auto res = ::syscall(__NR_openat2, dirFd, path, &how, sizeof(how)); + /* Cache that the syscall is not supported. 
*/ + if (res < 0 && errno == ENOSYS) { + unsupported.test_and_set(); + return std::nullopt; + } + + return res; + } +# endif + return std::nullopt; +} + +} // namespace linux + +#endif + +static Descriptor +openFileEnsureBeneathNoSymlinksIterative(Descriptor dirFd, const CanonPath & path, int flags, mode_t mode) +{ + AutoCloseFD parentFd; + auto nrComponents = std::ranges::distance(path); + assert(nrComponents >= 1); + auto components = std::views::take(path, nrComponents - 1); /* Everything but last component */ + auto getParentFd = [&]() { return parentFd ? parentFd.get() : dirFd; }; + + /* This rather convoluted loop is necessary to avoid TOCTOU when validating that + no inner path component is a symlink. */ + for (auto it = components.begin(); it != components.end(); ++it) { + auto component = std::string(*it); /* Copy into a string to make NUL terminated. */ + assert(component != ".." && !component.starts_with('/')); /* In case invariant is broken somehow.. */ + + AutoCloseFD parentFd2 = ::openat( + getParentFd(), /* First iteration uses dirFd. */ + component.c_str(), + O_DIRECTORY | O_NOFOLLOW | O_CLOEXEC +#ifdef __linux__ + | O_PATH /* Linux-specific optimization. Files are open only for path resolution purposes. */ +#endif +#ifdef __FreeBSD__ + | O_RESOLVE_BENEATH /* Further guard against any possible SNAFUs. */ +#endif + ); + + if (!parentFd2) { + /* Construct the CanonPath for error message. */ + auto path2 = std::ranges::fold_left(components.begin(), ++it, CanonPath::root, [](auto lhs, auto rhs) { + lhs.push(rhs); + return lhs; + }); + + if (errno == ENOTDIR) /* Path component might be a symlink. */ { + struct ::stat st; + if (::fstatat(getParentFd(), component.c_str(), &st, AT_SYMLINK_NOFOLLOW) == 0 && S_ISLNK(st.st_mode)) + throw unix::SymlinkNotAllowed(path2); + errno = ENOTDIR; /* Restore the errno. 
*/ + } else if (errno == ELOOP) { + throw unix::SymlinkNotAllowed(path2); + } + + return INVALID_DESCRIPTOR; + } + + parentFd = std::move(parentFd2); + } + + auto res = ::openat(getParentFd(), std::string(path.baseName().value()).c_str(), flags | O_NOFOLLOW, mode); + if (res < 0 && errno == ELOOP) + throw unix::SymlinkNotAllowed(path); + return res; +} + +Descriptor unix::openFileEnsureBeneathNoSymlinks(Descriptor dirFd, const CanonPath & path, int flags, mode_t mode) +{ + assert(!path.rel().starts_with('/')); /* Just in case the invariant is somehow broken. */ + assert(!path.isRoot()); +#ifdef __linux__ + auto maybeFd = linux::openat2( + dirFd, path.rel_c_str(), flags, static_cast(mode), RESOLVE_BENEATH | RESOLVE_NO_SYMLINKS); + if (maybeFd) { + if (*maybeFd < 0 && errno == ELOOP) + throw unix::SymlinkNotAllowed(path); + return *maybeFd; + } +#endif + return openFileEnsureBeneathNoSymlinksIterative(dirFd, path, flags, mode); +} + } // namespace nix diff --git a/src/libutil/unix/file-system.cc b/src/libutil/unix/file-system.cc index 8ff66328ba2..77b83858f6d 100644 --- a/src/libutil/unix/file-system.cc +++ b/src/libutil/unix/file-system.cc @@ -16,7 +16,7 @@ namespace nix { Descriptor openDirectory(const std::filesystem::path & path) { - return open(path.c_str(), O_RDONLY | O_DIRECTORY); + return open(path.c_str(), O_RDONLY | O_DIRECTORY | O_CLOEXEC); } void setWriteTime( diff --git a/src/libutil/unix/signals.cc b/src/libutil/unix/signals.cc index d6efd6aa7b1..de441492a89 100644 --- a/src/libutil/unix/signals.cc +++ b/src/libutil/unix/signals.cc @@ -12,24 +12,14 @@ using namespace unix; std::atomic unix::_isInterrupted = false; -namespace unix { -static thread_local bool interruptThrown = false; -} - thread_local std::function unix::interruptCheck; -void setInterruptThrown() -{ - unix::interruptThrown = true; -} - void unix::_interrupted() { /* Block user interrupts while an exception is being handled. 
Throwing an exception while another exception is being handled kills the program! */ - if (!interruptThrown && !std::uncaught_exceptions()) { - interruptThrown = true; + if (!std::uncaught_exceptions()) { throw Interrupted("interrupted by the user"); } } diff --git a/src/libutil/unix/users.cc b/src/libutil/unix/users.cc index 09b38be5e7f..870bbe3767f 100644 --- a/src/libutil/unix/users.cc +++ b/src/libutil/unix/users.cc @@ -18,7 +18,7 @@ std::string getUserName() return name; } -Path getHomeOf(uid_t userId) +std::filesystem::path getHomeOf(uid_t userId) { std::vector buf(16384); struct passwd pwbuf; @@ -28,9 +28,9 @@ Path getHomeOf(uid_t userId) return pw->pw_dir; } -Path getHome() +std::filesystem::path getHome() { - static Path homeDir = []() { + static std::filesystem::path homeDir = []() { std::optional unownedUserHomeDir = {}; auto homeDir = getEnv("HOME"); if (homeDir) { diff --git a/src/libutil/url.cc b/src/libutil/url.cc index 7410e4062bd..0a8b6452814 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -4,6 +4,7 @@ #include "nix/util/split.hh" #include "nix/util/canon-path.hh" #include "nix/util/strings-inline.hh" +#include "nix/util/file-system.hh" #include @@ -326,8 +327,11 @@ Path renderUrlPathEnsureLegal(const std::vector & urlPath) /* This is only really valid for UNIX. Windows has more restrictions. 
*/ if (comp.contains('/')) throw BadURL("URL path component '%s' contains '/', which is not allowed in file names", comp); - if (comp.contains(char(0))) - throw BadURL("URL path component '%s' contains NUL byte which is not allowed", comp); + if (comp.contains(char(0))) { + using namespace std::string_view_literals; + auto str = replaceStrings(comp, "\0"sv, "␀"sv); + throw BadURL("URL path component '%s' contains NUL byte which is not allowed", str); + } } return concatStringsSep("/", urlPath); @@ -408,21 +412,23 @@ ParsedUrlScheme parseUrlScheme(std::string_view scheme) }; } -ParsedURL fixGitURL(const std::string & url) +ParsedURL fixGitURL(std::string url) { std::regex scpRegex("([^/]*)@(.*):(.*)"); if (!hasPrefix(url, "/") && std::regex_match(url, scpRegex)) - return parseURL(std::regex_replace(url, scpRegex, "ssh://$1@$2/$3")); - if (hasPrefix(url, "file:")) - return parseURL(url); - if (url.find("://") == std::string::npos) { + url = std::regex_replace(url, scpRegex, "ssh://$1@$2/$3"); + if (!hasPrefix(url, "file:") && !hasPrefix(url, "git+file:") && url.find("://") == std::string::npos) return ParsedURL{ .scheme = "file", .authority = ParsedURL::Authority{}, .path = splitString>(url, "/"), }; - } - return parseURL(url); + auto parsed = parseURL(url); + // Drop the superfluous "git+" from the scheme. 
+ auto scheme = parseUrlScheme(parsed.scheme); + if (scheme.application == "git") + parsed.scheme = scheme.transport; + return parsed; } // https://www.rfc-editor.org/rfc/rfc3986#section-3.1 @@ -440,4 +446,21 @@ std::ostream & operator<<(std::ostream & os, const VerbatimURL & url) return os; } +std::optional VerbatimURL::lastPathSegment() const +{ + try { + auto parsedUrl = parsed(); + auto segments = parsedUrl.pathSegments(/*skipEmpty=*/true); + if (std::ranges::empty(segments)) + return std::nullopt; + return segments.back(); + } catch (BadURL &) { + // Fall back to baseNameOf for unparsable URLs + auto name = baseNameOf(to_string()); + if (name.empty()) + return std::nullopt; + return std::string{name}; + } +} + } // namespace nix diff --git a/src/libutil/users.cc b/src/libutil/users.cc index f19a5d39c76..1fa643730cd 100644 --- a/src/libutil/users.cc +++ b/src/libutil/users.cc @@ -5,7 +5,7 @@ namespace nix { -Path getCacheDir() +std::filesystem::path getCacheDir() { auto dir = getEnv("NIX_CACHE_HOME"); if (dir) { @@ -13,14 +13,14 @@ Path getCacheDir() } else { auto xdgDir = getEnv("XDG_CACHE_HOME"); if (xdgDir) { - return *xdgDir + "/nix"; + return std::filesystem::path{*xdgDir} / "nix"; } else { - return getHome() + "/.cache/nix"; + return getHome() / ".cache" / "nix"; } } } -Path getConfigDir() +std::filesystem::path getConfigDir() { auto dir = getEnv("NIX_CONFIG_HOME"); if (dir) { @@ -28,26 +28,27 @@ Path getConfigDir() } else { auto xdgDir = getEnv("XDG_CONFIG_HOME"); if (xdgDir) { - return *xdgDir + "/nix"; + return std::filesystem::path{*xdgDir} / "nix"; } else { - return getHome() + "/.config/nix"; + return getHome() / ".config" / "nix"; } } } -std::vector getConfigDirs() +std::vector getConfigDirs() { - Path configHome = getConfigDir(); + std::filesystem::path configHome = getConfigDir(); auto configDirs = getEnv("XDG_CONFIG_DIRS").value_or("/etc/xdg"); - std::vector result = tokenizeString>(configDirs, ":"); - for (auto & p : result) { - p += "/nix"; + 
auto tokens = tokenizeString>(configDirs, ":"); + std::vector result; + result.push_back(configHome); + for (auto & token : tokens) { + result.push_back(std::filesystem::path{token} / "nix"); } - result.insert(result.begin(), configHome); return result; } -Path getDataDir() +std::filesystem::path getDataDir() { auto dir = getEnv("NIX_DATA_HOME"); if (dir) { @@ -55,14 +56,14 @@ Path getDataDir() } else { auto xdgDir = getEnv("XDG_DATA_HOME"); if (xdgDir) { - return *xdgDir + "/nix"; + return std::filesystem::path{*xdgDir} / "nix"; } else { - return getHome() + "/.local/share/nix"; + return getHome() / ".local" / "share" / "nix"; } } } -Path getStateDir() +std::filesystem::path getStateDir() { auto dir = getEnv("NIX_STATE_HOME"); if (dir) { @@ -70,16 +71,16 @@ Path getStateDir() } else { auto xdgDir = getEnv("XDG_STATE_HOME"); if (xdgDir) { - return *xdgDir + "/nix"; + return std::filesystem::path{*xdgDir} / "nix"; } else { - return getHome() + "/.local/state/nix"; + return getHome() / ".local" / "state" / "nix"; } } } -Path createNixStateDir() +std::filesystem::path createNixStateDir() { - Path dir = getStateDir(); + std::filesystem::path dir = getStateDir(); createDirs(dir); return dir; } @@ -89,7 +90,7 @@ std::string expandTilde(std::string_view path) // TODO: expand ~user ? 
auto tilde = path.substr(0, 2); if (tilde == "~/" || tilde == "~") - return getHome() + std::string(path.substr(1)); + return getHome().string() + std::string(path.substr(1)); else return std::string(path); } diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 383a904adfc..d75aa4d67d9 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -132,16 +132,62 @@ std::optional string2Float(const std::string_view s) template std::optional string2Float(const std::string_view s); template std::optional string2Float(const std::string_view s); -std::string renderSize(uint64_t value, bool align) +static const int64_t conversionNumber = 1024; + +SizeUnit getSizeUnit(int64_t value) +{ + auto unit = sizeUnits.begin(); + uint64_t absValue = std::abs(value); + while (absValue > conversionNumber && unit < sizeUnits.end()) { + unit++; + absValue /= conversionNumber; + } + return *unit; +} + +std::optional getCommonSizeUnit(std::initializer_list values) { - static const std::array prefixes{{'K', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y'}}; - size_t power = 0; - double res = value; - while (res > 1024 && power < prefixes.size()) { - ++power; - res /= 1024; + assert(values.size() > 0); + + auto it = values.begin(); + SizeUnit unit = getSizeUnit(*it); + it++; + + for (; it != values.end(); it++) { + if (unit != getSizeUnit(*it)) { + return std::nullopt; + } } - return fmt(align ? "%6.1f %ciB" : "%.1f %ciB", power == 0 ? res / 1024 : res, prefixes.at(power)); + + return unit; +} + +std::string renderSizeWithoutUnit(int64_t value, SizeUnit unit, bool align) +{ + // bytes should also displayed as KiB => 100 Bytes => 0.1 KiB + auto power = std::max>(1, std::to_underlying(unit)); + double denominator = std::pow(conversionNumber, power); + double result = (double) value / denominator; + return fmt(align ? 
"%6.1f" : "%.1f", result); +} + +char getSizeUnitSuffix(SizeUnit unit) +{ + switch (unit) { +#define NIX_UTIL_DEFINE_SIZE_UNIT(name, suffix) \ + case SizeUnit::name: \ + return suffix; + NIX_UTIL_SIZE_UNITS +#undef NIX_UTIL_DEFINE_SIZE_UNIT + } + + assert(false); +} + +std::string renderSize(int64_t value, bool align) +{ + SizeUnit unit = getSizeUnit(value); + return fmt("%s %ciB", renderSizeWithoutUnit(value, unit, align), getSizeUnitSuffix(unit)); } bool hasPrefix(std::string_view s, std::string_view prefix) @@ -256,9 +302,4 @@ std::pair getLine(std::string_view s) } } -std::string showBytes(uint64_t bytes) -{ - return fmt("%.2f MiB", bytes / (1024.0 * 1024.0)); -} - } // namespace nix diff --git a/src/libutil/windows/include/nix/util/signals-impl.hh b/src/libutil/windows/include/nix/util/signals-impl.hh index d1c79cab92e..af5a5336e76 100644 --- a/src/libutil/windows/include/nix/util/signals-impl.hh +++ b/src/libutil/windows/include/nix/util/signals-impl.hh @@ -17,11 +17,6 @@ static inline bool getInterrupted() return false; } -inline void setInterruptThrown() -{ - /* Do nothing for now */ -} - static inline bool isInterrupted() { /* Do nothing for now */ diff --git a/src/libutil/windows/users.cc b/src/libutil/windows/users.cc index 6cc753cec8e..eb92e7ab6ae 100644 --- a/src/libutil/windows/users.cc +++ b/src/libutil/windows/users.cc @@ -35,12 +35,12 @@ std::string getUserName() return name; } -Path getHome() +std::filesystem::path getHome() { - static Path homeDir = []() { - Path homeDir = getEnv("USERPROFILE").value_or("C:\\Users\\Default"); + static std::filesystem::path homeDir = []() { + std::filesystem::path homeDir = getEnv("USERPROFILE").value_or("C:\\Users\\Default"); assert(!homeDir.empty()); - return canonPath(homeDir); + return canonPath(homeDir.string()); }(); return homeDir; } diff --git a/src/nix/app.cc b/src/nix/app.cc index 9d3ea6b352e..07c7c55cfdb 100644 --- a/src/nix/app.cc +++ b/src/nix/app.cc @@ -145,9 +145,9 @@ App UnresolvedApp::resolve(ref 
evalStore, ref store) auto res = unresolved; auto builtContext = build(evalStore, store); - res.program = resolveString(*store, unresolved.program, builtContext); - if (!store->isInStore(res.program)) - throw Error("app program '%s' is not in the Nix store", res.program); + res.program = resolveString(*store, unresolved.program.string(), builtContext); + if (!store->isInStore(res.program.string())) + throw Error("app program '%s' is not in the Nix store", res.program.string()); return res; } diff --git a/src/nix/asan-options.cc b/src/nix/asan-options.cc deleted file mode 100644 index 256f34cbed1..00000000000 --- a/src/nix/asan-options.cc +++ /dev/null @@ -1,6 +0,0 @@ -extern "C" [[gnu::retain]] const char * __asan_default_options() -{ - // We leak a bunch of memory knowingly on purpose. It's not worthwhile to - // diagnose that memory being leaked for now. - return "abort_on_error=1:print_summary=1:detect_leaks=0"; -} diff --git a/src/nix/build-remote/build-remote.cc b/src/nix/build-remote/build-remote.cc index ffb77ddf1c8..f62712d30ea 100644 --- a/src/nix/build-remote/build-remote.cc +++ b/src/nix/build-remote/build-remote.cc @@ -322,7 +322,7 @@ static int main_build_remote(int argc, char ** argv) // output ids, which break CA derivations if (!drv.inputDrvs.map.empty()) drv.inputSrcs = store->parseStorePathSet(inputs); - optResult = sshStore->buildDerivation(*drvPath, (const BasicDerivation &) drv); + optResult = sshStore->buildDerivation(*drvPath, static_cast(drv)); auto & result = *optResult; if (auto * failureP = result.tryGetFailure()) { if (settings.keepFailed) { diff --git a/src/nix/cat.cc b/src/nix/cat.cc index 145336723f1..dcf47f1fa2b 100644 --- a/src/nix/cat.cc +++ b/src/nix/cat.cc @@ -1,6 +1,11 @@ #include "nix/cmd/command.hh" #include "nix/store/store-api.hh" -#include "nix/store/nar-accessor.hh" +#include "nix/util/archive.hh" +#include "nix/util/nar-accessor.hh" +#include "nix/util/serialise.hh" +#include "nix/util/source-accessor.hh" + +#include 
using namespace nix; @@ -41,10 +46,7 @@ struct CmdCatStore : StoreCommand, MixCat void run(ref store) override { auto [storePath, rest] = store->toStorePath(path); - auto accessor = store->getFSAccessor(storePath); - if (!accessor) - throw InvalidPath("path '%1%' is not a valid store path", store->printStorePath(storePath)); - cat(ref{std::move(accessor)}, CanonPath{rest}); + cat(store->requireStoreObjectAccessor(storePath), CanonPath{rest}); } }; @@ -74,7 +76,44 @@ struct CmdCatNar : StoreCommand, MixCat void run(ref store) override { - cat(makeNarAccessor(readFile(narPath)), CanonPath{path}); + AutoCloseFD fd = toDescriptor(open(narPath.c_str(), O_RDONLY)); + if (!fd) + throw SysError("opening NAR file '%s'", narPath); + auto source = FdSource{fd.get()}; + + struct CatRegularFileSink : NullFileSystemObjectSink + { + CanonPath neededPath = CanonPath::root; + bool found = false; + + void createRegularFile(const CanonPath & path, std::function crf) override + { + struct : CreateRegularFileSink, FdSink + { + void isExecutable() override {} + } crfSink; + + crfSink.fd = INVALID_DESCRIPTOR; + + if (path == neededPath) { + logger->stop(); + crfSink.skipContents = false; + crfSink.fd = getStandardOutput(); + found = true; + } else { + crfSink.skipContents = true; + } + + crf(crfSink); + } + } sink; + + sink.neededPath = CanonPath(path); + /* NOTE: We still parse the whole file to validate that it's a correct NAR. 
*/ + parseDump(sink, source); + + if (!sink.found) + throw Error("NAR does not contain regular file '%1%'", path); } }; diff --git a/src/nix/derivation-add.cc b/src/nix/derivation-add.cc index 2d13aba52c9..bbaa8759715 100644 --- a/src/nix/derivation-add.cc +++ b/src/nix/derivation-add.cc @@ -33,12 +33,10 @@ struct CmdAddDerivation : MixDryRun, StoreCommand { auto json = nlohmann::json::parse(drainFD(STDIN_FILENO)); - auto drv = Derivation::fromJSON(json); + auto drv = Derivation::parseJsonAndValidate(*store, json); auto drvPath = writeDerivation(*store, drv, NoRepair, /* read only */ dryRun); - drv.checkInvariants(*store, drvPath); - writeDerivation(*store, drv, NoRepair, dryRun); logger->cout("%s", store->printStorePath(drvPath)); diff --git a/src/nix/derivation-add.md b/src/nix/derivation-add.md index 35507d9ad46..69a821d4ee0 100644 --- a/src/nix/derivation-add.md +++ b/src/nix/derivation-add.md @@ -12,8 +12,7 @@ a Nix expression evaluates. [store derivation]: @docroot@/glossary.md#gloss-store-derivation -`nix derivation add` takes a single derivation in the following format: - -{{#include ../../protocols/json/derivation.md}} +`nix derivation add` takes a single derivation in the JSON format. +See [the manual](@docroot@/protocols/json/derivation/index.md) for a documentation of this format. 
)"" diff --git a/src/nix/derivation-show.cc b/src/nix/derivation-show.cc index 20e54bba76b..ce2594ddcab 100644 --- a/src/nix/derivation-show.cc +++ b/src/nix/derivation-show.cc @@ -58,9 +58,13 @@ struct CmdShowDerivation : InstallablesCommand, MixPrintJSON if (!drvPath.isDerivation()) continue; - jsonRoot[drvPath.to_string()] = store->readDerivation(drvPath).toJSON(); + jsonRoot[drvPath.to_string()] = store->readDerivation(drvPath); } - printJSON(jsonRoot); + printJSON( + nlohmann::json{ + {"version", expectedJsonVersionDerivation}, + {"derivations", std::move(jsonRoot)}, + }); } }; diff --git a/src/nix/derivation-show.md b/src/nix/derivation-show.md index 9fff58ef97a..6948b5ba72e 100644 --- a/src/nix/derivation-show.md +++ b/src/nix/derivation-show.md @@ -48,10 +48,9 @@ By default, this command only shows top-level derivations, but with [store derivation]: @docroot@/glossary.md#gloss-store-derivation -`nix derivation show` outputs a JSON map of [store path]s to derivations in the following format: +`nix derivation show` outputs a JSON map of [store path]s to derivations in JSON format. +See [the manual](@docroot@/protocols/json/derivation/index.md) for a documentation of this format. [store path]: @docroot@/store/store-path.md -{{#include ../../protocols/json/derivation.md}} - )"" diff --git a/src/nix/develop.cc b/src/nix/develop.cc index e914d5f6cfb..b5f8db26d17 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -257,10 +257,15 @@ static StorePath getDerivationEnvironment(ref store, ref evalStore drv.args = {store->printStorePath(getEnvShPath)}; /* Remove derivation checks. 
*/ - drv.env.erase("allowedReferences"); - drv.env.erase("allowedRequisites"); - drv.env.erase("disallowedReferences"); - drv.env.erase("disallowedRequisites"); + if (drv.structuredAttrs) { + drv.structuredAttrs->structuredAttrs.erase("outputChecks"); + } else { + drv.env.erase("allowedReferences"); + drv.env.erase("allowedRequisites"); + drv.env.erase("disallowedReferences"); + drv.env.erase("disallowedRequisites"); + } + drv.env.erase("name"); /* Rehash and write the derivation. FIXME: would be nice to use @@ -268,26 +273,24 @@ static StorePath getDerivationEnvironment(ref store, ref evalStore drv.name += "-env"; drv.env.emplace("name", drv.name); drv.inputSrcs.insert(std::move(getEnvShPath)); - if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { - for (auto & output : drv.outputs) { - output.second = DerivationOutput::Deferred{}, drv.env[output.first] = hashPlaceholder(output.first); - } - } else { - for (auto & output : drv.outputs) { - output.second = DerivationOutput::Deferred{}; - drv.env[output.first] = ""; - } - auto hashesModulo = hashDerivationModulo(*evalStore, drv, true); - - for (auto & output : drv.outputs) { - Hash h = hashesModulo.hashes.at(output.first); - auto outPath = store->makeOutputPath(output.first, h, drv.name); - output.second = DerivationOutput::InputAddressed{ - .path = outPath, - }; - drv.env[output.first] = store->printStorePath(outPath); - } + for (auto & [outputName, output] : drv.outputs) { + std::visit( + overloaded{ + [&](const DerivationOutput::InputAddressed &) { + output = DerivationOutput::Deferred{}; + drv.env[outputName] = ""; + }, + [&](const DerivationOutput::CAFixed &) { + output = DerivationOutput::Deferred{}; + drv.env[outputName] = ""; + }, + [&](const auto &) { + // Do nothing for other types (CAFloating, Deferred, Impure) + }, + }, + output.raw); } + drv.fillInOutputPaths(*evalStore); auto shellDrvPath = writeDerivation(*evalStore, drv); @@ -304,10 +307,9 @@ static StorePath getDerivationEnvironment(ref 
store, ref evalStore // path, so return the first non-empty output path. for (auto & [_0, optPath] : evalStore->queryPartialDerivationOutputMap(shellDrvPath)) { assert(optPath); - auto & outPath = *optPath; - auto st = store->getFSAccessor()->lstat(CanonPath(outPath.to_string())); - if (st.fileSize.value_or(0)) - return outPath; + auto accessor = evalStore->requireStoreObjectAccessor(*optPath); + if (auto st = accessor->maybeLstat(CanonPath::root); st && st->fileSize.value_or(0)) + return *optPath; } throw Error("get-env.sh failed to produce an environment"); @@ -504,7 +506,10 @@ struct Common : InstallableCommand, MixProfile debug("reading environment file '%s'", store->printStorePath(shellOutPath)); - return {BuildEnvironment::parseJSON(store->getFSAccessor()->readFile(shellOutPath.to_string())), shellOutPath}; + return { + BuildEnvironment::parseJSON(store->requireStoreObjectAccessor(shellOutPath)->readFile(CanonPath::root)), + shellOutPath, + }; } }; diff --git a/src/nix/diff-closures.cc b/src/nix/diff-closures.cc index 0ce83628d33..a71efa042e8 100644 --- a/src/nix/diff-closures.cc +++ b/src/nix/diff-closures.cc @@ -112,8 +112,7 @@ void printClosureDiff( items.push_back(fmt("%s added", showVersions(added))); } if (showDelta) - items.push_back( - fmt("%s%+.1f KiB" ANSI_NORMAL, sizeDelta > 0 ? ANSI_RED : ANSI_GREEN, sizeDelta / 1024.0)); + items.push_back(fmt("%s%s" ANSI_NORMAL, sizeDelta > 0 ? 
ANSI_RED : ANSI_GREEN, renderSize(sizeDelta))); logger->cout("%s%s: %s", indent, name, concatStringsSep(", ", items)); } } diff --git a/src/nix/dump-path.cc b/src/nix/dump-path.cc index f375b0ac8e4..62fd8987761 100644 --- a/src/nix/dump-path.cc +++ b/src/nix/dump-path.cc @@ -1,13 +1,14 @@ #include "nix/cmd/command.hh" #include "nix/store/store-api.hh" #include "nix/util/archive.hh" +#include "nix/util/terminal.hh" using namespace nix; static FdSink getNarSink() { auto fd = getStandardOutput(); - if (isatty(fd)) + if (isTTY(fd)) throw UsageError("refusing to write NAR to a terminal"); return FdSink(std::move(fd)); } diff --git a/src/nix/eval.cc b/src/nix/eval.cc index 33c091a3511..2f1ba63956f 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -85,9 +85,7 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption if (pathExists(*writeTo)) throw Error("path '%s' already exists", writeTo->string()); - std::function recurse; - - recurse = [&](Value & v, const PosIdx pos, const std::filesystem::path & path) { + [&](this const auto & recurse, Value & v, const PosIdx pos, const std::filesystem::path & path) -> void { state->forceValue(v, pos); if (v.type() == nString) // FIXME: disallow strings with contexts? 
@@ -111,9 +109,7 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption } else state->error("value at '%s' is not a string or an attribute set", state->positions[pos]) .debugThrow(); - }; - - recurse(*v, pos, *writeTo); + }(*v, pos, *writeTo); } else if (raw) { diff --git a/src/nix/flake-prefetch-inputs.cc b/src/nix/flake-prefetch-inputs.cc index 6c7f1e8c842..19fbb0b574b 100644 --- a/src/nix/flake-prefetch-inputs.cc +++ b/src/nix/flake-prefetch-inputs.cc @@ -38,8 +38,7 @@ struct CmdFlakePrefetchInputs : FlakeCommand std::atomic nrFailed{0}; - std::function visit; - visit = [&](const Node & node) { + auto visit = [&](this const auto & visit, const Node & node) { if (!state_.lock()->done.insert(&node).second) return; @@ -48,7 +47,7 @@ struct CmdFlakePrefetchInputs : FlakeCommand return; try { Activity act(*logger, lvlInfo, actUnknown, fmt("fetching '%s'", lockedNode->lockedRef)); - auto accessor = lockedNode->lockedRef.input.getAccessor(fetchSettings, store).first; + auto accessor = lockedNode->lockedRef.input.getAccessor(fetchSettings, *store).first; if (!evalSettings.lazyTrees) fetchToStore( fetchSettings, *store, accessor, FetchMode::Copy, lockedNode->lockedRef.input.getName()); diff --git a/src/nix/flake.cc b/src/nix/flake.cc index be4e4e11506..dc7e82d98a0 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -251,7 +251,7 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON if (storePath) j["path"] = store->printStorePath(*storePath); j["locks"] = lockedFlake.lockFile.toJSON().first; - if (auto fingerprint = lockedFlake.getFingerprint(store, fetchSettings)) + if (auto fingerprint = lockedFlake.getFingerprint(*store, fetchSettings)) j["fingerprint"] = fingerprint->to_string(HashFormat::Base16, false); printJSON(j); } else { @@ -272,18 +272,16 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON logger->cout( ANSI_BOLD "Last modified:" ANSI_NORMAL " %s", std::put_time(std::localtime(&*lastModified), "%F %T")); - if (auto fingerprint = 
lockedFlake.getFingerprint(store, fetchSettings)) + if (auto fingerprint = lockedFlake.getFingerprint(*store, fetchSettings)) logger->cout( ANSI_BOLD "Fingerprint:" ANSI_NORMAL " %s", fingerprint->to_string(HashFormat::Base16, false)); if (!lockedFlake.lockFile.root->inputs.empty()) logger->cout(ANSI_BOLD "Inputs:" ANSI_NORMAL); - std::set> visited; + std::set> visited{lockedFlake.lockFile.root}; - std::function recurse; - - recurse = [&](const Node & node, const std::string & prefix) { + [&](this const auto & recurse, const Node & node, const std::string & prefix) -> void { for (const auto & [i, input] : enumerate(node.inputs)) { bool last = i + 1 == node.inputs.size(); @@ -310,10 +308,7 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON printInputAttrPath(*follows)); } } - }; - - visited.insert(lockedFlake.lockFile.root); - recurse(*lockedFlake.lockFile.root, ""); + }(*lockedFlake.lockFile.root, ""); } } }; @@ -379,7 +374,7 @@ struct CmdFlakeCheck : FlakeCommand throw; } catch (Error & e) { if (settings.keepGoing) { - ignoreExceptionExceptInterrupt(); + logError(e.info()); hasErrors = true; } else throw; @@ -388,7 +383,7 @@ struct CmdFlakeCheck : FlakeCommand Sync> drvPaths_; Sync> omittedSystems; - Sync>> derivedPathToAttrPaths_; + Sync>> derivedPathToAttrPaths_; // FIXME: rewrite to use EvalCache. @@ -487,7 +482,7 @@ struct CmdFlakeCheck : FlakeCommand if (!v.isLambda()) { throw Error("overlay is not a function, but %s instead", showType(v)); } - if (v.lambda().fun->hasFormals() || !argHasName(v.lambda().fun->arg, "final")) + if (v.lambda().fun->getFormals() || !argHasName(v.lambda().fun->arg, "final")) throw Error("overlay does not take an argument named 'final'"); // FIXME: if we have a 'nixpkgs' input, use it to // evaluate the overlay. 
@@ -843,7 +838,7 @@ struct CmdFlakeCheck : FlakeCommand for (auto & attrPath : attrPaths) notice( "✅ " ANSI_BOLD "%s" ANSI_NORMAL ANSI_ITALIC ANSI_FAINT " (previously built)" ANSI_NORMAL, - eval_cache::toAttrPathStr(*state, attrPath)); + attrPath.to_string(*state)); // FIXME: should start building while evaluating. Activity act(*logger, lvlInfo, actUnknown, fmt("running %d flake checks", toBuild.size())); @@ -854,15 +849,13 @@ struct CmdFlakeCheck : FlakeCommand for (auto & buildResult : buildResults) if (buildResult.tryGetSuccess()) for (auto & attrPath : (*derivedPathToAttrPaths)[buildResult.path]) - notice("✅ " ANSI_BOLD "%s" ANSI_NORMAL, eval_cache::toAttrPathStr(*state, attrPath)); + notice("✅ " ANSI_BOLD "%s" ANSI_NORMAL, attrPath.to_string(*state)); // Then cancelled builds. for (auto & buildResult : buildResults) if (buildResult.isCancelled()) for (auto & attrPath : (*derivedPathToAttrPaths)[buildResult.path]) - notice( - "❓ " ANSI_BOLD "%s" ANSI_NORMAL ANSI_FAINT " (cancelled)", - eval_cache::toAttrPathStr(*state, attrPath)); + notice("❓ " ANSI_BOLD "%s" ANSI_NORMAL ANSI_FAINT " (cancelled)", attrPath.to_string(*state)); // Then failures. 
for (auto & buildResult : buildResults) @@ -870,7 +863,7 @@ struct CmdFlakeCheck : FlakeCommand try { hasErrors = true; for (auto & attrPath : (*derivedPathToAttrPaths)[buildResult.path]) - printError("❌ " ANSI_RED "%s" ANSI_NORMAL, eval_cache::toAttrPathStr(*state, attrPath)); + printError("❌ " ANSI_RED "%s" ANSI_NORMAL, attrPath.to_string(*state)); failure->rethrow(); } catch (Error & e) { logError(e.info()); @@ -948,8 +941,7 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand std::vector changedFiles; std::vector conflictedFiles; - std::function copyDir; - copyDir = [&](const SourcePath & from, const std::filesystem::path & to) { + [&](this const auto & copyDir, const SourcePath & from, const std::filesystem::path & to) -> void { createDirs(to); for (auto & [name, entry] : from.readDirectory()) { @@ -999,9 +991,7 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand changedFiles.push_back(to2); notice("wrote: %s", to2); } - }; - - copyDir(templateDir, flakeDir); + }(templateDir, flakeDir); if (!changedFiles.empty() && std::filesystem::exists(std::filesystem::path{flakeDir} / ".git")) { Strings args = {"-C", flakeDir, "add", "--intent-to-add", "--force", "--"}; @@ -1092,7 +1082,7 @@ struct CmdFlakeClone : FlakeCommand if (destDir.empty()) throw Error("missing flag '--dest'"); - getFlakeRef().resolve(fetchSettings, store).input.clone(fetchSettings, store, destDir); + getFlakeRef().resolve(fetchSettings, *store).input.clone(fetchSettings, *store, destDir); } }; @@ -1131,7 +1121,7 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun, MixNoCheckSigs StorePathSet sources; auto storePath = dryRun ? 
flake.flake.lockedRef.input.computeStorePath(*store) - : std::get(flake.flake.lockedRef.input.fetchToStore(fetchSettings, store)); + : std::get(flake.flake.lockedRef.input.fetchToStore(fetchSettings, *store)); sources.insert(storePath); @@ -1143,10 +1133,9 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun, MixNoCheckSigs if (auto inputNode = std::get_if<0>(&input)) { std::optional storePath; if (!(*inputNode)->lockedRef.input.isRelative()) { - storePath = - dryRun - ? (*inputNode)->lockedRef.input.computeStorePath(*store) - : std::get((*inputNode)->lockedRef.input.fetchToStore(fetchSettings, store)); + storePath = dryRun ? (*inputNode)->lockedRef.input.computeStorePath(*store) + : std::get( + (*inputNode)->lockedRef.input.fetchToStore(fetchSettings, *store)); sources.insert(*storePath); } if (json) { @@ -1215,7 +1204,7 @@ struct CmdFlakeShow : FlakeCommand, MixJSON evalSettings.enableImportFromDerivation.setDefault(false); auto state = getEvalState(); - auto flake = std::make_shared(lockFlake()); + auto flake = make_ref(lockFlake()); auto localSystem = std::string(settings.thisSystem.get()); auto cache = openEvalCache(*state, flake); @@ -1228,9 +1217,9 @@ struct CmdFlakeShow : FlakeCommand, MixJSON visit = [&](eval_cache::AttrCursor & visitor, nlohmann::json & j) { auto attrPath = visitor.getAttrPath(); - auto attrPathS = state->symbols.resolve(attrPath); + auto attrPathS = attrPath.resolve(*state); - Activity act(*logger, lvlInfo, actUnknown, fmt("evaluating '%s'", concatStringsSep(".", attrPathS))); + Activity act(*logger, lvlInfo, actUnknown, fmt("evaluating '%s'", attrPath.to_string(*state))); try { auto recurse = [&]() { @@ -1265,7 +1254,7 @@ struct CmdFlakeShow : FlakeCommand, MixJSON auto omit = [&](std::string_view flag) { if (json) - logger->warn(fmt("%s omitted (use '%s' to show)", concatStringsSep(".", attrPathS), flag)); + logger->warn(fmt("%s omitted (use '%s' to show)", attrPath.to_string(*state), flag)); else { j.emplace("type", 
"omitted"); j.emplace("message", fmt(ANSI_WARNING "omitted" ANSI_NORMAL " (use '%s' to show)", flag)); @@ -1295,11 +1284,13 @@ struct CmdFlakeShow : FlakeCommand, MixJSON try { if (visitor.isDerivation()) showDerivation(); - else - throw Error("expected a derivation"); + else { + auto name = visitor.getAttrPathStr(state->s.name); + logger->warn(fmt("%s is not a derivation", name)); + } } catch (IFDError & e) { - logger->warn(fmt( - "%s omitted due to use of import from derivation", concatStringsSep(".", attrPathS))); + logger->warn( + fmt("%s omitted due to use of import from derivation", attrPath.to_string(*state))); } } } @@ -1312,7 +1303,7 @@ struct CmdFlakeShow : FlakeCommand, MixJSON recurse(); } catch (IFDError & e) { logger->warn( - fmt("%s omitted due to use of import from derivation", concatStringsSep(".", attrPathS))); + fmt("%s omitted due to use of import from derivation", attrPath.to_string(*state))); } } @@ -1331,8 +1322,8 @@ struct CmdFlakeShow : FlakeCommand, MixJSON // FIXME: handle recurseIntoAttrs recurse(); } catch (IFDError & e) { - logger->warn(fmt( - "%s omitted due to use of import from derivation", concatStringsSep(".", attrPathS))); + logger->warn( + fmt("%s omitted due to use of import from derivation", attrPath.to_string(*state))); } } } @@ -1484,8 +1475,8 @@ struct CmdFlakePrefetch : FlakeCommand, MixJSON void run(ref store) override { auto originalRef = getFlakeRef(); - auto resolvedRef = originalRef.resolve(fetchSettings, store); - auto [accessor, lockedRef] = resolvedRef.lazyFetch(getEvalState()->fetchSettings, store); + auto resolvedRef = originalRef.resolve(fetchSettings, *store); + auto [accessor, lockedRef] = resolvedRef.lazyFetch(getEvalState()->fetchSettings, *store); auto storePath = fetchToStore(getEvalState()->fetchSettings, *store, accessor, FetchMode::Copy, lockedRef.input.getName()); auto hash = store->queryPathInfo(storePath)->narHash; diff --git a/src/nix/flake.md b/src/nix/flake.md index 950b9fa4f74..290e4694eb5 100644 
--- a/src/nix/flake.md +++ b/src/nix/flake.md @@ -187,7 +187,7 @@ Currently the `type` attribute can be one of the following: * `nixpkgs/nixos-unstable/a3a3dda3bacf61e8a39258a0ed9c924eeca8e293` * `sub/dir` (if a flake named `sub` is in the registry) -* `path`: arbitrary local directories. The required attribute `path` +* `path`: arbitrary local directories. The required attribute `path` specifies the path of the flake. The URL form is ``` diff --git a/src/nix/formatter.cc b/src/nix/formatter.cc index f5eb966d609..2c0b5c62b39 100644 --- a/src/nix/formatter.cc +++ b/src/nix/formatter.cc @@ -84,7 +84,7 @@ struct CmdFormatterRun : MixFormatter, MixJSON assert(maybeFlakeDir.has_value()); auto flakeDir = maybeFlakeDir.value(); - Strings programArgs{app.program}; + Strings programArgs{app.program.string()}; // Propagate arguments from the CLI for (auto & i : args) { @@ -103,7 +103,7 @@ struct CmdFormatterRun : MixFormatter, MixJSON execProgramInStore( store, UseLookupPath::DontUse, - app.program, + app.program.string(), programArgs, std::nullopt, // Use default system env); diff --git a/src/nix/hash-convert.md b/src/nix/hash-convert.md index dfb2154436e..dcebda74a3e 100644 --- a/src/nix/hash-convert.md +++ b/src/nix/hash-convert.md @@ -27,7 +27,7 @@ R""( ```console # nix hash convert --hash-algo sha256 --from nix32 ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0= - error: input hash 'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=' does not have the expected format '--from nix32' + error: input hash 'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=' has format 'base64', but '--from nix32' was specified # nix hash convert --hash-algo sha256 --from nix32 1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0= diff --git a/src/nix/hash.cc b/src/nix/hash.cc index d3c9ccb66a7..2945c672c2c 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -248,11 +248,13 @@ struct CmdHashConvert : Command void run() override { for (const auto & s : 
hashStrings) { - Hash h = from == HashFormat::SRI ? Hash::parseSRI(s) : Hash::parseAny(s, algo); - if (from && from != HashFormat::SRI - && h.to_string(*from, false) != (from == HashFormat::Base16 ? toLower(s) : s)) { - auto from_as_string = printHashFormat(*from); - throw BadHash("input hash '%s' does not have the expected format for '--from %s'", s, from_as_string); + auto [h, parsedFormat] = Hash::parseAnyReturningFormat(s, algo); + if (from && *from != parsedFormat) { + throw BadHash( + "input hash '%s' has format '%s', but '--from %s' was specified", + s, + printHashFormat(parsedFormat), + printHashFormat(*from)); } logger->cout(h.to_string(to, to == HashFormat::SRI)); } diff --git a/src/nix/ls.cc b/src/nix/ls.cc index 0565299e2f5..012850cc05d 100644 --- a/src/nix/ls.cc +++ b/src/nix/ls.cc @@ -1,6 +1,6 @@ #include "nix/cmd/command.hh" #include "nix/store/store-api.hh" -#include "nix/store/nar-accessor.hh" +#include "nix/util/nar-accessor.hh" #include "nix/main/common-args.hh" #include @@ -79,7 +79,12 @@ struct MixLs : virtual Args, MixJSON, MixLongListing if (json) { if (showDirectory) throw UsageError("'--directory' is useless with '--json'"); - logger->cout("%s", listNar(accessor, path, recursive)); + nlohmann::json j; + if (recursive) + j = listNarDeep(*accessor, path); + else + j = listNarShallow(*accessor, path); + logger->cout("%s", j.dump()); } else listText(accessor, std::move(path)); } @@ -109,10 +114,7 @@ struct CmdLsStore : StoreCommand, MixLs void run(ref store) override { auto [storePath, rest] = store->toStorePath(path); - auto accessor = store->getFSAccessor(storePath); - if (!accessor) - throw InvalidPath("path '%1%' is not a valid store path", store->printStorePath(storePath)); - list(ref{std::move(accessor)}, CanonPath{rest}); + list(store->requireStoreObjectAccessor(storePath), CanonPath{rest}); } }; @@ -142,7 +144,11 @@ struct CmdLsNar : Command, MixLs void run() override { - list(makeNarAccessor(readFile(narPath)), CanonPath{path}); + 
AutoCloseFD fd = toDescriptor(open(narPath.c_str(), O_RDONLY)); + if (!fd) + throw SysError("opening NAR file '%s'", narPath); + auto source = FdSource{fd.get()}; + list(makeLazyNarAccessor(source, seekableGetNarBytes(fd.get())), CanonPath{path}); } }; diff --git a/src/nix/main.cc b/src/nix/main.cc index 3621b738fb0..47ef6ea20a1 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -204,20 +204,38 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs, virtual RootArgs std::string dumpCli() { - auto res = nlohmann::json::object(); + using nlohmann::json; + + auto res = json::object(); res["args"] = toJSON(); - auto stores = nlohmann::json::object(); - for (auto & [storeName, implem] : Implementations::registered()) { - auto & j = stores[storeName]; - j["doc"] = implem.doc; - j["uri-schemes"] = implem.uriSchemes; - j["settings"] = implem.getConfig()->toJSON(); - j["experimentalFeature"] = implem.experimentalFeature; + { + auto & stores = res["stores"] = json::object(); + for (auto & [storeName, implem] : Implementations::registered()) { + auto & j = stores[storeName]; + j["doc"] = implem.doc; + j["uri-schemes"] = implem.uriSchemes; + j["settings"] = implem.getConfig()->toJSON(); + j["experimentalFeature"] = implem.experimentalFeature; + } } - res["stores"] = std::move(stores); - res["fetchers"] = fetchers::dumpRegisterInputSchemeInfo(); + + { + auto & fetchers = res["fetchers"] = json::object(); + + for (const auto & [schemeName, scheme] : fetchers::getAllInputSchemes()) { + auto & s = fetchers[schemeName] = json::object(); + s["description"] = scheme->schemeDescription(); + auto & attrs = s["allowedAttrs"] = json::object(); + for (auto & [fieldName, field] : scheme->allowedAttrs()) { + auto & f = attrs[fieldName] = json::object(); + f["type"] = field.type; + f["required"] = field.required; + f["doc"] = stripIndentation(field.doc); + } + } + }; return res.dump(); } @@ -264,7 +282,7 @@ static void showHelp(std::vector subcommand, NixArgs & toplevel) ); auto 
vDump = state.allocValue(); - vDump->mkString(toplevel.dumpCli()); + vDump->mkString(toplevel.dumpCli(), state.mem); auto vRes = state.allocValue(); Value * args[]{&state.getBuiltin("false"), vDump}; @@ -450,7 +468,7 @@ void mainWrapped(int argc, char ** argv) if (!primOp->doc) continue; b["args"] = primOp->args; - b["doc"] = trim(stripIndentation(primOp->doc)); + b["doc"] = trim(stripIndentation(*primOp->doc)); if (primOp->experimentalFeature) b["experimental-feature"] = primOp->experimentalFeature; builtinsJson.emplace(state.symbols[builtin.name], std::move(b)); diff --git a/src/nix/meson.build b/src/nix/meson.build index 3fdc1fcb2c0..77e3c05dc5e 100644 --- a/src/nix/meson.build +++ b/src/nix/meson.build @@ -56,13 +56,11 @@ config_priv_h = configure_file( ) subdir('nix-meson-build-support/common') -subdir('nix-meson-build-support/asan-options') subdir('nix-meson-build-support/generate-header') nix_sources = [ config_priv_h ] + files( 'add-to-store.cc', 'app.cc', - 'asan-options.cc', 'build.cc', 'bundle.cc', 'cat.cc', @@ -234,6 +232,7 @@ foreach linkname : nix_symlinks env : {'MSYS' : 'winsymlinks:lnk'}, # TODO(Ericson2314): Don't do this once we have the `meson.override_find_program` working) build_by_default : true, + depends : this_exe, ) # TODO(Ericson3214): Doesn't yet work #meson.override_find_program(linkname, t) @@ -255,6 +254,7 @@ custom_target( env : {'MSYS' : 'winsymlinks:lnk'}, # TODO(Ericson2314): Don't do this once we have the `meson.override_find_program` working) build_by_default : true, + depends : this_exe, ) # TODO(Ericson3214): Doesn't yet work #meson.override_find_program(linkname, t) diff --git a/src/nix/nar.md b/src/nix/nar.md index b0f70ce93a3..c29c2092ab6 100644 --- a/src/nix/nar.md +++ b/src/nix/nar.md @@ -8,7 +8,7 @@ R""( # File format For the definition of the Nix Archive file format, see -[within the protocols chapter](@docroot@/protocols/nix-archive.md) +[within the protocols chapter](@docroot@/protocols/nix-archive/index.md) of the 
manual. [Nix Archive]: @docroot@/store/file-system-object/content-address.md#serial-nix-archive diff --git a/src/nix/nario.cc b/src/nix/nario.cc index c655711354c..df9ae16340b 100644 --- a/src/nix/nario.cc +++ b/src/nix/nario.cc @@ -275,7 +275,8 @@ struct CmdNarioList : Command, MixJSON, MixLongListing source.skip(info.narSize); if (json) { - auto obj = info.toJSON(*this, true, HashFormat::SRI); + // FIXME: make the JSON format configurable. + auto obj = info.toJSON(this, true, PathInfoJsonFormat::V1); if (contents) obj.emplace("contents", *contents); json->emplace(printStorePath(info.path), std::move(obj)); @@ -305,7 +306,7 @@ struct CmdNarioList : Command, MixJSON, MixLongListing } void queryRealisationUncached( - const DrvOutput &, Callback> callback) noexcept override + const DrvOutput &, Callback> callback) noexcept override { callback(nullptr); } diff --git a/src/nix/nix-build/nix-build.cc b/src/nix/nix-build/nix-build.cc index 8a39bf63239..217382ef8ee 100644 --- a/src/nix/nix-build/nix-build.cc +++ b/src/nix/nix-build/nix-build.cc @@ -410,17 +410,18 @@ static void main_nix_build(int argc, char ** argv) Value vRoot; state->eval(e, vRoot); - std::function takesNixShellAttr; - takesNixShellAttr = [&](const Value & v) { + auto takesNixShellAttr = [&](const Value & v) { if (!isNixShell) { return false; } bool add = false; - if (v.type() == nFunction && v.lambda().fun->hasFormals()) { - for (auto & i : v.lambda().fun->formals->formals) { - if (state->symbols[i.name] == "inNixShell") { - add = true; - break; + if (v.type() == nFunction) { + if (auto formals = v.lambda().fun->getFormals()) { + for (auto & i : formals->formals) { + if (state->symbols[i.name] == "inNixShell") { + add = true; + break; + } } } } @@ -493,10 +494,9 @@ static void main_nix_build(int argc, char ** argv) } } - std::function, const DerivedPathMap::ChildNode &)> accumDerivedPath; - - accumDerivedPath = [&](ref inputDrv, - const DerivedPathMap::ChildNode & inputNode) { + auto accumDerivedPath = 
[&](this auto & self, + ref inputDrv, + const DerivedPathMap::ChildNode & inputNode) -> void { if (!inputNode.value.empty()) pathsToBuild.push_back( DerivedPath::Built{ @@ -504,8 +504,7 @@ static void main_nix_build(int argc, char ** argv) .outputs = OutputsSpec::Names{inputNode.value}, }); for (const auto & [outputName, childNode] : inputNode.childMap) - accumDerivedPath( - make_ref(SingleDerivedPath::Built{inputDrv, outputName}), childNode); + self(make_ref(SingleDerivedPath::Built{inputDrv, outputName}), childNode); }; // Build or fetch all dependencies of the derivation. @@ -558,9 +557,9 @@ static void main_nix_build(int argc, char ** argv) env["NIX_STORE"] = store->storeDir; env["NIX_BUILD_CORES"] = fmt("%d", settings.buildCores ? settings.buildCores : settings.getDefaultCores()); - DerivationOptions drvOptions; + DerivationOptions drvOptions; try { - drvOptions = DerivationOptions::fromStructuredAttrs(drv.env, drv.structuredAttrs); + drvOptions = derivationOptionsFromStructuredAttrs(*store, drv.env, get(drv.structuredAttrs)); } catch (Error & e) { e.addTrace({}, "while parsing derivation '%s'", store->printStorePath(packageInfo.requireDrvPath())); throw; @@ -603,7 +602,7 @@ static void main_nix_build(int argc, char ** argv) structuredAttrsRC = StructuredAttrs::writeShell(json); auto attrsJSON = (tmpDir.path() / ".attrs.json").string(); - writeFile(attrsJSON, json.dump()); + writeFile(attrsJSON, static_cast(std::move(json)).dump()); auto attrsSH = (tmpDir.path() / ".attrs.sh").string(); writeFile(attrsSH, structuredAttrsRC); diff --git a/src/nix/nix-channel/nix-channel.cc b/src/nix/nix-channel/nix-channel.cc index 354c44cbc01..00723ba2b09 100644 --- a/src/nix/nix-channel/nix-channel.cc +++ b/src/nix/nix-channel/nix-channel.cc @@ -121,38 +121,34 @@ static void update(const StringSet & channelNames) // We want to download the url to a file to see if it's a tarball while also checking if we // got redirected in the process, so that we can grab the various parts 
of a nix channel // definition from a consistent location if the redirect changes mid-download. - auto result = fetchers::downloadFile(store, fetchSettings, url, std::string(baseNameOf(url))); - auto filename = store->toRealPath(result.storePath); + auto result = fetchers::downloadFile(*store, fetchSettings, url, std::string(baseNameOf(url))); url = result.effectiveUrl; bool unpacked = false; - if (std::regex_search(filename, std::regex("\\.tar\\.(gz|bz2|xz)$"))) { + if (std::regex_search(std::string{result.storePath.to_string()}, std::regex("\\.tar\\.(gz|bz2|xz)$"))) { runProgram( getNixBin("nix-build").string(), false, {"--no-out-link", "--expr", "import " + unpackChannelPath + "{ name = \"" + cname + "\"; channelName = \"" + name - + "\"; src = builtins.storePath \"" + filename + "\"; }"}); + + "\"; src = builtins.storePath \"" + store->printStorePath(result.storePath) + "\"; }"}); unpacked = true; } if (!unpacked) { // Download the channel tarball. try { - filename = store->toRealPath( - fetchers::downloadFile(store, fetchSettings, url + "/nixexprs.tar.xz", "nixexprs.tar.xz") - .storePath); + result = fetchers::downloadFile(*store, fetchSettings, url + "/nixexprs.tar.xz", "nixexprs.tar.xz"); } catch (FileTransferError & e) { - filename = store->toRealPath( - fetchers::downloadFile(store, fetchSettings, url + "/nixexprs.tar.bz2", "nixexprs.tar.bz2") - .storePath); + result = + fetchers::downloadFile(*store, fetchSettings, url + "/nixexprs.tar.bz2", "nixexprs.tar.bz2"); } } // Regardless of where it came from, add the expression representing this channel to accumulated expression exprs.push_back( "f: f { name = \"" + cname + "\"; channelName = \"" + name + "\"; src = builtins.storePath \"" - + filename + "\"; " + extraAttrs + " }"); + + store->printStorePath(result.storePath) + "\"; " + extraAttrs + " }"); } } diff --git a/src/nix/nix-collect-garbage/nix-collect-garbage.cc b/src/nix/nix-collect-garbage/nix-collect-garbage.cc index 4d6e60bf31d..29ca17a5de2 100644 
--- a/src/nix/nix-collect-garbage/nix-collect-garbage.cc +++ b/src/nix/nix-collect-garbage/nix-collect-garbage.cc @@ -91,7 +91,7 @@ static int main_nix_collect_garbage(int argc, char ** argv) std::set dirsToClean = { profilesDir(), std::filesystem::path{settings.nixStateDir} / "profiles", - std::filesystem::path{getDefaultProfile()}.parent_path(), + getDefaultProfile().parent_path(), }; for (auto & dir : dirsToClean) removeOldGenerations(dir); diff --git a/src/nix/nix-env/nix-env.cc b/src/nix/nix-env/nix-env.cc index ee2458b10aa..f3e6d2acd81 100644 --- a/src/nix/nix-env/nix-env.cc +++ b/src/nix/nix-env/nix-env.cc @@ -133,7 +133,7 @@ static void getAllExprs(EvalState & state, const SourcePath & path, StringSet & } /* Load the expression on demand. */ auto vArg = state.allocValue(); - vArg->mkPath(path2); + vArg->mkPath(path2, state.mem); if (seen.size() == maxAttrs) throw Error("too many Nix expressions in directory '%1%'", path); attrs.alloc(attrName).mkApp(&state.getBuiltin("import"), vArg); @@ -484,7 +484,7 @@ static bool keep(PackageInfo & drv) static void setMetaFlag(EvalState & state, PackageInfo & drv, const std::string & name, const std::string & value) { auto v = state.allocValue(); - v->mkString(value); + v->mkString(value, state.mem); drv.setMeta(name, v); } @@ -764,7 +764,7 @@ static void opSet(Globals & globals, Strings opFlags, Strings opArgs) globals.state->store->buildPaths(paths, globals.state->repair ? 
bmRepair : bmNormal); debug("switching to new user environment"); - Path generation = createGeneration(*store2, globals.profile, drv.queryOutPath()); + auto generation = createGeneration(*store2, globals.profile, drv.queryOutPath()); switchLink(globals.profile, generation); } @@ -1199,7 +1199,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) else { if (v->type() == nString) { attrs2["type"] = "string"; - attrs2["value"] = v->c_str(); + attrs2["value"] = v->string_view(); xml.writeEmptyElement("meta", attrs2); } else if (v->type() == nInt) { attrs2["type"] = "int"; @@ -1220,7 +1220,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) if (elem->type() != nString) continue; XMLAttrs attrs3; - attrs3["value"] = elem->c_str(); + attrs3["value"] = elem->string_view(); xml.writeEmptyElement("string", attrs3); } } else if (v->type() == nAttrs) { @@ -1231,7 +1231,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) continue; XMLAttrs attrs3; attrs3["type"] = globals.state->symbols[i.name]; - attrs3["value"] = i.value->c_str(); + attrs3["value"] = i.value->string_view(); xml.writeEmptyElement("string", attrs3); } } @@ -1263,7 +1263,7 @@ static void opSwitchProfile(Globals & globals, Strings opFlags, Strings opArgs) throw UsageError("exactly one argument expected"); Path profile = absPath(opArgs.front()); - Path profileLink = settings.useXDGBaseDirectories ? createNixStateDir() + "/profile" : getHome() + "/.nix-profile"; + auto profileLink = settings.useXDGBaseDirectories ? 
createNixStateDir() / "profile" : getHome() / ".nix-profile"; switchLink(profileLink, profile); } @@ -1378,14 +1378,15 @@ static int main_nix_env(int argc, char ** argv) globals.instSource.type = srcUnknown; globals.instSource.systemFilter = "*"; - Path nixExprPath = getNixDefExpr(); + std::filesystem::path nixExprPath = getNixDefExpr(); if (!pathExists(nixExprPath)) { try { createDirs(nixExprPath); - replaceSymlink(defaultChannelsDir(), nixExprPath + "/channels"); + replaceSymlink(defaultChannelsDir(), nixExprPath / "channels"); if (!isRootUser()) - replaceSymlink(rootChannelsDir(), nixExprPath + "/channels_root"); + replaceSymlink(rootChannelsDir(), nixExprPath / "channels_root"); + } catch (std::filesystem::filesystem_error &) { } catch (Error &) { } } @@ -1482,7 +1483,8 @@ static int main_nix_env(int argc, char ** argv) globals.state->repair = myArgs.repair; globals.instSource.nixExprPath = std::make_shared( - file != "" ? lookupFileArg(*globals.state, file) : globals.state->rootPath(CanonPath(nixExprPath))); + file != "" ? 
lookupFileArg(*globals.state, file) + : globals.state->rootPath(CanonPath(nixExprPath.string()))); globals.instSource.autoArgs = myArgs.getAutoArgs(*globals.state); @@ -1490,7 +1492,7 @@ static int main_nix_env(int argc, char ** argv) globals.profile = getEnv("NIX_PROFILE").value_or(""); if (globals.profile == "") - globals.profile = getDefaultProfile(); + globals.profile = getDefaultProfile().string(); op(globals, std::move(opFlags), std::move(opArgs)); diff --git a/src/nix/nix-env/user-env.cc b/src/nix/nix-env/user-env.cc index 43b386d37d5..ac36bf97011 100644 --- a/src/nix/nix-env/user-env.cc +++ b/src/nix/nix-env/user-env.cc @@ -9,6 +9,7 @@ #include "nix/expr/eval-inline.hh" #include "nix/store/profiles.hh" #include "nix/expr/print-ambiguous.hh" +#include "nix/expr/static-string-data.hh" #include #include @@ -58,21 +59,21 @@ bool createUserEnv( auto attrs = state.buildBindings(7 + outputs.size()); - attrs.alloc(state.s.type).mkStringNoCopy("derivation"); - attrs.alloc(state.s.name).mkString(i.queryName()); + attrs.alloc(state.s.type).mkStringNoCopy("derivation"_sds); + attrs.alloc(state.s.name).mkString(i.queryName(), state.mem); auto system = i.querySystem(); if (!system.empty()) - attrs.alloc(state.s.system).mkString(system); - attrs.alloc(state.s.outPath).mkString(state.store->printStorePath(i.queryOutPath())); + attrs.alloc(state.s.system).mkString(system, state.mem); + attrs.alloc(state.s.outPath).mkString(state.store->printStorePath(i.queryOutPath()), state.mem); if (drvPath) - attrs.alloc(state.s.drvPath).mkString(state.store->printStorePath(*drvPath)); + attrs.alloc(state.s.drvPath).mkString(state.store->printStorePath(*drvPath), state.mem); // Copy each output meant for installation. 
auto outputsList = state.buildList(outputs.size()); for (const auto & [m, j] : enumerate(outputs)) { - (outputsList[m] = state.allocValue())->mkString(j.first); + (outputsList[m] = state.allocValue())->mkString(j.first, state.mem); auto outputAttrs = state.buildBindings(2); - outputAttrs.alloc(state.s.outPath).mkString(state.store->printStorePath(*j.second)); + outputAttrs.alloc(state.s.outPath).mkString(state.store->printStorePath(*j.second), state.mem); attrs.alloc(j.first).mkAttrs(outputAttrs); /* This is only necessary when installing store paths, e.g., @@ -163,14 +164,14 @@ bool createUserEnv( PathLocks lock; lockProfile(lock, profile); - Path lockTokenCur = optimisticLockProfile(profile); + std::filesystem::path lockTokenCur = optimisticLockProfile(profile); if (lockToken != lockTokenCur) { printInfo("profile '%1%' changed while we were busy; restarting", profile); return false; } debug("switching to new user environment"); - Path generation = createGeneration(*store2, profile, topLevelOut); + std::filesystem::path generation = createGeneration(*store2, profile, topLevelOut); switchLink(profile, generation); } diff --git a/src/nix/nix-store/nix-store.cc b/src/nix/nix-store/nix-store.cc index 2f632510c23..74697ade110 100644 --- a/src/nix/nix-store/nix-store.cc +++ b/src/nix/nix-store/nix-store.cc @@ -603,7 +603,7 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise) #endif if (!hashGiven) { HashResult hash = hashPath( - {ref{store->getFSAccessor(info->path, false)}}, + {store->requireStoreObjectAccessor(info->path, /*requireValidPath=*/false)}, FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256); info->narHash = hash.hash; @@ -1057,7 +1057,10 @@ static void opServe(Strings opFlags, Strings opArgs) auto deriver = readString(in); ValidPathInfo info{ store->parseStorePath(path), - Hash::parseAny(readString(in), HashAlgorithm::SHA256), + { + *store, + Hash::parseAny(readString(in), HashAlgorithm::SHA256), + }, }; if (deriver 
!= "") info.deriver = store->parseStorePath(deriver); diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc index fef3ae1207c..6bffe2424e4 100644 --- a/src/nix/path-info.cc +++ b/src/nix/path-info.cc @@ -34,14 +34,21 @@ static uint64_t getStoreObjectsTotalSize(Store & store, const StorePathSet & clo * * @param showClosureSize If true, the closure size of each path is * included. + * @param format The JSON format version to use. */ -static json pathInfoToJSON(Store & store, const StorePathSet & storePaths, bool showClosureSize) +static json +pathInfoToJSON(Store & store, const StorePathSet & storePaths, bool showClosureSize, PathInfoJsonFormat format) { json::object_t jsonAllObjects = json::object(); + auto makeKey = [&](const StorePath & path) { + return format == PathInfoJsonFormat::V1 ? store.printStorePath(path) : std::string(path.to_string()); + }; + for (auto & storePath : storePaths) { json jsonObject; - auto printedStorePath = store.printStorePath(storePath); + + std::string key = makeKey(storePath); try { auto info = store.queryPathInfo(storePath); @@ -49,9 +56,13 @@ static json pathInfoToJSON(Store & store, const StorePathSet & storePaths, bool // `storePath` has the representation `-x` rather than // `-` in case of binary-cache stores & `--all` because we don't // know the name yet until we've read the NAR info. - printedStorePath = store.printStorePath(info->path); + key = makeKey(info->path); + + jsonObject = info->toJSON(format == PathInfoJsonFormat::V1 ? &store : nullptr, true, format); - jsonObject = info->toJSON(store, true, HashFormat::SRI); + /* Hack in the store dir for now. TODO update the data type + instead. 
*/ + jsonObject["storeDir"] = store.storeDir; if (showClosureSize) { StorePathSet closure; @@ -74,14 +85,22 @@ static json pathInfoToJSON(Store & store, const StorePathSet & storePaths, bool jsonObject["closureDownloadSize"] = totalDownloadSize; } } - } catch (InvalidPath &) { jsonObject = nullptr; } - jsonAllObjects[printedStorePath] = std::move(jsonObject); + jsonAllObjects[key] = std::move(jsonObject); + } + + if (format == PathInfoJsonFormat::V1) { + return jsonAllObjects; + } else { + return { + {"version", format}, + {"storeDir", store.storeDir}, + {"info", std::move(jsonAllObjects)}, + }; } - return jsonAllObjects; } struct CmdPathInfo : StorePathsCommand, MixJSON @@ -90,6 +109,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON bool showClosureSize = false; bool humanReadable = false; bool showSigs = false; + std::optional jsonFormat; CmdPathInfo() { @@ -119,6 +139,16 @@ struct CmdPathInfo : StorePathsCommand, MixJSON .description = "Show signatures.", .handler = {&showSigs, true}, }); + + addFlag({ + .longName = "json-format", + .description = + "JSON format version to use (1 or 2). Version 1 uses string hashes and full store paths. Version 2 uses structured hashes and store path base names. This flag will be required in a future release.", + .labels = {"version"}, + .handler = {[this](std::string s) { + jsonFormat = parsePathInfoJsonFormat(string2IntWithUnitPrefix(s)); + }}, + }); } std::string description() override @@ -141,7 +171,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON void printSize(std::ostream & str, uint64_t value) { if (humanReadable) - str << fmt("\t%s", renderSize(value, true)); + str << fmt("\t%s", renderSize((int64_t) value, true)); else str << fmt("\t%11d", value); } @@ -157,7 +187,14 @@ struct CmdPathInfo : StorePathsCommand, MixJSON *store, // FIXME: preserve order? 
StorePathSet(storePaths.begin(), storePaths.end()), - showClosureSize)); + showClosureSize, + jsonFormat + .or_else([&]() { + warn( + "'--json' without '--json-format' is deprecated; please specify '--json-format 1' or '--json-format 2'. This will become an error in a future release."); + return std::optional{PathInfoJsonFormat::V1}; + }) + .value())); } else { diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc index 18abfa0aae2..d494b098686 100644 --- a/src/nix/prefetch.cc +++ b/src/nix/prefetch.cc @@ -13,6 +13,8 @@ #include "nix/cmd/misc-store-flags.hh" #include "nix/util/terminal.hh" #include "nix/util/environment-variables.hh" +#include "nix/util/url.hh" +#include "nix/store/path.hh" #include "man-pages.hh" @@ -56,8 +58,8 @@ std::string resolveMirrorUrl(EvalState & state, const std::string & url) std::tuple prefetchFile( ref store, - std::string_view url, - std::optional name, + const VerbatimURL & url, + std::optional maybeName, HashAlgorithm hashAlgo, std::optional expectedHash, bool unpack, @@ -66,11 +68,22 @@ std::tuple prefetchFile( ContentAddressMethod method = unpack || executable ? ContentAddressMethod::Raw::NixArchive : ContentAddressMethod::Raw::Flat; - /* Figure out a name in the Nix store. */ - if (!name) { - name = baseNameOf(url); - if (name->empty()) - throw Error("cannot figure out file name for '%s'", url); + std::string name = maybeName + .or_else([&]() { + /* Figure out a name in the Nix store. 
*/ + auto derivedFromUrl = url.lastPathSegment(); + if (!derivedFromUrl || derivedFromUrl->empty()) + throw Error("cannot figure out file name for '%s'", url.to_string()); + return derivedFromUrl; + }) + .value(); + + try { + checkName(name); + } catch (BadStorePathName & e) { + if (!maybeName) + e.addTrace({}, "file name '%s' was extracted from URL '%s'", name, url.to_string()); + throw; } std::optional storePath; @@ -81,7 +94,7 @@ std::tuple prefetchFile( if (expectedHash) { hashAlgo = expectedHash->algo; storePath = - store->makeFixedOutputPathFromCA(*name, ContentAddressWithReferences::fromParts(method, *expectedHash, {})); + store->makeFixedOutputPathFromCA(name, ContentAddressWithReferences::fromParts(method, *expectedHash, {})); if (store->isValidPath(*storePath)) hash = expectedHash; else @@ -105,14 +118,14 @@ std::tuple prefetchFile( FdSink sink(fd.get()); - FileTransferRequest req(VerbatimURL{url}); + FileTransferRequest req(url); req.decompress = false; getFileTransfer()->download(std::move(req), sink); } /* Optionally unpack the file. 
*/ if (unpack) { - Activity act(*logger, lvlChatty, actUnknown, fmt("unpacking '%s'", url)); + Activity act(*logger, lvlChatty, actUnknown, fmt("unpacking '%s'", url.to_string())); auto unpacked = (tmpDir.path() / "unpacked").string(); createDirs(unpacked); unpackTarfile(tmpFile.string(), unpacked); @@ -128,10 +141,9 @@ std::tuple prefetchFile( } } - Activity act(*logger, lvlChatty, actUnknown, fmt("adding '%s' to the store", url)); + Activity act(*logger, lvlChatty, actUnknown, fmt("adding '%s' to the store", url.to_string())); - auto info = store->addToStoreSlow( - *name, PosixSourceAccessor::createAtRoot(tmpFile), method, hashAlgo, {}, expectedHash); + auto info = store->addToStoreSlow(name, makeFSSourceAccessor(tmpFile), method, hashAlgo, {}, expectedHash); storePath = info.path; assert(info.ca); hash = info.ca->hash; diff --git a/src/nix/profile-diff-closures.md b/src/nix/profile-diff-closures.md index 295d1252bcd..1cfed160f31 100644 --- a/src/nix/profile-diff-closures.md +++ b/src/nix/profile-diff-closures.md @@ -23,6 +23,6 @@ R""( This command shows the difference between the closures of subsequent versions of a profile. See [`nix store -diff-closures`](nix3-store-diff-closures.md) for details. +diff-closures`](./nix3-store-diff-closures.md) for details. )"" diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 42a8f9dcfec..ba17b6a6212 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -247,13 +247,13 @@ struct ProfileManifest } } - buildProfile(tempDir, std::move(pkgs)); + buildProfile(tempDir.string(), std::move(pkgs)); - writeFile(tempDir + "/manifest.json", toJSON(*store).dump()); + writeFile(tempDir / "manifest.json", toJSON(*store).dump()); /* Add the symlink tree to the store. 
*/ StringSink sink; - dumpPath(tempDir, sink); + dumpPath(tempDir.string(), sink); auto narHash = hashString(HashAlgorithm::SHA256, sink.s); @@ -854,7 +854,10 @@ struct CmdProfileDiffClosures : virtual StoreCommand, MixDefaultProfile first = false; logger->cout("Version %d -> %d:", prevGen->number, gen.number); printClosureDiff( - store, store->followLinksToStorePath(prevGen->path), store->followLinksToStorePath(gen.path), " "); + store, + store->followLinksToStorePath(prevGen->path.string()), + store->followLinksToStorePath(gen.path.string()), + " "); } prevGen = gen; diff --git a/src/nix/registry.cc b/src/nix/registry.cc index 38e20283ee5..6d913adcd6c 100644 --- a/src/nix/registry.cc +++ b/src/nix/registry.cc @@ -43,7 +43,7 @@ class RegistryCommand : virtual Args Path getRegistryPath() { if (registry_path.empty()) { - return fetchers::getUserRegistryPath(); + return fetchers::getUserRegistryPath().string(); } else { return registry_path; } @@ -68,7 +68,7 @@ struct CmdRegistryList : StoreCommand { using namespace fetchers; - auto registries = getRegistries(fetchSettings, store); + auto registries = getRegistries(fetchSettings, *store); for (auto & registry : registries) { for (auto & entry : registry->entries) { @@ -189,14 +189,14 @@ struct CmdRegistryPin : RegistryCommand, EvalCommand auto registry = getRegistry(); auto ref = parseFlakeRef(fetchSettings, url); auto lockedRef = parseFlakeRef(fetchSettings, locked); - registry->remove(ref.input); - auto resolvedInput = lockedRef.resolve(fetchSettings, store).input; - auto resolved = resolvedInput.getAccessor(fetchSettings, store).second; + auto resolvedInput = lockedRef.resolve(fetchSettings, *store).input; + auto resolved = resolvedInput.getAccessor(fetchSettings, *store).second; if (!resolved.isLocked(fetchSettings)) warn("flake '%s' is not locked", resolved.to_string()); fetchers::Attrs extraAttrs; if (ref.subdir != "") extraAttrs["dir"] = ref.subdir; + registry->remove(ref.input); registry->add(ref.input, 
resolved, extraAttrs); registry->write(getRegistryPath()); } @@ -230,7 +230,7 @@ struct CmdRegistryResolve : StoreCommand { for (auto & url : urls) { auto ref = parseFlakeRef(fetchSettings, url); - auto resolved = ref.resolve(fetchSettings, store); + auto resolved = ref.resolve(fetchSettings, *store); logger->cout("%s", resolved.to_string()); } } diff --git a/src/nix/repl.cc b/src/nix/repl.cc index 5dd53e9328b..19f02e759c5 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -11,7 +11,7 @@ namespace nix { -void runNix(Path program, const Strings & args, const std::optional & input = {}) +void runNix(const std::string & program, const Strings & args, const std::optional & input = {}) { auto subprocessEnv = getEnv(); subprocessEnv["NIX_CONFIG"] = globalConfig.toKeyValue(); diff --git a/src/nix/run.cc b/src/nix/run.cc index 368a5ed5701..324b736a6a5 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -160,7 +160,7 @@ struct CmdRun : InstallableValueCommand, MixEnvironment lockFlags.applyNixConfig = true; auto app = installable->toApp(*state).resolve(getEvalStore(), store); - Strings allArgs{app.program}; + Strings allArgs{app.program.string()}; for (auto & i : args) allArgs.push_back(i); @@ -170,7 +170,7 @@ struct CmdRun : InstallableValueCommand, MixEnvironment setEnviron(); - execProgramInStore(store, UseLookupPath::DontUse, app.program, allArgs); + execProgramInStore(store, UseLookupPath::DontUse, app.program.string(), allArgs); } }; diff --git a/src/nix/run.md b/src/nix/run.md index eb96e6b319e..17998001016 100644 --- a/src/nix/run.md +++ b/src/nix/run.md @@ -33,6 +33,16 @@ R""( # nix run nixpkgs#vim -- --help ``` +* Run the default app from the current directory with arguments: + + ```console + # nix run . -- arg1 arg2 + ``` + + Note: The first positional argument is always treated as the *installable*, + even after `--`. To pass arguments to the default installable, specify it + explicitly: `nix run . -- arg1 arg2` or `nix run -- . arg1 arg2`. 
+ # Description `nix run` builds and runs [*installable*](./nix.md#installables), which must evaluate to an diff --git a/src/nix/search.cc b/src/nix/search.cc index 3323db01057..729a505f534 100644 --- a/src/nix/search.cc +++ b/src/nix/search.cc @@ -93,15 +93,14 @@ struct CmdSearch : InstallableValueCommand, MixJSON FutureVector futures(*state->executor); - std::function & attrPath, bool initialRecurse)> - visit; + std::function visit; - visit = [&](eval_cache::AttrCursor & cursor, const std::vector & attrPath, bool initialRecurse) { - auto attrPathS = state->symbols.resolve(attrPath); + visit = [&](eval_cache::AttrCursor & cursor, const AttrPath & attrPath, bool initialRecurse) { + auto attrPathS = state->symbols.resolve({attrPath}); + auto attrPathStr = attrPath.to_string(*state); /* - Activity act(*logger, lvlInfo, actUnknown, - fmt("evaluating '%s'", concatStringsSep(".", attrPathS))); + Activity act(*logger, lvlInfo, actUnknown, fmt("evaluating '%s'", attrPathStr)); */ try { auto recurse = [&]() { @@ -124,7 +123,6 @@ struct CmdSearch : InstallableValueCommand, MixJSON auto aDescription = aMeta ? aMeta->maybeGetAttr(state->s.description) : nullptr; auto description = aDescription ? 
aDescription->getString() : ""; std::replace(description.begin(), description.end(), '\n', ' '); - auto attrPath2 = concatStringsSep(".", attrPathS); std::vector attrPathMatches; std::vector descriptionMatches; @@ -132,7 +130,7 @@ struct CmdSearch : InstallableValueCommand, MixJSON bool found = false; for (auto & regex : excludeRegexes) { - if (std::regex_search(attrPath2, regex) || std::regex_search(name.name, regex) + if (std::regex_search(attrPathStr, regex) || std::regex_search(name.name, regex) || std::regex_search(description, regex)) return; } @@ -147,7 +145,7 @@ struct CmdSearch : InstallableValueCommand, MixJSON } }; - addAll(std::sregex_iterator(attrPath2.begin(), attrPath2.end(), regex), attrPathMatches); + addAll(std::sregex_iterator(attrPathStr.begin(), attrPathStr.end(), regex), attrPathMatches); addAll(std::sregex_iterator(name.name.begin(), name.name.end(), regex), nameMatches); addAll(std::sregex_iterator(description.begin(), description.end(), regex), descriptionMatches); @@ -158,7 +156,7 @@ struct CmdSearch : InstallableValueCommand, MixJSON if (found) { results++; if (json) { - (*jsonOut->lock())[attrPath2] = { + (*jsonOut->lock())[attrPathStr] = { {"pname", name.name}, {"version", name.version}, {"description", description}, @@ -167,8 +165,8 @@ struct CmdSearch : InstallableValueCommand, MixJSON auto out = fmt("%s* %s%s", results > 1 ? "\n" : "", - wrap("\e[0;1m", hiliteMatches(attrPath2, attrPathMatches, ANSI_GREEN, "\e[0;1m")), - name.version != "" ? 
" (" + name.version + ")" : ""); + wrap("\e[0;1m", hiliteMatches(attrPathStr, attrPathMatches, ANSI_GREEN, "\e[0;1m")), + optionalBracket(" (", name.version, ")")); if (description != "") out += fmt( "\n %s", hiliteMatches(description, descriptionMatches, ANSI_GREEN, ANSI_NORMAL)); diff --git a/src/nix/unix/daemon.cc b/src/nix/unix/daemon.cc index cb105a385cc..406258ff821 100644 --- a/src/nix/unix/daemon.cc +++ b/src/nix/unix/daemon.cc @@ -87,7 +87,7 @@ struct AuthorizationSettings : Config {"*"}, "allowed-users", R"( - A list user names, separated by whitespace. + A list of user names, separated by whitespace. These users are allowed to connect to the Nix daemon. You can specify groups by prefixing names with `@`. @@ -174,22 +174,23 @@ static bool matchUser(std::string_view user, const struct group & gr) * * Otherwise: No. */ -static bool matchUser(const std::string & user, const std::string & group, const Strings & users) +static bool +matchUser(const std::optional & user, const std::optional & group, const Strings & users) { if (find(users.begin(), users.end(), "*") != users.end()) return true; - if (find(users.begin(), users.end(), user) != users.end()) + if (user && find(users.begin(), users.end(), *user) != users.end()) return true; for (auto & i : users) if (i.substr(0, 1) == "@") { - if (group == i.substr(1)) + if (group && *group == i.substr(1)) return true; struct group * gr = getgrnam(i.c_str() + 1); if (!gr) continue; - if (matchUser(user, *gr)) + if (user && matchUser(*user, *gr)) return true; } @@ -198,12 +199,9 @@ static bool matchUser(const std::string & user, const std::string & group, const struct PeerInfo { - bool pidKnown; - pid_t pid; - bool uidKnown; - uid_t uid; - bool gidKnown; - gid_t gid; + std::optional pid; + std::optional uid; + std::optional gid; }; /** @@ -211,7 +209,7 @@ struct PeerInfo */ static PeerInfo getPeerInfo(int remote) { - PeerInfo peer = {false, 0, false, 0, false, 0}; + PeerInfo peer; #if defined(SO_PEERCRED) @@ -221,9 
+219,11 @@ static PeerInfo getPeerInfo(int remote) ucred cred; # endif socklen_t credLen = sizeof(cred); - if (getsockopt(remote, SOL_SOCKET, SO_PEERCRED, &cred, &credLen) == -1) - throw SysError("getting peer credentials"); - peer = {true, cred.pid, true, cred.uid, true, cred.gid}; + if (getsockopt(remote, SOL_SOCKET, SO_PEERCRED, &cred, &credLen) == 0) { + peer.pid = cred.pid; + peer.uid = cred.uid; + peer.gid = cred.gid; + } #elif defined(LOCAL_PEERCRED) @@ -233,9 +233,8 @@ static PeerInfo getPeerInfo(int remote) xucred cred; socklen_t credLen = sizeof(cred); - if (getsockopt(remote, SOL_LOCAL, LOCAL_PEERCRED, &cred, &credLen) == -1) - throw SysError("getting peer credentials"); - peer = {false, 0, true, cred.cr_uid, false, 0}; + if (getsockopt(remote, SOL_LOCAL, LOCAL_PEERCRED, &cred, &credLen) == 0) + peer.uid = cred.cr_uid; #endif @@ -266,15 +265,19 @@ static ref openUncachedStore() * * If the potential client is not allowed to talk to us, we throw an `Error`. */ -static std::pair authPeer(const PeerInfo & peer) +static std::pair> authPeer(const PeerInfo & peer) { TrustedFlag trusted = NotTrusted; - struct passwd * pw = peer.uidKnown ? getpwuid(peer.uid) : 0; - std::string user = pw ? pw->pw_name : std::to_string(peer.uid); + auto pw = peer.uid ? getpwuid(*peer.uid) : nullptr; + auto user = pw ? std::optional(pw->pw_name) + : peer.uid ? std::optional(std::to_string(*peer.uid)) + : std::nullopt; - struct group * gr = peer.gidKnown ? getgrgid(peer.gid) : 0; - std::string group = gr ? gr->gr_name : std::to_string(peer.gid); + auto gr = peer.gid ? getgrgid(*peer.gid) : 0; + auto group = gr ? std::optional(gr->gr_name) + : peer.gid ? 
std::optional(std::to_string(*peer.gid)) + : std::nullopt; const Strings & trustedUsers = authorizationSettings.trustedUsers; const Strings & allowedUsers = authorizationSettings.allowedUsers; @@ -283,7 +286,7 @@ static std::pair authPeer(const PeerInfo & peer) trusted = Trusted; if ((!trusted && !matchUser(user, group, allowedUsers)) || group == settings.buildUsersGroup) - throw Error("user '%1%' is not allowed to connect to the Nix daemon", user); + throw Error("user '%1%' is not allowed to connect to the Nix daemon", user.value_or("")); return {trusted, std::move(user)}; } @@ -360,23 +363,23 @@ static void daemonLoop(std::optional forceTrustClientOpt) unix::closeOnExec(remote.get()); - PeerInfo peer{.pidKnown = false}; + PeerInfo peer; TrustedFlag trusted; - std::string user; + std::optional userName; if (forceTrustClientOpt) trusted = *forceTrustClientOpt; else { peer = getPeerInfo(remote.get()); - auto [_trusted, _user] = authPeer(peer); + auto [_trusted, _userName] = authPeer(peer); trusted = _trusted; - user = _user; + userName = _userName; }; printInfo( (std::string) "accepted connection from pid %1%, user %2%" + (trusted ? " (trusted)" : ""), - peer.pidKnown ? std::to_string(peer.pid) : "", - peer.uidKnown ? user : ""); + peer.pid ? std::to_string(*peer.pid) : "", + userName.value_or("")); // Fork a child to handle the connection. ProcessOptions options; @@ -396,8 +399,8 @@ static void daemonLoop(std::optional forceTrustClientOpt) setSigChldAction(false); // For debugging, stuff the pid into argv[1]. - if (peer.pidKnown && savedArgv[1]) { - auto processName = std::to_string(peer.pid); + if (peer.pid && savedArgv[1]) { + auto processName = std::to_string(*peer.pid); strncpy(savedArgv[1], processName.c_str(), strlen(savedArgv[1])); } @@ -414,7 +417,7 @@ static void daemonLoop(std::optional forceTrustClientOpt) } catch (Error & error) { auto ei = error.info(); // FIXME: add to trace? 
- ei.msg = HintFmt("error processing connection: %1%", ei.msg.str()); + ei.msg = HintFmt("while processing connection: %1%", ei.msg.str()); logError(ei); } } diff --git a/src/nix/why-depends.cc b/src/nix/why-depends.cc index 473827a9344..29da9e953e8 100644 --- a/src/nix/why-depends.cc +++ b/src/nix/why-depends.cc @@ -1,5 +1,6 @@ #include "nix/cmd/command.hh" #include "nix/store/store-api.hh" +#include "nix/store/path-references.hh" #include "nix/util/source-accessor.hh" #include "nix/main/shared.hh" @@ -191,7 +192,7 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions /* Sort the references by distance to `dependency` to ensure that the shortest path is printed first. */ std::multimap refs; - StringSet hashes; + StorePathSet refPaths; for (auto & ref : node.refs) { if (ref == node.path && packagePath != dependencyPath) @@ -200,67 +201,59 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions if (node2.dist == inf) continue; refs.emplace(node2.dist, &node2); - hashes.insert(std::string(node2.path.hashPart())); + refPaths.insert(node2.path); } /* For each reference, find the files and symlinks that contain the reference. */ std::map hits; - auto accessor = store->getFSAccessor(node.path); + auto accessor = store->requireStoreObjectAccessor(node.path); - auto visitPath = [&](this auto && recur, const CanonPath & p) -> void { - auto st = accessor->maybeLstat(p); - assert(st); - - auto p2 = p.isRoot() ? p.abs() : p.rel(); - - auto getColour = [&](const std::string & hash) { - return hash == dependencyPathHash ? ANSI_GREEN : ANSI_BLUE; - }; - - if (st->type == SourceAccessor::Type::tDirectory) { - auto names = accessor->readDirectory(p); - for (auto & [name, type] : names) - recur(p / name); - } - - else if (st->type == SourceAccessor::Type::tRegular) { - auto contents = accessor->readFile(p); + auto getColour = [&](const std::string & hash) { + return hash == dependencyPathHash ? 
ANSI_GREEN : ANSI_BLUE; + }; - for (auto & hash : hashes) { - auto pos = contents.find(hash); - if (pos != std::string::npos) { - size_t margin = 32; - auto pos2 = pos >= margin ? pos - margin : 0; - hits[hash].emplace_back( - fmt("%s: …%s…", + if (precise) { + // Use scanForReferencesDeep to find files containing references + scanForReferencesDeep(*accessor, CanonPath::root, refPaths, [&](FileRefScanResult result) { + auto p2 = result.filePath.isRoot() ? result.filePath.abs() : result.filePath.rel(); + auto st = accessor->lstat(result.filePath); + + if (st.type == SourceAccessor::Type::tRegular) { + auto contents = accessor->readFile(result.filePath); + + // For each reference found in this file, extract context + for (auto & foundRef : result.foundRefs) { + std::string hash(foundRef.hashPart()); + auto pos = contents.find(hash); + if (pos != std::string::npos) { + size_t margin = 32; + auto pos2 = pos >= margin ? pos - margin : 0; + hits[hash].emplace_back(fmt( + "%s: …%s…", p2, hilite( filterPrintable(std::string(contents, pos2, pos - pos2 + hash.size() + margin)), pos - pos2, StorePath::HashLen, getColour(hash)))); + } + } + } else if (st.type == SourceAccessor::Type::tSymlink) { + auto target = accessor->readLink(result.filePath); + + // For each reference found in this symlink, show it + for (auto & foundRef : result.foundRefs) { + std::string hash(foundRef.hashPart()); + auto pos = target.find(hash); + if (pos != std::string::npos) + hits[hash].emplace_back( + fmt("%s -> %s", p2, hilite(target, pos, StorePath::HashLen, getColour(hash)))); } } - } - - else if (st->type == SourceAccessor::Type::tSymlink) { - auto target = accessor->readLink(p); - - for (auto & hash : hashes) { - auto pos = target.find(hash); - if (pos != std::string::npos) - hits[hash].emplace_back( - fmt("%s -> %s", p2, hilite(target, pos, StorePath::HashLen, getColour(hash)))); - } - } - }; - - // FIXME: should use scanForReferences(). 
- - if (precise) - visitPath(CanonPath::root); + }); + } for (auto & ref : refs) { std::string hash(ref.second->path.hashPart()); diff --git a/src/perl/package.nix b/src/perl/package.nix index 424e38d3070..b2a1f697583 100644 --- a/src/perl/package.nix +++ b/src/perl/package.nix @@ -45,11 +45,6 @@ perl.pkgs.toPerlModule ( buildInputs = [ nix-store - ] - ++ finalAttrs.passthru.externalBuildInputs; - - # Hack for sake of the dev shell - passthru.externalBuildInputs = [ bzip2 libsodium ]; @@ -79,5 +74,9 @@ perl.pkgs.toPerlModule ( ]; strictDeps = false; + + meta = { + platforms = lib.platforms.unix; + }; }) ) diff --git a/tests/functional/binary-cache.sh b/tests/functional/binary-cache.sh index 2c102df0771..445845bba2a 100755 --- a/tests/functional/binary-cache.sh +++ b/tests/functional/binary-cache.sh @@ -18,7 +18,7 @@ outPath=$(nix-build dependencies.nix --no-out-link) nix copy --to "file://$cacheDir" "$outPath" -readarray -t paths < <(nix path-info --all --json --store "file://$cacheDir" | jq 'keys|sort|.[]' -r) +readarray -t paths < <(nix path-info --all --json --json-format 2 --store "file://$cacheDir" | jq '.info|keys|sort|.[]' -r) [[ "${#paths[@]}" -eq 3 ]] for path in "${paths[@]}"; do [[ "$path" =~ -dependencies-input-0$ ]] \ @@ -111,7 +111,13 @@ clearStore mv "$cacheDir/nar" "$cacheDir/nar2" -nix-build --substituters "file://$cacheDir" --no-require-sigs dependencies.nix -o "$TEST_ROOT/result" +nix-build --substituters "file://$cacheDir" --no-require-sigs dependencies.nix -o "$TEST_ROOT/result" 2>&1 | tee "$TEST_ROOT/log" + +# Verify that missing NARs produce warnings, not errors +# The build should succeed despite the warnings +grepQuiet "does not exist in binary cache" "$TEST_ROOT/log" +# Ensure the message is not at error level by checking that the command succeeded +[ -e "$TEST_ROOT/result" ] mv "$cacheDir/nar2" "$cacheDir/nar" diff --git a/tests/functional/build.sh b/tests/functional/build.sh index 4b13eb7ebc0..51f2e2423f3 100755 --- 
a/tests/functional/build.sh +++ b/tests/functional/build.sh @@ -175,19 +175,23 @@ test "$(<<<"$out" grep -cE '^error:')" = 3 out="$(nix build -f fod-failing.nix -L x4 2>&1)" && status=0 || status=$? test "$status" = 1 -test "$(<<<"$out" grep -cE '^error:')" = 2 +# Precise number of errors depends on daemon version / goal refactorings +(( "$(<<<"$out" grep -cE '^error:')" >= 2 )) if isDaemonNewer "2.29pre"; then <<<"$out" grepQuiet -E "error: Cannot build '.*-x4\\.drv'" <<<"$out" grepQuiet -E "Reason: 1 dependency failed." + <<<"$out" grepQuiet -E "Build failed due to failed dependency" else <<<"$out" grepQuiet -E "error: 1 dependencies of derivation '.*-x4\\.drv' failed to build" fi -<<<"$out" grepQuiet -E "hash mismatch in fixed-output derivation '.*-x2\\.drv'" +# Either x2 or x3 could have failed, x4 depends on both symmetrically +<<<"$out" grepQuiet -E "hash mismatch in fixed-output derivation '.*-x[23]\\.drv'" out="$(nix build -f fod-failing.nix -L x4 --keep-going 2>&1)" && status=0 || status=$? test "$status" = 1 -test "$(<<<"$out" grep -cE '^error:')" = 3 +# Precise number of errors depends on daemon version / goal refactorings +(( "$(<<<"$out" grep -cE '^error:')" >= 3 )) if isDaemonNewer "2.29pre"; then <<<"$out" grepQuiet -E "error: Cannot build '.*-x4\\.drv'" <<<"$out" grepQuiet -E "Reason: 2 dependencies failed." 
diff --git a/tests/functional/ca/derivation-json.sh b/tests/functional/ca/derivation-json.sh index 2103707a2e8..eb1d949676a 100644 --- a/tests/functional/ca/derivation-json.sh +++ b/tests/functional/ca/derivation-json.sh @@ -6,7 +6,7 @@ export NIX_TESTS_CA_BY_DEFAULT=1 drvPath=$(nix-instantiate ../simple.nix) -nix derivation show "$drvPath" | jq .[] > "$TEST_HOME"/simple.json +nix derivation show "$drvPath" | jq '.derivations[]' > "$TEST_HOME"/simple.json drvPath2=$(nix derivation add < "$TEST_HOME"/simple.json) @@ -27,5 +27,5 @@ drvPath4=$(nix derivation add < "$TEST_HOME"/foo.json) [[ -e "$drvPath3" ]] # The modified derivation read back as JSON matches -nix derivation show "$drvPath3" | jq .[] > "$TEST_HOME"/foo-read.json +nix derivation show "$drvPath3" | jq '.derivations[]' > "$TEST_HOME"/foo-read.json diff "$TEST_HOME"/foo.json "$TEST_HOME"/foo-read.json diff --git a/tests/functional/ca/issue-13247.sh b/tests/functional/ca/issue-13247.sh index 686d90cede6..70591951329 100755 --- a/tests/functional/ca/issue-13247.sh +++ b/tests/functional/ca/issue-13247.sh @@ -65,7 +65,4 @@ buildViaSubstitute use-a-prime-more-outputs^first # Should only fetch the output we asked for [[ -d "$(jq -r <"$TEST_ROOT"/a.json '.[0].outputs.out')" ]] [[ -f "$(jq -r <"$TEST_ROOT"/a.json '.[2].outputs.first')" ]] - -# Output should *not* be here, this is the bug -[[ -e "$(jq -r <"$TEST_ROOT"/a.json '.[2].outputs.second')" ]] -skipTest "bug is not yet fixed" +[[ ! 
-e "$(jq -r <"$TEST_ROOT"/a.json '.[2].outputs.second')" ]] diff --git a/tests/functional/characterisation/framework.sh b/tests/functional/characterisation/framework.sh index d2c2155db80..6490d4ff5dd 100644 --- a/tests/functional/characterisation/framework.sh +++ b/tests/functional/characterisation/framework.sh @@ -71,7 +71,7 @@ function characterisationTestExit() { echo >&2 '' echo >&2 'You can rerun this test with:' echo >&2 '' - echo >&2 " _NIX_TEST_ACCEPT=1 meson test ${TEST_NAME}" + echo >&2 " _NIX_TEST_ACCEPT=1 meson test --suite ${TEST_SUITE_NAME} ${TEST_NAME}" echo >&2 '' echo >&2 'to regenerate the files containing the expected output,' echo >&2 'and then view the git diff to decide whether a change is' diff --git a/tests/functional/check-refs.sh b/tests/functional/check-refs.sh index 590c3fb536f..16bcac1582e 100755 --- a/tests/functional/check-refs.sh +++ b/tests/functional/check-refs.sh @@ -64,5 +64,13 @@ fi if isDaemonNewer "2.28pre20241225"; then # test12 should fail (syntactically invalid). expectStderr 1 nix-build -vvv -o "$RESULT" check-refs.nix -A test12 >"$TEST_ROOT/test12.stderr" - grepQuiet -F "output check for 'lib' contains an illegal reference specifier 'dev', expected store path or output name (one of [lib, out])" < "$TEST_ROOT/test12.stderr" + if isDaemonNewer "2.33pre20251110"; then + grepQuiet -F \ + "output check for 'lib' contains output name 'dev', but this is not a valid output of this derivation. 
(Valid outputs are [lib, out].)" \ + < "$TEST_ROOT/test12.stderr" + else + grepQuiet -F \ + "output check for 'lib' contains an illegal reference specifier 'dev', expected store path or output name (one of [lib, out])" \ + < "$TEST_ROOT/test12.stderr" + fi fi diff --git a/tests/functional/common/test-root.sh b/tests/functional/common/test-root.sh index b50a062672a..db17132f44d 100644 --- a/tests/functional/common/test-root.sh +++ b/tests/functional/common/test-root.sh @@ -1,4 +1,5 @@ # shellcheck shell=bash -TEST_ROOT=$(realpath "${TMPDIR:-/tmp}/nix-test")/${TEST_NAME:-default/tests\/functional//} +TEST_SUBDIR="${TEST_SUITE_NAME:-default}/${TEST_NAME:-tests/functional/}" +TEST_ROOT=$(realpath "${TMPDIR:-/tmp}/nix-test")/"$TEST_SUBDIR" export TEST_ROOT diff --git a/tests/functional/common/vars.sh b/tests/functional/common/vars.sh index ed4b477278f..d4d917dae8d 100644 --- a/tests/functional/common/vars.sh +++ b/tests/functional/common/vars.sh @@ -49,6 +49,9 @@ if ! isTestOnNixOS; then fi export _NIX_IN_TEST=$TEST_ROOT/shared export _NIX_TEST_NO_LSOF=1 + # Suppress warnings that depend on the test environment (e.g., ulimit warnings) + # to avoid non-deterministic test failures in golden tests + export _NIX_TEST_NO_ENVIRONMENT_WARNINGS=1 export NIX_REMOTE=${NIX_REMOTE_-} fi # ! 
isTestOnNixOS diff --git a/tests/functional/derivation-json.sh b/tests/functional/derivation-json.sh index 06f934cfe0a..d2518b6960e 100755 --- a/tests/functional/derivation-json.sh +++ b/tests/functional/derivation-json.sh @@ -4,11 +4,20 @@ source common.sh drvPath=$(nix-instantiate simple.nix) -nix derivation show "$drvPath" | jq .[] > "$TEST_HOME"/simple.json - -drvPath2=$(nix derivation add < "$TEST_HOME"/simple.json) +nix derivation show "$drvPath" | jq '.derivations[]' > "$TEST_HOME/simple.json" +# Round tripping to JSON works +drvPath2=$(nix derivation add < "$TEST_HOME/simple.json") [[ "$drvPath" = "$drvPath2" ]] +# Derivation is input addressed, all outputs have a path +jq -e '.outputs | .[] | has("path")' < "$TEST_HOME/simple.json" + # Input addressed derivations cannot be renamed. -jq '.name = "foo"' < "$TEST_HOME"/simple.json | expectStderr 1 nix derivation add | grepQuiet "has incorrect output" +jq '.name = "foo"' < "$TEST_HOME/simple.json" | expectStderr 1 nix derivation add | grepQuiet "has incorrect output" + +# If we remove the input addressed to make it a deferred derivation, we +# still get the same result because Nix will see that need not be +# deferred and fill in the right input address for us. 
+drvPath3=$(jq '.outputs |= map_values(del(.path))' < "$TEST_HOME/simple.json" | nix derivation add) +[[ "$drvPath" = "$drvPath3" ]] diff --git a/tests/functional/derivation/advanced-attributes-structured-attrs.nix b/tests/functional/derivation/advanced-attributes-structured-attrs.nix index 46f619272ec..b11041303b4 100644 --- a/tests/functional/derivation/advanced-attributes-structured-attrs.nix +++ b/tests/functional/derivation/advanced-attributes-structured-attrs.nix @@ -66,10 +66,16 @@ derivation' { outputChecks = { out = { allowedReferences = [ foo ]; - allowedRequisites = [ foo.dev ]; + allowedRequisites = [ + foo.dev + "bin" + ]; }; bin = { - disallowedReferences = [ bar ]; + disallowedReferences = [ + bar + "dev" + ]; disallowedRequisites = [ bar.dev ]; }; dev = { diff --git a/tests/functional/derivation/advanced-attributes.nix b/tests/functional/derivation/advanced-attributes.nix index dd0c09e22d2..19a80f15dc7 100644 --- a/tests/functional/derivation/advanced-attributes.nix +++ b/tests/functional/derivation/advanced-attributes.nix @@ -58,8 +58,14 @@ derivation' { impureEnvVars = [ "UNICORN" ]; __darwinAllowLocalNetworking = true; allowedReferences = [ foo ]; - allowedRequisites = [ foo.dev ]; - disallowedReferences = [ bar ]; + allowedRequisites = [ + foo.dev + "bin" + ]; + disallowedReferences = [ + bar + "dev" + ]; disallowedRequisites = [ bar.dev ]; requiredSystemFeatures = [ "rainbow" diff --git a/tests/functional/derivation/ca/advanced-attributes-structured-attrs.drv b/tests/functional/derivation/ca/advanced-attributes-structured-attrs.drv index cd02c2f8688..eeaba88e68b 100644 --- a/tests/functional/derivation/ca/advanced-attributes-structured-attrs.drv +++ b/tests/functional/derivation/ca/advanced-attributes-structured-attrs.drv @@ -1 +1 @@ 
-Derive([("bin","","r:sha256",""),("dev","","r:sha256",""),("out","","r:sha256","")],[("/nix/store/j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv",["dev","out"]),("/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv",["dev","out"])],["/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"],"my-system","/bin/bash",["-c","echo hello > $out"],[("__json","{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"exportReferencesGraph\":{\"refs1\":[\"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9\"],\"refs2\":[\"/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv\"]},\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g\"],\"disallowedRequisites\":[\"/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9\"],\"allowedRequisites\":[\"/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z\"]}},\"outputHashAlgo\":\"sha256\",\"outputHashMode\":\"recursive\",\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}"),("bin","/04f3da1kmbr67m3gzxikmsl4vjz5zf777sv6m14ahv22r65aac9m"),("dev","/02qcpld1y6xhs5gz9bchpxaw0xdhmsp5dv88lh25r2ss44kh8dxz"),("out","/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9")]) \ No newline at end of file +Derive([("bin","","r:sha256",""),("dev","","r:sha256",""),("out","","r:sha256","")],[("/nix/store/j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv",["dev","out"]),("/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv",["dev","out"])],["/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"],"my-system","/bin/bash",["-c","echo hello > 
$out"],[("__json","{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"exportReferencesGraph\":{\"refs1\":[\"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9\"],\"refs2\":[\"/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv\"]},\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g\",\"dev\"],\"disallowedRequisites\":[\"/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9\"],\"allowedRequisites\":[\"/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z\",\"bin\"]}},\"outputHashAlgo\":\"sha256\",\"outputHashMode\":\"recursive\",\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}"),("bin","/04f3da1kmbr67m3gzxikmsl4vjz5zf777sv6m14ahv22r65aac9m"),("dev","/02qcpld1y6xhs5gz9bchpxaw0xdhmsp5dv88lh25r2ss44kh8dxz"),("out","/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9")]) \ No newline at end of file diff --git a/tests/functional/derivation/ca/advanced-attributes.drv b/tests/functional/derivation/ca/advanced-attributes.drv index 068cb593e83..ee5968cdced 100644 --- a/tests/functional/derivation/ca/advanced-attributes.drv +++ b/tests/functional/derivation/ca/advanced-attributes.drv @@ -1 +1 @@ -Derive([("out","","r:sha256","")],[("/nix/store/j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv",["dev","out"]),("/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv",["dev","out"])],["/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"],"my-system","/bin/bash",["-c","echo hello > 
$out"],[("__darwinAllowLocalNetworking","1"),("__impureHostDeps","/usr/bin/ditto"),("__noChroot","1"),("__sandboxProfile","sandcastle"),("allowSubstitutes",""),("allowedReferences","/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9"),("allowedRequisites","/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z"),("builder","/bin/bash"),("disallowedReferences","/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g"),("disallowedRequisites","/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8"),("exportReferencesGraph","refs1 /164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9 refs2 /nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"),("impureEnvVars","UNICORN"),("name","advanced-attributes"),("out","/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9"),("outputHashAlgo","sha256"),("outputHashMode","recursive"),("preferLocalBuild","1"),("requiredSystemFeatures","rainbow uid-range"),("system","my-system")]) \ No newline at end of file +Derive([("out","","r:sha256","")],[("/nix/store/j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv",["dev","out"]),("/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv",["dev","out"])],["/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"],"my-system","/bin/bash",["-c","echo hello > $out"],[("__darwinAllowLocalNetworking","1"),("__impureHostDeps","/usr/bin/ditto"),("__noChroot","1"),("__sandboxProfile","sandcastle"),("allowSubstitutes",""),("allowedReferences","/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9"),("allowedRequisites","/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z bin"),("builder","/bin/bash"),("disallowedReferences","/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g dev"),("disallowedRequisites","/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8"),("exportReferencesGraph","refs1 /164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9 refs2 
/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"),("impureEnvVars","UNICORN"),("name","advanced-attributes"),("out","/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9"),("outputHashAlgo","sha256"),("outputHashMode","recursive"),("preferLocalBuild","1"),("requiredSystemFeatures","rainbow uid-range"),("system","my-system")]) \ No newline at end of file diff --git a/tests/functional/derivation/ia/advanced-attributes-structured-attrs.drv b/tests/functional/derivation/ia/advanced-attributes-structured-attrs.drv index 1dfcac42dc5..0aa82e636f8 100644 --- a/tests/functional/derivation/ia/advanced-attributes-structured-attrs.drv +++ b/tests/functional/derivation/ia/advanced-attributes-structured-attrs.drv @@ -1 +1 @@ -Derive([("bin","/nix/store/33qms3h55wlaspzba3brlzlrm8m2239g-advanced-attributes-structured-attrs-bin","",""),("dev","/nix/store/wyfgwsdi8rs851wmy1xfzdxy7y5vrg5l-advanced-attributes-structured-attrs-dev","",""),("out","/nix/store/7cxy4zx1vqc885r4jl2l64pymqbdmhii-advanced-attributes-structured-attrs","","")],[("/nix/store/afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv",["dev","out"]),("/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv",["dev","out"])],["/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"],"my-system","/bin/bash",["-c","echo hello > 
$out"],[("__json","{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"exportReferencesGraph\":{\"refs1\":[\"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo\"],\"refs2\":[\"/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv\"]},\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar\"],\"disallowedRequisites\":[\"/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo\"],\"allowedRequisites\":[\"/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev\"]}},\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}"),("bin","/nix/store/33qms3h55wlaspzba3brlzlrm8m2239g-advanced-attributes-structured-attrs-bin"),("dev","/nix/store/wyfgwsdi8rs851wmy1xfzdxy7y5vrg5l-advanced-attributes-structured-attrs-dev"),("out","/nix/store/7cxy4zx1vqc885r4jl2l64pymqbdmhii-advanced-attributes-structured-attrs")]) \ No newline at end of file +Derive([("bin","/nix/store/cnpasdljgkhnwaf78cf3qygcp4qbki1c-advanced-attributes-structured-attrs-bin","",""),("dev","/nix/store/ijq6mwpa9jbnpnl33qldfqihrr38kprx-advanced-attributes-structured-attrs-dev","",""),("out","/nix/store/h1vh648d3p088kdimy0r8ngpfx7c3nzw-advanced-attributes-structured-attrs","","")],[("/nix/store/afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv",["dev","out"]),("/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv",["dev","out"])],["/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"],"my-system","/bin/bash",["-c","echo hello > 
$out"],[("__json","{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"exportReferencesGraph\":{\"refs1\":[\"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo\"],\"refs2\":[\"/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv\"]},\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar\",\"dev\"],\"disallowedRequisites\":[\"/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo\"],\"allowedRequisites\":[\"/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev\",\"bin\"]}},\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}"),("bin","/nix/store/cnpasdljgkhnwaf78cf3qygcp4qbki1c-advanced-attributes-structured-attrs-bin"),("dev","/nix/store/ijq6mwpa9jbnpnl33qldfqihrr38kprx-advanced-attributes-structured-attrs-dev"),("out","/nix/store/h1vh648d3p088kdimy0r8ngpfx7c3nzw-advanced-attributes-structured-attrs")]) \ No newline at end of file diff --git a/tests/functional/derivation/ia/advanced-attributes.drv b/tests/functional/derivation/ia/advanced-attributes.drv index c71a8888614..4bc7320f573 100644 --- a/tests/functional/derivation/ia/advanced-attributes.drv +++ b/tests/functional/derivation/ia/advanced-attributes.drv @@ -1 +1 @@ -Derive([("out","/nix/store/wyhpwd748pns4k7svh48wdrc8kvjk0ra-advanced-attributes","","")],[("/nix/store/afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv",["dev","out"]),("/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv",["dev","out"])],["/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"],"my-system","/bin/bash",["-c","echo hello > 
$out"],[("__darwinAllowLocalNetworking","1"),("__impureHostDeps","/usr/bin/ditto"),("__noChroot","1"),("__sandboxProfile","sandcastle"),("allowSubstitutes",""),("allowedReferences","/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo"),("allowedRequisites","/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev"),("builder","/bin/bash"),("disallowedReferences","/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar"),("disallowedRequisites","/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev"),("exportReferencesGraph","refs1 /nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo refs2 /nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"),("impureEnvVars","UNICORN"),("name","advanced-attributes"),("out","/nix/store/wyhpwd748pns4k7svh48wdrc8kvjk0ra-advanced-attributes"),("preferLocalBuild","1"),("requiredSystemFeatures","rainbow uid-range"),("system","my-system")]) \ No newline at end of file +Derive([("out","/nix/store/ymqmybkq5j4nd1xplw6ccdpbjnfi017v-advanced-attributes","","")],[("/nix/store/afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv",["dev","out"]),("/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv",["dev","out"])],["/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"],"my-system","/bin/bash",["-c","echo hello > $out"],[("__darwinAllowLocalNetworking","1"),("__impureHostDeps","/usr/bin/ditto"),("__noChroot","1"),("__sandboxProfile","sandcastle"),("allowSubstitutes",""),("allowedReferences","/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo"),("allowedRequisites","/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev bin"),("builder","/bin/bash"),("disallowedReferences","/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar dev"),("disallowedRequisites","/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev"),("exportReferencesGraph","refs1 /nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo refs2 
/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"),("impureEnvVars","UNICORN"),("name","advanced-attributes"),("out","/nix/store/ymqmybkq5j4nd1xplw6ccdpbjnfi017v-advanced-attributes"),("preferLocalBuild","1"),("requiredSystemFeatures","rainbow uid-range"),("system","my-system")]) \ No newline at end of file diff --git a/tests/functional/dyn-drv/non-trivial.nix b/tests/functional/dyn-drv/non-trivial.nix index 3c24ac2ee4b..87f2d9cfec7 100644 --- a/tests/functional/dyn-drv/non-trivial.nix +++ b/tests/functional/dyn-drv/non-trivial.nix @@ -51,10 +51,12 @@ builtins.outputOf "$word": "hello, from $word!", "PATH": ${builtins.toJSON path} }, - "inputDrvs": { - $inputDrvs + "inputs": { + "drvs": { + $inputDrvs + }, + "srcs": [] }, - "inputSrcs": [], "name": "build-$word", "outputs": { "out": { @@ -63,7 +65,7 @@ builtins.outputOf } }, "system": "${system}", - "version": 3 + "version": 4 } EOF drvPath=$(echo "$json" | nix derivation add) diff --git a/tests/functional/dyn-drv/recursive-mod-json.nix b/tests/functional/dyn-drv/recursive-mod-json.nix index 2a7a1ec1a81..eab8e4cdfbb 100644 --- a/tests/functional/dyn-drv/recursive-mod-json.nix +++ b/tests/functional/dyn-drv/recursive-mod-json.nix @@ -18,7 +18,7 @@ mkDerivation rec { PATH=${builtins.getEnv "EXTRA_PATH"}:$PATH # JSON of pre-existing drv - nix derivation show $drv | jq .[] > drv0.json + nix derivation show $drv | jq '.derivations[]' > drv0.json # Fix name jq < drv0.json '.name = "${innerName}"' > drv1.json diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh index f4a03cce3c9..31a8d0d2bc1 100755 --- a/tests/functional/fetchGit.sh +++ b/tests/functional/fetchGit.sh @@ -228,6 +228,10 @@ path8=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$rep rev4=$(git -C "$repo" rev-parse HEAD) rev4_nix=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"HEAD\"; }).rev") [[ $rev4 = "$rev4_nix" ]] +export _NIX_FORCE_HTTP=1 +rev4_nix=$(nix eval --impure 
--raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"HEAD\"; }).rev") +[[ $rev4 = "$rev4_nix" ]] +unset _NIX_FORCE_HTTP # The name argument should be handled path9=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"HEAD\"; name = \"foo\"; }).outPath") diff --git a/tests/functional/fixed.sh b/tests/functional/fixed.sh index edf6f88d4ed..9d769a578a7 100755 --- a/tests/functional/fixed.sh +++ b/tests/functional/fixed.sh @@ -14,7 +14,11 @@ nix-build fixed.nix -A bad --no-out-link && fail "should fail" # Building with the bad hash should produce the "good" output path as # a side-effect. [[ -e $path ]] -nix path-info --json "$path" | grep fixed:md5:2qk15sxzzjlnpjk9brn7j8ppcd +nix path-info --json --json-format 2 "$path" | jq -e \ + '.info.[].ca == { + method: "flat", + hash: "md5-jd2L5LF5pSmvpfL/rkuYWA==" + }' echo 'testing good...' nix-build fixed.nix -A good --no-out-link diff --git a/tests/functional/flakes/check.sh b/tests/functional/flakes/check.sh index 55cd3805ff2..cb4e3eebafc 100755 --- a/tests/functional/flakes/check.sh +++ b/tests/functional/flakes/check.sh @@ -192,3 +192,24 @@ EOF # shellcheck disable=SC2015 checkRes=$(nix flake check "$flakeDir" 2>&1 && fail "nix flake check should have failed" || true) echo "$checkRes" | grepQuiet -E "builder( for .*)? 
failed with exit code 1" + +# Test that attribute paths are shown in error messages +cat > "$flakeDir"/flake.nix <&1 && fail "nix flake check should have failed" || true) +echo "$checkRes" | grepQuiet "checks.${system}.failingCheck" +echo "$checkRes" | grepQuiet "checks.${system}.anotherFailingCheck" diff --git a/tests/functional/flakes/develop.sh b/tests/functional/flakes/develop.sh index c222f0fbbeb..ee646860b11 100755 --- a/tests/functional/flakes/develop.sh +++ b/tests/functional/flakes/develop.sh @@ -18,6 +18,21 @@ cat <"$TEST_HOME/flake.nix" outputs = [ "out" "dev" ]; meta.outputsToInstall = [ "out" ]; buildCommand = ""; + # ensure we're stripping these from the environment derivation + disallowedReferences = [ "out" ]; + disallowedRequisites = [ "out" ]; + }; + packages.$system.hello-structured = (import ./config.nix).mkDerivation { + __structuredAttrs = true; + name = "hello"; + outputs = [ "out" "dev" ]; + meta.outputsToInstall = [ "out" ]; + buildCommand = ""; + # ensure we're stripping these from the environment derivation + outputChecks.out = { + disallowedReferences = [ "out" ]; + disallowedRequisites = [ "out" ]; + }; }; }; } @@ -142,4 +157,7 @@ echo "\$SHELL" EOF )" -ef "$BASH_INTERACTIVE_EXECUTABLE" ]] +# Test whether `nix develop` works with `__structuredAttrs` +[[ -z "$(nix develop --no-write-lock-file .#hello-structured > "$TEST_ROOT"/actual-env sort "$TEST_ROOT"/actual-env | uniq > "$TEST_ROOT"/actual-env.sorted diff "$TEST_ROOT"/expected-env.sorted "$TEST_ROOT"/actual-env.sorted +# Test for issue #13994: verify behavior of -- separator with installable +# Create a flake with an app that prints its arguments clearStore +rm -rf "$TEST_HOME"/.cache "$TEST_HOME"/.config "$TEST_HOME"/.local +cd "$TEST_HOME" + +cat <<'EOF' > print-args.sh +#!/bin/sh +printf "ARGS:" +for arg in "$@"; do + printf " %s" "$arg" +done +printf "\n" +EOF +chmod +x print-args.sh + +cat < flake.nix +{ + outputs = {self}: { + apps.$system.default = { + type = "app"; + program 
= "\${self}/print-args.sh"; + }; + }; +} +EOF + +# Test correct usage: installable before -- +nix run --no-write-lock-file . -- myarg1 myarg2 2>&1 | grepQuiet "ARGS: myarg1 myarg2" + +# Test that first positional argument is still treated as installable after -- (issue #13994) +nix run --no-write-lock-file -- . myarg1 myarg2 2>&1 | grepQuiet "ARGS: myarg1 myarg2" + +# And verify that a non-installable first argument causes an error +expectStderr 1 nix run --no-write-lock-file -- myarg1 myarg2 | grepQuiet "error.*myarg1" diff --git a/tests/functional/flakes/show.sh b/tests/functional/flakes/show.sh index c8238bf82b4..fadf2700d3a 100755 --- a/tests/functional/flakes/show.sh +++ b/tests/functional/flakes/show.sh @@ -101,3 +101,26 @@ in assert show_output.packages.${builtins.currentSystem}.default == { }; true ' + + +# Test that nix keeps going even when packages.$SYSTEM contains not derivations +cat >flake.nix < show-output.json +# shellcheck disable=SC2016 +nix eval --impure --expr ' +let show_output = builtins.fromJSON (builtins.readFile ./show-output.json); +in +assert show_output.packages.${builtins.currentSystem}.not-a-derivation == {}; +true +' + diff --git a/tests/functional/flakes/substitution.sh b/tests/functional/flakes/substitution.sh index f7ea6001ce3..97a04931abf 100644 --- a/tests/functional/flakes/substitution.sh +++ b/tests/functional/flakes/substitution.sh @@ -28,4 +28,4 @@ clearStore nix build --no-link "$flake2Dir#bar" --no-eval-cache --substitute --substituters "$cache" clearStore -expectStderr 1 nix build --no-link "$flake2Dir#bar" --no-eval-cache | grepQuiet "The path.*does not exist" +expectStderr 1 nix build --no-link "$flake2Dir#bar" --no-eval-cache | grepQuiet "Git repository.*does not exist" diff --git a/tests/functional/git-hashing/simple-common.sh b/tests/functional/git-hashing/simple-common.sh index 08b5c0e718a..a776ec43e00 100644 --- a/tests/functional/git-hashing/simple-common.sh +++ b/tests/functional/git-hashing/simple-common.sh @@ 
-47,9 +47,16 @@ try2 () { hashFromGit=$(git -C "$repo" rev-parse "HEAD:$hashPath") [[ "$hashFromGit" == "$expected" ]] - local caFromNix - caFromNix=$(nix path-info --json "$path" | jq -r ".[] | .ca") - [[ "fixed:git:$hashAlgo:$(nix hash convert --to nix32 "$hashAlgo:$hashFromGit")" = "$caFromNix" ]] + # Convert base16 hash to SRI format for comparison + local hashSRI + hashSRI=$(nix hash convert --from base16 --to sri --hash-algo "$hashAlgo" "$hashFromGit") + + nix path-info --json --json-format 2 "$path" | jq -e \ + --arg hashSRI "$hashSRI" \ + '.info.[].ca == { + method: "git", + hash: $hashSRI + }' } test0 () { diff --git a/tests/functional/hash-convert.sh b/tests/functional/hash-convert.sh index 9ef4c189de4..be76179ca87 100755 --- a/tests/functional/hash-convert.sh +++ b/tests/functional/hash-convert.sh @@ -93,10 +93,10 @@ try3() { # Asserting input format fails. # - expectStderr 1 nix hash convert --hash-algo "$1" --from sri "$2" | grepQuiet "is not SRI" - expectStderr 1 nix hash convert --hash-algo "$1" --from nix32 "$2" | grepQuiet "input hash" - expectStderr 1 nix hash convert --hash-algo "$1" --from base16 "$3" | grepQuiet "input hash" - expectStderr 1 nix hash convert --hash-algo "$1" --from nix32 "$4" | grepQuiet "input hash" + expectStderr 1 nix hash convert --hash-algo "$1" --from sri "$2" | grepQuiet "'base16', but '--from sri'" + expectStderr 1 nix hash convert --hash-algo "$1" --from nix32 "$2" | grepQuiet "'base16', but '--from nix32'" + expectStderr 1 nix hash convert --hash-algo "$1" --from base16 "$3" | grepQuiet "'nix32', but '--from base16'" + expectStderr 1 nix hash convert --hash-algo "$1" --from nix32 "$4" | grepQuiet "'base64', but '--from nix32'" # Base-16 hashes can be in uppercase. 
nix hash convert --hash-algo "$1" --from base16 "$(echo "$2" | tr '[:lower:]' '[:upper:]')" diff --git a/tests/functional/impure-derivations.sh b/tests/functional/impure-derivations.sh index 1c942dc52d6..89392ce3071 100755 --- a/tests/functional/impure-derivations.sh +++ b/tests/functional/impure-derivations.sh @@ -16,7 +16,7 @@ printf 0 > "$TEST_ROOT"/counter # `nix derivation add` with impure derivations work drvPath=$(nix-instantiate ./impure-derivations.nix -A impure) -nix derivation show "$drvPath" | jq .[] > "$TEST_HOME"/impure-drv.json +nix derivation show "$drvPath" | jq '.derivations[]' > "$TEST_HOME"/impure-drv.json drvPath2=$(nix derivation add < "$TEST_HOME"/impure-drv.json) [[ "$drvPath" = "$drvPath2" ]] @@ -30,7 +30,7 @@ path1_stuff=$(echo "$json" | jq -r .[].outputs.stuff) [[ $(< "$path1"/n) = 0 ]] [[ $(< "$path1_stuff"/bla) = 0 ]] -[[ $(nix path-info --json "$path1" | jq .[].ca) =~ fixed:r:sha256: ]] +nix path-info --json --json-format 2 "$path1" | jq -e '.info.[].ca | .method == "nar" and (.hash | startswith("sha256-"))' path2=$(nix build -L --no-link --json --file ./impure-derivations.nix impure | jq -r .[].outputs.out) [[ $(< "$path2"/n) = 1 ]] @@ -50,8 +50,8 @@ path4=$(nix build -L --no-link --json --file ./impure-derivations.nix impureOnIm (! nix build -L --no-link --json --file ./impure-derivations.nix inputAddressed 2>&1) | grep 'depends on impure derivation' drvPath=$(nix eval --json --file ./impure-derivations.nix impure.drvPath | jq -r .) -[[ $(nix derivation show "$drvPath" | jq ".[\"$(basename "$drvPath")\"].outputs.out.impure") = true ]] -[[ $(nix derivation show "$drvPath" | jq ".[\"$(basename "$drvPath")\"].outputs.stuff.impure") = true ]] +[[ $(nix derivation show "$drvPath" | jq ".derivations[\"$(basename "$drvPath")\"].outputs.out.impure") = true ]] +[[ $(nix derivation show "$drvPath" | jq ".derivations[\"$(basename "$drvPath")\"].outputs.stuff.impure") = true ]] # Fixed-output derivations *can* depend on impure derivations. 
path5=$(nix build -L --no-link --json --file ./impure-derivations.nix contentAddressed | jq -r .[].outputs.out) diff --git a/tests/functional/lang/eval-fail-deepseq-list-attr.err.exp b/tests/functional/lang/eval-fail-deepseq-list-attr.err.exp new file mode 100644 index 00000000000..9abd937ba1a --- /dev/null +++ b/tests/functional/lang/eval-fail-deepseq-list-attr.err.exp @@ -0,0 +1,25 @@ +error: + … while calling the 'deepSeq' builtin + at /pwd/lang/eval-fail-deepseq-list-attr.nix:3:1: + 2| + 3| builtins.deepSeq [ + | ^ + 4| 1 + + … while evaluating list element at index 1 + + … while evaluating the attribute 'b' + at /pwd/lang/eval-fail-deepseq-list-attr.nix:7:5: + 6| a = 2; + 7| b = throw "error in attr in list element"; + | ^ + 8| } + + … while calling the 'throw' builtin + at /pwd/lang/eval-fail-deepseq-list-attr.nix:7:9: + 6| a = 2; + 7| b = throw "error in attr in list element"; + | ^ + 8| } + + error: error in attr in list element diff --git a/tests/functional/lang/eval-fail-deepseq-list-attr.nix b/tests/functional/lang/eval-fail-deepseq-list-attr.nix new file mode 100644 index 00000000000..5ffd8c19629 --- /dev/null +++ b/tests/functional/lang/eval-fail-deepseq-list-attr.nix @@ -0,0 +1,10 @@ +# Test that deepSeq reports list index and attribute name in error traces. 
+ +builtins.deepSeq [ + 1 + { + a = 2; + b = throw "error in attr in list element"; + } + 3 +] "unexpected success" diff --git a/tests/functional/lang/eval-fail-deepseq-stack-overflow.err.exp b/tests/functional/lang/eval-fail-deepseq-stack-overflow.err.exp new file mode 100644 index 00000000000..f142b5c4d45 --- /dev/null +++ b/tests/functional/lang/eval-fail-deepseq-stack-overflow.err.exp @@ -0,0 +1,25 @@ +error: + … while calling the 'deepSeq' builtin + at /pwd/lang/eval-fail-deepseq-stack-overflow.nix:8:1: + 7| in + 8| builtins.deepSeq reverseLinkedList ( + | ^ + 9| throw "unexpected success; expected a controlled stack overflow instead" + + … while evaluating the attribute 'tail' + at /pwd/lang/eval-fail-deepseq-stack-overflow.nix:6:67: + 5| long = builtins.genList (x: x) 100000; + 6| reverseLinkedList = builtins.foldl' (tail: head: { inherit head tail; }) null long; + | ^ + 7| in + + (9997 duplicate frames omitted) + + … while evaluating the attribute 'head' + at /pwd/lang/eval-fail-deepseq-stack-overflow.nix:6:62: + 5| long = builtins.genList (x: x) 100000; + 6| reverseLinkedList = builtins.foldl' (tail: head: { inherit head tail; }) null long; + | ^ + 7| in + + error: stack overflow; max-call-depth exceeded diff --git a/tests/functional/lang/eval-fail-deepseq-stack-overflow.nix b/tests/functional/lang/eval-fail-deepseq-stack-overflow.nix new file mode 100644 index 00000000000..08c0fe4e812 --- /dev/null +++ b/tests/functional/lang/eval-fail-deepseq-stack-overflow.nix @@ -0,0 +1,10 @@ +# Test that deepSeq on a deeply nested structure produces a controlled +# stack overflow error rather than a segfault. 
+ +let + long = builtins.genList (x: x) 100000; + reverseLinkedList = builtins.foldl' (tail: head: { inherit head tail; }) null long; +in +builtins.deepSeq reverseLinkedList ( + throw "unexpected success; expected a controlled stack overflow instead" +) diff --git a/tests/functional/lang/eval-fail-derivation-structuredAttrs-stack-overflow.err.exp b/tests/functional/lang/eval-fail-derivation-structuredAttrs-stack-overflow.err.exp new file mode 100644 index 00000000000..c61eab0aa42 --- /dev/null +++ b/tests/functional/lang/eval-fail-derivation-structuredAttrs-stack-overflow.err.exp @@ -0,0 +1,54 @@ +error: + … while evaluating the attribute 'outPath' + at «nix-internal»/derivation-internal.nix:50:7: + 49| value = commonAttrs // { + 50| outPath = builtins.getAttr outputName strict; + | ^ + 51| drvPath = strict.drvPath; + + … while calling the 'getAttr' builtin + at «nix-internal»/derivation-internal.nix:50:17: + 49| value = commonAttrs // { + 50| outPath = builtins.getAttr outputName strict; + | ^ + 51| drvPath = strict.drvPath; + + … while calling the 'derivationStrict' builtin + at «nix-internal»/derivation-internal.nix:37:12: + 36| + 37| strict = derivationStrict drvAttrs; + | ^ + 38| + + … while evaluating derivation 'test' + whose name attribute is located at /pwd/lang/eval-fail-derivation-structuredAttrs-stack-overflow.nix:5:3 + + … while evaluating attribute 'nested' of derivation 'test' + at /pwd/lang/eval-fail-derivation-structuredAttrs-stack-overflow.nix:9:3: + 8| __structuredAttrs = true; + 9| nested = + | ^ + 10| let + + … while evaluating attribute 'tail' + at /pwd/lang/eval-fail-derivation-structuredAttrs-stack-overflow.nix:12:71: + 11| long = builtins.genList (x: x) 100000; + 12| reverseLinkedList = builtins.foldl' (tail: head: { inherit head tail; }) null long; + | ^ + 13| in + + (9994 duplicate frames omitted) + + … while evaluating attribute 'head' + at /pwd/lang/eval-fail-derivation-structuredAttrs-stack-overflow.nix:12:66: + 11| long = 
builtins.genList (x: x) 100000; + 12| reverseLinkedList = builtins.foldl' (tail: head: { inherit head tail; }) null long; + | ^ + 13| in + + error: stack overflow; max-call-depth exceeded + at /pwd/lang/eval-fail-derivation-structuredAttrs-stack-overflow.nix:12:66: + 11| long = builtins.genList (x: x) 100000; + 12| reverseLinkedList = builtins.foldl' (tail: head: { inherit head tail; }) null long; + | ^ + 13| in diff --git a/tests/functional/lang/eval-fail-derivation-structuredAttrs-stack-overflow.nix b/tests/functional/lang/eval-fail-derivation-structuredAttrs-stack-overflow.nix new file mode 100644 index 00000000000..c80950f1e57 --- /dev/null +++ b/tests/functional/lang/eval-fail-derivation-structuredAttrs-stack-overflow.nix @@ -0,0 +1,15 @@ +# Test that derivations with __structuredAttrs and deeply nested structures +# produce a controlled stack overflow error rather than a segfault. + +derivation { + name = "test"; + system = "x86_64-linux"; + builder = "/bin/sh"; + __structuredAttrs = true; + nested = + let + long = builtins.genList (x: x) 100000; + reverseLinkedList = builtins.foldl' (tail: head: { inherit head tail; }) null long; + in + reverseLinkedList; +} diff --git a/tests/functional/lang/eval-fail-dynamic-attrs-inherit-2.err.exp b/tests/functional/lang/eval-fail-dynamic-attrs-inherit-2.err.exp new file mode 100644 index 00000000000..e71fc23b513 --- /dev/null +++ b/tests/functional/lang/eval-fail-dynamic-attrs-inherit-2.err.exp @@ -0,0 +1,6 @@ +error: dynamic attributes not allowed in inherit + at /pwd/lang/eval-fail-dynamic-attrs-inherit-2.nix:5:15: + 4| { + 5| inherit (a) ${"b" + ""}; + | ^ + 6| } diff --git a/tests/functional/lang/eval-fail-dynamic-attrs-inherit-2.nix b/tests/functional/lang/eval-fail-dynamic-attrs-inherit-2.nix new file mode 100644 index 00000000000..7af9685fe41 --- /dev/null +++ b/tests/functional/lang/eval-fail-dynamic-attrs-inherit-2.nix @@ -0,0 +1,6 @@ +let + a.b = 1; +in +{ + inherit (a) ${"b" + ""}; +} diff --git 
a/tests/functional/lang/eval-fail-dynamic-attrs-inherit.err.exp b/tests/functional/lang/eval-fail-dynamic-attrs-inherit.err.exp new file mode 100644 index 00000000000..b08b0e20135 --- /dev/null +++ b/tests/functional/lang/eval-fail-dynamic-attrs-inherit.err.exp @@ -0,0 +1,6 @@ +error: dynamic attributes not allowed in inherit + at /pwd/lang/eval-fail-dynamic-attrs-inherit.nix:5:11: + 4| { + 5| inherit ${"a" + ""}; + | ^ + 6| } diff --git a/tests/functional/lang/eval-fail-dynamic-attrs-inherit.nix b/tests/functional/lang/eval-fail-dynamic-attrs-inherit.nix new file mode 100644 index 00000000000..3a9b684102e --- /dev/null +++ b/tests/functional/lang/eval-fail-dynamic-attrs-inherit.nix @@ -0,0 +1,6 @@ +let + a = 1; +in +{ + inherit ${"a" + ""}; +} diff --git a/tests/functional/lang/eval-fail-dynamic-attrs-let-2.err.exp b/tests/functional/lang/eval-fail-dynamic-attrs-let-2.err.exp new file mode 100644 index 00000000000..2eb7f04a7d8 --- /dev/null +++ b/tests/functional/lang/eval-fail-dynamic-attrs-let-2.err.exp @@ -0,0 +1,5 @@ +error: dynamic attributes not allowed in let + at /pwd/lang/eval-fail-dynamic-attrs-let-2.nix:1:1: + 1| let + | ^ + 2| ${"${"a"}"} = 1; diff --git a/tests/functional/lang/eval-fail-dynamic-attrs-let-2.nix b/tests/functional/lang/eval-fail-dynamic-attrs-let-2.nix new file mode 100644 index 00000000000..bcec33ddf5a --- /dev/null +++ b/tests/functional/lang/eval-fail-dynamic-attrs-let-2.nix @@ -0,0 +1,4 @@ +let + ${"${"a"}"} = 1; +in +a diff --git a/tests/functional/lang/eval-fail-dynamic-attrs-let-3.err.exp b/tests/functional/lang/eval-fail-dynamic-attrs-let-3.err.exp new file mode 100644 index 00000000000..0f44e25dd3f --- /dev/null +++ b/tests/functional/lang/eval-fail-dynamic-attrs-let-3.err.exp @@ -0,0 +1,5 @@ +error: dynamic attributes not allowed in let + at /pwd/lang/eval-fail-dynamic-attrs-let-3.nix:1:1: + 1| let + | ^ + 2| "${"a"}" = 1; diff --git a/tests/functional/lang/eval-fail-dynamic-attrs-let-3.nix 
b/tests/functional/lang/eval-fail-dynamic-attrs-let-3.nix new file mode 100644 index 00000000000..37453c5309c --- /dev/null +++ b/tests/functional/lang/eval-fail-dynamic-attrs-let-3.nix @@ -0,0 +1,4 @@ +let + "${"a"}" = 1; +in +a diff --git a/tests/functional/lang/eval-fail-dynamic-attrs-let.err.exp b/tests/functional/lang/eval-fail-dynamic-attrs-let.err.exp new file mode 100644 index 00000000000..ca319213384 --- /dev/null +++ b/tests/functional/lang/eval-fail-dynamic-attrs-let.err.exp @@ -0,0 +1,5 @@ +error: dynamic attributes not allowed in let + at /pwd/lang/eval-fail-dynamic-attrs-let.nix:1:1: + 1| let + | ^ + 2| ${"a" + ""} = 1; diff --git a/tests/functional/lang/eval-fail-dynamic-attrs-let.nix b/tests/functional/lang/eval-fail-dynamic-attrs-let.nix new file mode 100644 index 00000000000..fca32ae4f3d --- /dev/null +++ b/tests/functional/lang/eval-fail-dynamic-attrs-let.nix @@ -0,0 +1,4 @@ +let + ${"a" + ""} = 1; +in +a diff --git a/tests/functional/lang/eval-fail-empty-formals.err.exp b/tests/functional/lang/eval-fail-empty-formals.err.exp new file mode 100644 index 00000000000..5cd4829f7d0 --- /dev/null +++ b/tests/functional/lang/eval-fail-empty-formals.err.exp @@ -0,0 +1,12 @@ +error: + … from call site + at /pwd/lang/eval-fail-empty-formals.nix:1:1: + 1| (foo@{ }: 1) { a = 3; } + | ^ + 2| + + error: function 'anonymous lambda' called with unexpected argument 'a' + at /pwd/lang/eval-fail-empty-formals.nix:1:2: + 1| (foo@{ }: 1) { a = 3; } + | ^ + 2| diff --git a/tests/functional/lang/eval-fail-empty-formals.nix b/tests/functional/lang/eval-fail-empty-formals.nix new file mode 100644 index 00000000000..597f4049632 --- /dev/null +++ b/tests/functional/lang/eval-fail-empty-formals.nix @@ -0,0 +1 @@ +(foo@{ }: 1) { a = 3; } diff --git a/tests/functional/lang/eval-fail-genericClosure-deeply-nested-element.err.exp b/tests/functional/lang/eval-fail-genericClosure-deeply-nested-element.err.exp new file mode 100644 index 00000000000..a5567cbfc5b --- /dev/null +++ 
b/tests/functional/lang/eval-fail-genericClosure-deeply-nested-element.err.exp @@ -0,0 +1,18 @@ +error: + … while calling the 'seq' builtin + at /pwd/lang/eval-fail-genericClosure-deeply-nested-element.nix:25:1: + 24| in + 25| builtins.seq finiteVal ( + | ^ + 26| builtins.genericClosure { + + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-deeply-nested-element.nix:26:3: + 25| builtins.seq finiteVal ( + 26| builtins.genericClosure { + | ^ + 27| startSet = [ + + … in genericClosure element { finite = { a0 = { a1 = { a2 = { a3 = { a4 = { a5 = { a6 = { a7 = { a8 = { ... }; }; }; }; }; }; }; }; }; }; «1 attribute elided» } + + error: attribute 'key' missing diff --git a/tests/functional/lang/eval-fail-genericClosure-deeply-nested-element.nix b/tests/functional/lang/eval-fail-genericClosure-deeply-nested-element.nix new file mode 100644 index 00000000000..abc0591bb7d --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-deeply-nested-element.nix @@ -0,0 +1,35 @@ +let + finite = { + a0 = { + a1 = { + a2 = { + a3 = { + a4 = { + a5 = { + a6 = { + a7 = { + a8 = { + a9 = "deep"; + }; + }; + }; + }; + }; + }; + }; + }; + }; + }; + finiteVal = builtins.deepSeq finite finite; +in +builtins.seq finiteVal ( + builtins.genericClosure { + startSet = [ + { + infinite = import ./infinite-nesting.nix; + finite = finiteVal; + } + ]; + operator = x: [ (import ./infinite-nesting.nix) ]; + } +) diff --git a/tests/functional/lang/eval-fail-genericClosure-element-missing-key.err.exp b/tests/functional/lang/eval-fail-genericClosure-element-missing-key.err.exp new file mode 100644 index 00000000000..3ba2a7ea831 --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-element-missing-key.err.exp @@ -0,0 +1,10 @@ +error: + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-element-missing-key.nix:1:1: + 1| builtins.genericClosure { + | ^ + 2| startSet = [ { nokey = 1; } ]; + + … in genericClosure 
element { nokey = 1; } + + error: attribute 'key' missing diff --git a/tests/functional/lang/eval-fail-genericClosure-element-missing-key.nix b/tests/functional/lang/eval-fail-genericClosure-element-missing-key.nix new file mode 100644 index 00000000000..e39e4043bd6 --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-element-missing-key.nix @@ -0,0 +1,4 @@ +builtins.genericClosure { + startSet = [ { nokey = 1; } ]; + operator = x: [ ]; +} diff --git a/tests/functional/lang/eval-fail-genericClosure-element-not-attrset.err.exp b/tests/functional/lang/eval-fail-genericClosure-element-not-attrset.err.exp new file mode 100644 index 00000000000..b469f60431b --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-element-not-attrset.err.exp @@ -0,0 +1,10 @@ +error: + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-element-not-attrset.nix:1:1: + 1| builtins.genericClosure { + | ^ + 2| startSet = [ "not an attrset" ]; + + … in genericClosure element "not an attrset" + + error: expected a set but found a string: "not an attrset" diff --git a/tests/functional/lang/eval-fail-genericClosure-element-not-attrset.nix b/tests/functional/lang/eval-fail-genericClosure-element-not-attrset.nix new file mode 100644 index 00000000000..6850be1c2e6 --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-element-not-attrset.nix @@ -0,0 +1,4 @@ +builtins.genericClosure { + startSet = [ "not an attrset" ]; + operator = x: [ ]; +} diff --git a/tests/functional/lang/eval-fail-genericClosure-keys-incompatible-types.err.exp b/tests/functional/lang/eval-fail-genericClosure-keys-incompatible-types.err.exp new file mode 100644 index 00000000000..04b458a4863 --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-keys-incompatible-types.err.exp @@ -0,0 +1,12 @@ +error: + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-keys-incompatible-types.nix:1:1: + 1| builtins.genericClosure { 
+ | ^ + 2| startSet = [ + + … while comparing element { key = "string"; } + + … with element { key = 1; } + + error: cannot compare a string with an integer; values are "string" and 1 diff --git a/tests/functional/lang/eval-fail-genericClosure-keys-incompatible-types.nix b/tests/functional/lang/eval-fail-genericClosure-keys-incompatible-types.nix new file mode 100644 index 00000000000..3335416fdda --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-keys-incompatible-types.nix @@ -0,0 +1,7 @@ +builtins.genericClosure { + startSet = [ + { key = 1; } + { key = "string"; } + ]; + operator = x: [ ]; +} diff --git a/tests/functional/lang/eval-fail-genericClosure-keys-uncomparable.err.exp b/tests/functional/lang/eval-fail-genericClosure-keys-uncomparable.err.exp new file mode 100644 index 00000000000..97e2bed022b --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-keys-uncomparable.err.exp @@ -0,0 +1,12 @@ +error: + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-keys-uncomparable.nix:1:1: + 1| builtins.genericClosure { + | ^ + 2| startSet = [ + + … while comparing element { key = { }; } + + … with element { key = { }; } + + error: cannot compare a set with a set; values of that type are incomparable (values are { } and { }) diff --git a/tests/functional/lang/eval-fail-genericClosure-keys-uncomparable.nix b/tests/functional/lang/eval-fail-genericClosure-keys-uncomparable.nix new file mode 100644 index 00000000000..6a1915b6ab3 --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-keys-uncomparable.nix @@ -0,0 +1,7 @@ +builtins.genericClosure { + startSet = [ + { key = { }; } + { key = { }; } + ]; + operator = x: [ ]; +} diff --git a/tests/functional/lang/eval-fail-genericClosure-missing-operator.err.exp b/tests/functional/lang/eval-fail-genericClosure-missing-operator.err.exp new file mode 100644 index 00000000000..0dce0ffd9a6 --- /dev/null +++ 
b/tests/functional/lang/eval-fail-genericClosure-missing-operator.err.exp @@ -0,0 +1,10 @@ +error: + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-missing-operator.nix:1:1: + 1| builtins.genericClosure { + | ^ + 2| startSet = [ { key = 1; } ]; + + … in the attrset passed as argument to builtins.genericClosure + + error: attribute 'operator' missing diff --git a/tests/functional/lang/eval-fail-genericClosure-missing-operator.nix b/tests/functional/lang/eval-fail-genericClosure-missing-operator.nix new file mode 100644 index 00000000000..0b7c63f6d11 --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-missing-operator.nix @@ -0,0 +1,3 @@ +builtins.genericClosure { + startSet = [ { key = 1; } ]; +} diff --git a/tests/functional/lang/eval-fail-genericClosure-missing-startSet.err.exp b/tests/functional/lang/eval-fail-genericClosure-missing-startSet.err.exp new file mode 100644 index 00000000000..b68c6542a4e --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-missing-startSet.err.exp @@ -0,0 +1,10 @@ +error: + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-missing-startSet.nix:1:1: + 1| builtins.genericClosure { + | ^ + 2| operator = x: [ ]; + + … in the attrset passed as argument to builtins.genericClosure + + error: attribute 'startSet' missing diff --git a/tests/functional/lang/eval-fail-genericClosure-missing-startSet.nix b/tests/functional/lang/eval-fail-genericClosure-missing-startSet.nix new file mode 100644 index 00000000000..b628029862b --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-missing-startSet.nix @@ -0,0 +1,3 @@ +builtins.genericClosure { + operator = x: [ ]; +} diff --git a/tests/functional/lang/eval-fail-genericClosure-not-attrset.err.exp b/tests/functional/lang/eval-fail-genericClosure-not-attrset.err.exp new file mode 100644 index 00000000000..fd3360310a1 --- /dev/null +++ 
b/tests/functional/lang/eval-fail-genericClosure-not-attrset.err.exp @@ -0,0 +1,10 @@ +error: + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-not-attrset.nix:1:1: + 1| builtins.genericClosure "not an attrset" + | ^ + 2| + + … while evaluating the first argument passed to builtins.genericClosure + + error: expected a set but found a string: "not an attrset" diff --git a/tests/functional/lang/eval-fail-genericClosure-not-attrset.nix b/tests/functional/lang/eval-fail-genericClosure-not-attrset.nix new file mode 100644 index 00000000000..3998c3432cb --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-not-attrset.nix @@ -0,0 +1 @@ +builtins.genericClosure "not an attrset" diff --git a/tests/functional/lang/eval-fail-genericClosure-operator-not-function.err.exp b/tests/functional/lang/eval-fail-genericClosure-operator-not-function.err.exp new file mode 100644 index 00000000000..d3c5a627a42 --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-operator-not-function.err.exp @@ -0,0 +1,10 @@ +error: + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-operator-not-function.nix:1:1: + 1| builtins.genericClosure { + | ^ + 2| startSet = [ { key = 1; } ]; + + … while evaluating the 'operator' attribute passed as argument to builtins.genericClosure + + error: expected a function but found a string: "not a function" diff --git a/tests/functional/lang/eval-fail-genericClosure-operator-not-function.nix b/tests/functional/lang/eval-fail-genericClosure-operator-not-function.nix new file mode 100644 index 00000000000..425cd427d7a --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-operator-not-function.nix @@ -0,0 +1,4 @@ +builtins.genericClosure { + startSet = [ { key = 1; } ]; + operator = "not a function"; +} diff --git a/tests/functional/lang/eval-fail-genericClosure-operator-not-list.err.exp b/tests/functional/lang/eval-fail-genericClosure-operator-not-list.err.exp 
new file mode 100644 index 00000000000..49d478033d2 --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-operator-not-list.err.exp @@ -0,0 +1,12 @@ +error: + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-operator-not-list.nix:1:1: + 1| builtins.genericClosure { + | ^ + 2| startSet = [ { key = 1; } ]; + + … while calling operator on genericClosure element { key = 1; } + + … while evaluating the return value of the `operator` passed to builtins.genericClosure + + error: expected a list but found a string: "not a list" diff --git a/tests/functional/lang/eval-fail-genericClosure-operator-not-list.nix b/tests/functional/lang/eval-fail-genericClosure-operator-not-list.nix new file mode 100644 index 00000000000..26f97c51c50 --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-operator-not-list.nix @@ -0,0 +1,4 @@ +builtins.genericClosure { + startSet = [ { key = 1; } ]; + operator = x: "not a list"; +} diff --git a/tests/functional/lang/eval-fail-genericClosure-startSet-not-list.err.exp b/tests/functional/lang/eval-fail-genericClosure-startSet-not-list.err.exp new file mode 100644 index 00000000000..e711a23f5a5 --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-startSet-not-list.err.exp @@ -0,0 +1,10 @@ +error: + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-startSet-not-list.nix:1:1: + 1| builtins.genericClosure { + | ^ + 2| startSet = "not a list"; + + … while evaluating the 'startSet' attribute passed as argument to builtins.genericClosure + + error: expected a list but found a string: "not a list" diff --git a/tests/functional/lang/eval-fail-genericClosure-startSet-not-list.nix b/tests/functional/lang/eval-fail-genericClosure-startSet-not-list.nix new file mode 100644 index 00000000000..834c82f656c --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-startSet-not-list.nix @@ -0,0 +1,4 @@ +builtins.genericClosure { + startSet = "not a 
list"; + operator = x: [ ]; +} diff --git a/tests/functional/lang/eval-fail-toJSON-stack-overflow.err.exp b/tests/functional/lang/eval-fail-toJSON-stack-overflow.err.exp new file mode 100644 index 00000000000..cda77331d19 --- /dev/null +++ b/tests/functional/lang/eval-fail-toJSON-stack-overflow.err.exp @@ -0,0 +1,30 @@ +error: + … while calling the 'toJSON' builtin + at /pwd/lang/eval-fail-toJSON-stack-overflow.nix:8:1: + 7| in + 8| builtins.toJSON reverseLinkedList + | ^ + 9| + + … while evaluating attribute 'tail' + at /pwd/lang/eval-fail-toJSON-stack-overflow.nix:6:67: + 5| long = builtins.genList (x: x) 100000; + 6| reverseLinkedList = builtins.foldl' (tail: head: { inherit head tail; }) null long; + | ^ + 7| in + + (9997 duplicate frames omitted) + + … while evaluating attribute 'head' + at /pwd/lang/eval-fail-toJSON-stack-overflow.nix:6:62: + 5| long = builtins.genList (x: x) 100000; + 6| reverseLinkedList = builtins.foldl' (tail: head: { inherit head tail; }) null long; + | ^ + 7| in + + error: stack overflow; max-call-depth exceeded + at /pwd/lang/eval-fail-toJSON-stack-overflow.nix:6:62: + 5| long = builtins.genList (x: x) 100000; + 6| reverseLinkedList = builtins.foldl' (tail: head: { inherit head tail; }) null long; + | ^ + 7| in diff --git a/tests/functional/lang/eval-fail-toJSON-stack-overflow.nix b/tests/functional/lang/eval-fail-toJSON-stack-overflow.nix new file mode 100644 index 00000000000..135ed0a1784 --- /dev/null +++ b/tests/functional/lang/eval-fail-toJSON-stack-overflow.nix @@ -0,0 +1,8 @@ +# Test that toJSON on a deeply nested structure produces a controlled +# stack overflow error rather than a segfault. 
+ +let + long = builtins.genList (x: x) 100000; + reverseLinkedList = builtins.foldl' (tail: head: { inherit head tail; }) null long; +in +builtins.toJSON reverseLinkedList diff --git a/tests/functional/lang/eval-okay-dynamic-attrs-3.exp b/tests/functional/lang/eval-okay-dynamic-attrs-3.exp new file mode 100644 index 00000000000..9d27f872c40 --- /dev/null +++ b/tests/functional/lang/eval-okay-dynamic-attrs-3.exp @@ -0,0 +1 @@ +{ a = 1; attrs = { b = 1; c = 1; d = 1; }; b = 1; c = 1; d = 1; } diff --git a/tests/functional/lang/eval-okay-dynamic-attrs-3.nix b/tests/functional/lang/eval-okay-dynamic-attrs-3.nix new file mode 100644 index 00000000000..d55ed82f83c --- /dev/null +++ b/tests/functional/lang/eval-okay-dynamic-attrs-3.nix @@ -0,0 +1,14 @@ +# dynamic attrs are not generally allowed in `let`, and inherit, but they are if they only contain a string +let + ${"a"} = 1; + attrs = rec { + b = c; + ${"c"} = d; + d = a; + }; +in +{ + inherit ${"a"}; + inherit attrs; + inherit (attrs) ${"b"} ${"c"} d; +} diff --git a/tests/functional/lang/eval-okay-equal-function-attrset-distinct-similar.exp b/tests/functional/lang/eval-okay-equal-function-attrset-distinct-similar.exp new file mode 100644 index 00000000000..c508d5366f7 --- /dev/null +++ b/tests/functional/lang/eval-okay-equal-function-attrset-distinct-similar.exp @@ -0,0 +1 @@ +false diff --git a/tests/functional/lang/eval-okay-equal-function-attrset-distinct-similar.nix b/tests/functional/lang/eval-okay-equal-function-attrset-distinct-similar.nix new file mode 100644 index 00000000000..13c30d9f709 --- /dev/null +++ b/tests/functional/lang/eval-okay-equal-function-attrset-distinct-similar.nix @@ -0,0 +1,3 @@ +# Distinct but not identical functions in attribute set compare as unequal +# See https://nix.dev/manual/nix/latest/language/operators#equality +{ a = (x: x); } == { a = (x: x); } diff --git a/tests/functional/lang/eval-okay-equal-function-attrset-identical.exp 
b/tests/functional/lang/eval-okay-equal-function-attrset-identical.exp new file mode 100644 index 00000000000..27ba77ddaf6 --- /dev/null +++ b/tests/functional/lang/eval-okay-equal-function-attrset-identical.exp @@ -0,0 +1 @@ +true diff --git a/tests/functional/lang/eval-okay-equal-function-attrset-identical.nix b/tests/functional/lang/eval-okay-equal-function-attrset-identical.nix new file mode 100644 index 00000000000..830267c8282 --- /dev/null +++ b/tests/functional/lang/eval-okay-equal-function-attrset-identical.nix @@ -0,0 +1,10 @@ +# Function comparison in attribute set uses value identity optimization +# See https://nix.dev/manual/nix/latest/language/operators#value-identity-optimization +let + f = x: x; +in +{ + a = f; +} == { + a = f; +} diff --git a/tests/functional/lang/eval-okay-equal-function-direct-distinct-similar.exp b/tests/functional/lang/eval-okay-equal-function-direct-distinct-similar.exp new file mode 100644 index 00000000000..c508d5366f7 --- /dev/null +++ b/tests/functional/lang/eval-okay-equal-function-direct-distinct-similar.exp @@ -0,0 +1 @@ +false diff --git a/tests/functional/lang/eval-okay-equal-function-direct-distinct-similar.nix b/tests/functional/lang/eval-okay-equal-function-direct-distinct-similar.nix new file mode 100644 index 00000000000..f3a931c6bc1 --- /dev/null +++ b/tests/functional/lang/eval-okay-equal-function-direct-distinct-similar.nix @@ -0,0 +1,3 @@ +# Direct comparison of distinct but not identical functions returns false +# See https://nix.dev/manual/nix/latest/language/operators#equality +(x: x) == (x: x) diff --git a/tests/functional/lang/eval-okay-equal-function-direct-identical.exp b/tests/functional/lang/eval-okay-equal-function-direct-identical.exp new file mode 100644 index 00000000000..c508d5366f7 --- /dev/null +++ b/tests/functional/lang/eval-okay-equal-function-direct-identical.exp @@ -0,0 +1 @@ +false diff --git a/tests/functional/lang/eval-okay-equal-function-direct-identical.nix 
b/tests/functional/lang/eval-okay-equal-function-direct-identical.nix new file mode 100644 index 00000000000..f91a39fb883 --- /dev/null +++ b/tests/functional/lang/eval-okay-equal-function-direct-identical.nix @@ -0,0 +1,6 @@ +# Direct comparison of identical function returns false +# See https://nix.dev/manual/nix/latest/language/operators#equality +let + f = x: x; +in +f == f diff --git a/tests/functional/lang/eval-okay-equal-function-list-distinct-similar.exp b/tests/functional/lang/eval-okay-equal-function-list-distinct-similar.exp new file mode 100644 index 00000000000..c508d5366f7 --- /dev/null +++ b/tests/functional/lang/eval-okay-equal-function-list-distinct-similar.exp @@ -0,0 +1 @@ +false diff --git a/tests/functional/lang/eval-okay-equal-function-list-distinct-similar.nix b/tests/functional/lang/eval-okay-equal-function-list-distinct-similar.nix new file mode 100644 index 00000000000..cd6182770c8 --- /dev/null +++ b/tests/functional/lang/eval-okay-equal-function-list-distinct-similar.nix @@ -0,0 +1,3 @@ +# Distinct but not identical functions in list compare as unequal +# See https://nix.dev/manual/nix/latest/language/operators#equality +[ (x: x) ] == [ (x: x) ] diff --git a/tests/functional/lang/eval-okay-equal-function-list-identical.exp b/tests/functional/lang/eval-okay-equal-function-list-identical.exp new file mode 100644 index 00000000000..27ba77ddaf6 --- /dev/null +++ b/tests/functional/lang/eval-okay-equal-function-list-identical.exp @@ -0,0 +1 @@ +true diff --git a/tests/functional/lang/eval-okay-equal-function-list-identical.nix b/tests/functional/lang/eval-okay-equal-function-list-identical.nix new file mode 100644 index 00000000000..5156ffc471d --- /dev/null +++ b/tests/functional/lang/eval-okay-equal-function-list-identical.nix @@ -0,0 +1,6 @@ +# Function comparison in list uses value identity optimization +# See https://nix.dev/manual/nix/latest/language/operators#value-identity-optimization +let + f = x: x; +in +[ f ] == [ f ] diff --git 
a/tests/functional/lang/infinite-nesting.nix b/tests/functional/lang/infinite-nesting.nix new file mode 100644 index 00000000000..1f937e63daf --- /dev/null +++ b/tests/functional/lang/infinite-nesting.nix @@ -0,0 +1,4 @@ +let + mkInfinite = i: { "a${toString i}" = mkInfinite (i + 1); }; +in +mkInfinite 0 diff --git a/tests/functional/logging.sh b/tests/functional/logging.sh index f637036be26..ffb1e6d9621 100755 --- a/tests/functional/logging.sh +++ b/tests/functional/logging.sh @@ -40,6 +40,6 @@ if [[ "$NIX_REMOTE" != "daemon" ]]; then nix build -vv --file dependencies.nix --no-link --json-log-path "$TEST_ROOT/log.json" 2>&1 | grepQuiet 'building.*dependencies-top.drv' jq < "$TEST_ROOT/log.json" grep '{"action":"start","fields":\[".*-dependencies-top.drv","",1,1\],"id":.*,"level":3,"parent":0' "$TEST_ROOT/log.json" >&2 - grep -E '{"action":"result","id":[^,]+,"payload":{"builtOutputs":{"out":{"path":"[^-]+-dependencies-top"' "$TEST_ROOT/log.json" >&2 + grep -E '{"action":"result","id":[^,]+,"payload":{"builtOutputs":{"out":{"dependentRealisations":\{\},"id":"[^"]+","outPath":"[^-]+-dependencies-top".*"status":"Built".*"success":true' "$TEST_ROOT/log.json" >&2 (( $(grep -c '{"action":"msg","level":5,"msg":"executing builder .*"}' "$TEST_ROOT/log.json" ) == 5 )) fi diff --git a/tests/functional/meson.build b/tests/functional/meson.build index 6f649c8360b..d917d91c3f3 100644 --- a/tests/functional/meson.build +++ b/tests/functional/meson.build @@ -263,7 +263,8 @@ foreach suite : suites 'ASAN_OPTIONS' : asan_options, '_NIX_TEST_SOURCE_DIR' : meson.current_source_dir(), '_NIX_TEST_BUILD_DIR' : meson.current_build_dir(), - 'TEST_NAME' : suite_name / name, + 'TEST_SUITE_NAME' : suite_name, + 'TEST_NAME' : name, 'NIX_REMOTE' : '', 'PS4' : '+(${BASH_SOURCE[0]-$0}:$LINENO) ', }, diff --git a/tests/functional/nar-access.sh b/tests/functional/nar-access.sh index 2b0a6a32918..cd419b4eefc 100755 --- a/tests/functional/nar-access.sh +++ b/tests/functional/nar-access.sh @@ -23,6 
+23,8 @@ diff -u data.cat-nar "$storePath/foo/data" # Check that file contents of baz match. nix nar cat "$narFile" /foo/baz > baz.cat-nar diff -u baz.cat-nar "$storePath/foo/baz" +nix nar cat /dev/stdin /foo/baz < "$narFile" > baz.cat-nar-pipe +expect 1 nix nar cat "$narFile" /foo/baz/doesntexist 2>&1 | grep "NAR does not contain regular file '/foo/baz/doesntexist'" nix store cat "$storePath/foo/baz" > baz.cat-nar diff -u baz.cat-nar "$storePath/foo/baz" diff --git a/tests/functional/nars.sh b/tests/functional/nars.sh index dd90345a6cc..2925177c5c9 100755 --- a/tests/functional/nars.sh +++ b/tests/functional/nars.sh @@ -114,7 +114,7 @@ if (( unicodeTestCode == 1 )); then # If the command failed (MacOS or ZFS + normalization), checks that it failed # with the expected "already exists" error, and that this is the same # behavior as `touch` - echo "$unicodeTestOut" | grepQuiet "path '.*/out/â' already exists" + echo "$unicodeTestOut" | grepQuiet "creating directory '.*/out/â': File exists" (( touchFilesCount == 1 )) elif (( unicodeTestCode == 0 )); then @@ -131,31 +131,3 @@ else fi rm -f "$TEST_ROOT/unicode-*" - -# Unpacking a NAR with a NUL character in a file name should fail. -rm -rf "$TEST_ROOT/out" -expectStderr 1 nix-store --restore "$TEST_ROOT/out" < nul-character.nar | grepQuiet "NAR contains invalid file name 'f" - -# Likewise for a '.' filename. -rm -rf "$TEST_ROOT/out" -expectStderr 1 nix-store --restore "$TEST_ROOT/out" < dot.nar | grepQuiet "NAR contains invalid file name '.'" - -# Likewise for a '..' filename. -rm -rf "$TEST_ROOT/out" -expectStderr 1 nix-store --restore "$TEST_ROOT/out" < dotdot.nar | grepQuiet "NAR contains invalid file name '..'" - -# Likewise for a filename containing a slash. -rm -rf "$TEST_ROOT/out" -expectStderr 1 nix-store --restore "$TEST_ROOT/out" < slash.nar | grepQuiet "NAR contains invalid file name 'x/y'" - -# Likewise for an empty filename. 
-rm -rf "$TEST_ROOT/out" -expectStderr 1 nix-store --restore "$TEST_ROOT/out" < empty.nar | grepQuiet "NAR contains invalid file name ''" - -# Test that the 'executable' field cannot come before the 'contents' field. -rm -rf "$TEST_ROOT/out" -expectStderr 1 nix-store --restore "$TEST_ROOT/out" < executable-after-contents.nar | grepQuiet "expected tag ')', got 'executable'" - -# Test that the 'name' field cannot come before the 'node' field in a directory entry. -rm -rf "$TEST_ROOT/out" -expectStderr 1 nix-store --restore "$TEST_ROOT/out" < name-after-node.nar | grepQuiet "expected tag 'name'" diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh index 66fec6c1fe8..3ee0e4b9ab6 100755 --- a/tests/functional/nix-profile.sh +++ b/tests/functional/nix-profile.sh @@ -166,7 +166,7 @@ printf 4.0 > "$flake1Dir"/version printf Utrecht > "$flake1Dir"/who nix profile add "$flake1Dir" [[ $("$TEST_HOME"/.nix-profile/bin/hello) = "Hello Utrecht" ]] -[[ $(nix path-info --json "$(realpath "$TEST_HOME"/.nix-profile/bin/hello)" | jq -r .[].ca) =~ fixed:r:sha256: ]] +nix path-info --json --json-format 2 "$(realpath "$TEST_HOME"/.nix-profile/bin/hello)" | jq -e '.info.[].ca | .method == "nar" and (.hash | startswith("sha256-"))' # Override the outputs. 
nix profile remove simple flake1 diff --git a/tests/functional/nix-shell.sh b/tests/functional/nix-shell.sh index cf650e2c36c..cdeea32687a 100755 --- a/tests/functional/nix-shell.sh +++ b/tests/functional/nix-shell.sh @@ -175,7 +175,7 @@ cat >"$TEST_ROOT"/marco/polo/default.nix < "$TEST_ROOT"/foo foo=$(nix store add-file "$TEST_ROOT"/foo) +fooBase=$(basename "$foo") echo bar > "$TEST_ROOT"/bar bar=$(nix store add-file "$TEST_ROOT"/bar) +barBase=$(basename "$bar") echo baz > "$TEST_ROOT"/baz baz=$(nix store add-file "$TEST_ROOT"/baz) +bazBase=$(basename "$baz") nix-store --delete --ignore-liveness "$baz" diff --unified --color=always \ - <(nix path-info --json "$foo" "$bar" "$baz" | - jq --sort-keys 'map_values(.narHash)') \ + <(nix path-info --json --json-format 2 "$foo" "$bar" "$baz" | + jq --sort-keys '.info | map_values(.narHash)') \ <(jq --sort-keys <<-EOF { - "$foo": "sha256-QvtAMbUl/uvi+LCObmqOhvNOapHdA2raiI4xG5zI5pA=", - "$bar": "sha256-9fhYGu9fqxcQC2Kc81qh2RMo1QcLBUBo8U+pPn+jthQ=", - "$baz": null + "$fooBase": "sha256-QvtAMbUl/uvi+LCObmqOhvNOapHdA2raiI4xG5zI5pA=", + "$barBase": "sha256-9fhYGu9fqxcQC2Kc81qh2RMo1QcLBUBo8U+pPn+jthQ=", + "$bazBase": null } EOF ) + +# Test that storeDir is returned in the JSON output in individual store objects +nix path-info --json --json-format 2 "$foo" | jq -e \ + --arg fooBase "$fooBase" \ + --arg storeDir "${NIX_STORE_DIR:-/nix/store}" \ + '.info[$fooBase].storeDir == $storeDir' + +# And also at the top -evel +echo | nix path-info --json --json-format 2 --stdin | jq -e \ + --arg storeDir "${NIX_STORE_DIR:-/nix/store}" \ + '.storeDir == $storeDir' diff --git a/tests/functional/repl.sh b/tests/functional/repl.sh index d75b80bb0b0..cbda380516b 100755 --- a/tests/functional/repl.sh +++ b/tests/functional/repl.sh @@ -25,6 +25,13 @@ import $testDir/undefined-variable.nix TODO_NixOS +# FIXME: repl tests fail on systems with stack limits +stack_ulimit="$(ulimit -Hs)" +stack_required="$((64 * 1024 * 1024))" +if [[ "$stack_ulimit" != 
"unlimited" ]]; then + ((stack_ulimit < stack_required)) && skipTest "repl tests cannot run on systems with stack size <$stack_required ($stack_ulimit)" +fi + testRepl () { local nixArgs nixArgs=("$@") @@ -343,10 +350,9 @@ runRepl () { local testDirNoUnderscores testDirNoUnderscores="${testDir//_/}" - # TODO: pass arguments to nix repl; see lang.sh _NIX_TEST_RAW_MARKDOWN=1 \ _NIX_TEST_REPL_ECHO=1 \ - nix repl 2>&1 \ + nix repl "$@" 2>&1 \ | stripColors \ | tr -d '\0' \ | stripEmptyLinesBeforePrompt \ @@ -366,7 +372,12 @@ for test in $(cd "$testDir/repl"; echo *.in); do in="$testDir/repl/$test.in" actual="$TEST_ROOT/$test.actual" expected="$testDir/repl/$test.expected" - (cd "$testDir/repl"; set +x; runRepl 2>&1) < "$in" > "$actual" || { + declare -a flags=() + if test -e "$testDir/repl/$test.flags"; then + read -r -a flags < "$testDir/repl/$test.flags" + fi + + (cd "$testDir/repl"; set +x; runRepl "${flags[@]}" 2>&1) < "$in" > "$actual" || { echo "FAIL: $test (exit code $?)" >&2 badExitCode=1 } diff --git a/tests/functional/repl/doc-functor.expected b/tests/functional/repl/doc-functor.expected index 503fb807368..8b86fe91344 100644 --- a/tests/functional/repl/doc-functor.expected +++ b/tests/functional/repl/doc-functor.expected @@ -43,7 +43,7 @@ error: | ^ 91| }; - (19999 duplicate frames omitted) + (199 duplicate frames omitted) error: stack overflow; max-call-depth exceeded at /path/to/tests/functional/repl/doc-functor.nix:90:23: @@ -56,7 +56,7 @@ nix-repl> :doc diverging error: … while partially calling '__functor' to retrieve documentation - (10000 duplicate frames omitted) + (100 duplicate frames omitted) … while calling '__functor' at /path/to/tests/functional/repl/doc-functor.nix:103:21: diff --git a/tests/functional/repl/doc-functor.flags b/tests/functional/repl/doc-functor.flags new file mode 100644 index 00000000000..49308843003 --- /dev/null +++ b/tests/functional/repl/doc-functor.flags @@ -0,0 +1 @@ +--max-call-depth 100 diff --git 
a/tests/functional/shell.nix b/tests/functional/shell.nix index 5e9f4881819..267b0c8f016 100644 --- a/tests/functional/shell.nix +++ b/tests/functional/shell.nix @@ -84,6 +84,16 @@ let ''; }; + # Shells should also work with fixed-output derivations + fixed = mkDerivation { + name = "fixed"; + FOO = "was a fixed-output derivation"; + outputHash = "1ixr6yd3297ciyp9im522dfxpqbkhcw0pylkb2aab915278fqaik"; + outputHashMode = "recursive"; + outputHashAlgo = "sha256"; + outputs = [ "out" ]; + }; + # Used by nix-shell -p runCommand = name: args: buildCommand: diff --git a/tests/functional/signing.sh b/tests/functional/signing.sh index 2893efec7d9..bfa21fcff76 100755 --- a/tests/functional/signing.sh +++ b/tests/functional/signing.sh @@ -14,11 +14,11 @@ pk2=$(cat "$TEST_ROOT"/pk2) outPath=$(nix-build dependencies.nix --no-out-link --secret-key-files "$TEST_ROOT/sk1 $TEST_ROOT/sk2") # Verify that the path got signed. -info=$(nix path-info --json "$outPath") -echo "$info" | jq -e '.[] | .ultimate == true' +info=$(nix path-info --json --json-format 2 "$outPath") +echo "$info" | jq -e '.info.[] | .ultimate == true' TODO_NixOS # looks like an actual bug? Following line fails on NixOS: -echo "$info" | jq -e '.[] | .signatures.[] | select(startswith("cache1.example.org"))' -echo "$info" | jq -e '.[] | .signatures.[] | select(startswith("cache2.example.org"))' +echo "$info" | jq -e '.info.[] | .signatures.[] | select(startswith("cache1.example.org"))' +echo "$info" | jq -e '.info.[] | .signatures.[] | select(startswith("cache2.example.org"))' # Test "nix store verify". nix store verify -r "$outPath" @@ -39,9 +39,9 @@ outPath2=$(nix-build simple.nix --no-out-link) nix store verify -r "$outPath" # Verify that the path did not get signed but does have the ultimate bit. 
-info=$(nix path-info --json "$outPath2") -echo "$info" | jq -e '.[] | .ultimate == true' -echo "$info" | jq -e '.[] | .signatures == []' +info=$(nix path-info --json --json-format 2 "$outPath2") +echo "$info" | jq -e '.info.[] | .ultimate == true' +echo "$info" | jq -e '.info.[] | .signatures == []' # Test "nix store verify". nix store verify -r "$outPath2" @@ -58,7 +58,7 @@ nix store verify -r "$outPath2" --sigs-needed 1 --trusted-public-keys "$pk1" # Build something content-addressed. outPathCA=$(IMPURE_VAR1=foo IMPURE_VAR2=bar nix-build ./fixed.nix -A good.0 --no-out-link) -nix path-info --json "$outPathCA" | jq -e '.[] | .ca | startswith("fixed:md5:")' +nix path-info --json --json-format 2 "$outPathCA" | jq -e '.info.[].ca | .method == "flat" and (.hash | startswith("md5-"))' # Content-addressed paths don't need signatures, so they verify # regardless of --sigs-needed. @@ -73,16 +73,16 @@ nix store verify -r "$outPathCA" --sigs-needed 1000 --trusted-public-keys "$pk1" nix copy --to file://"$cacheDir" "$outPath2" # Verify that signatures got copied. -info=$(nix path-info --store file://"$cacheDir" --json "$outPath2") -echo "$info" | jq -e '.[] | .ultimate == false' -echo "$info" | jq -e '.[] | .signatures.[] | select(startswith("cache1.example.org"))' -echo "$info" | expect 4 jq -e '.[] | .signatures.[] | select(startswith("cache2.example.org"))' +info=$(nix path-info --store file://"$cacheDir" --json --json-format 2 "$outPath2") +echo "$info" | jq -e '.info.[] | .ultimate == false' +echo "$info" | jq -e '.info.[] | .signatures.[] | select(startswith("cache1.example.org"))' +echo "$info" | expect 4 jq -e '.info.[] | .signatures.[] | select(startswith("cache2.example.org"))' # Verify that adding a signature to a path in a binary cache works. 
nix store sign --store file://"$cacheDir" --key-file "$TEST_ROOT"/sk2 "$outPath2" -info=$(nix path-info --store file://"$cacheDir" --json "$outPath2") -echo "$info" | jq -e '.[] | .signatures.[] | select(startswith("cache1.example.org"))' -echo "$info" | jq -e '.[] | .signatures.[] | select(startswith("cache2.example.org"))' +info=$(nix path-info --store file://"$cacheDir" --json --json-format 2 "$outPath2") +echo "$info" | jq -e '.info.[] | .signatures.[] | select(startswith("cache1.example.org"))' +echo "$info" | jq -e '.info.[] | .signatures.[] | select(startswith("cache2.example.org"))' # Copying to a diverted store should fail due to a lack of signatures by trusted keys. chmod -R u+w "$TEST_ROOT"/store0 || true diff --git a/tests/functional/structured-attrs.sh b/tests/functional/structured-attrs.sh index 473a037f9f3..01bdc10d116 100755 --- a/tests/functional/structured-attrs.sh +++ b/tests/functional/structured-attrs.sh @@ -49,4 +49,4 @@ expectStderr 0 nix-instantiate --expr "$hackyExpr" --eval --strict | grepQuiet " # Check it works with the expected structured attrs hacky=$(nix-instantiate --expr "$hackyExpr") -nix derivation show "$hacky" | jq --exit-status '."'"$(basename "$hacky")"'".structuredAttrs | . == {"a": 1}' +nix derivation show "$hacky" | jq --exit-status '.derivations."'"$(basename "$hacky")"'".structuredAttrs | . == {"a": 1}' diff --git a/tests/functional/test-libstoreconsumer/main.cc b/tests/functional/test-libstoreconsumer/main.cc index 5b013293475..6cfe50047af 100644 --- a/tests/functional/test-libstoreconsumer/main.cc +++ b/tests/functional/test-libstoreconsumer/main.cc @@ -5,13 +5,6 @@ using namespace nix; -extern "C" [[gnu::retain]] const char * __asan_default_options() -{ - // We leak a bunch of memory knowingly on purpose. It's not worthwhile to - // diagnose that memory being leaked for now. 
- return "abort_on_error=1:print_summary=1:detect_leaks=0"; -} - int main(int argc, char ** argv) { try { diff --git a/tests/functional/test-libstoreconsumer/meson.build b/tests/functional/test-libstoreconsumer/meson.build index 7f619d01baa..b2f1c1ca3f9 100644 --- a/tests/functional/test-libstoreconsumer/meson.build +++ b/tests/functional/test-libstoreconsumer/meson.build @@ -1,11 +1,12 @@ cxx = meson.get_compiler('cpp') -subdir('nix-meson-build-support/asan-options') +deps_other = [] +subdir('nix-meson-build-support/common/asan-options') libstoreconsumer_tester = executable( 'test-libstoreconsumer', 'main.cc', - dependencies : [ + dependencies : deps_other + [ dependency('nix-store'), ], build_by_default : false, diff --git a/tests/nixos/fetchers-substitute.nix b/tests/nixos/fetchers-substitute.nix index 453982677be..bfe15c5c36e 100644 --- a/tests/nixos/fetchers-substitute.nix +++ b/tests/nixos/fetchers-substitute.nix @@ -47,6 +47,7 @@ { nodes }: # python '' import json + import os start_all() @@ -117,10 +118,10 @@ tarball_store_path = json.loads(tarball_store_path_json) # Get the NAR hash of the unpacked tarball in SRI format - path_info_json = substituter.succeed(f"nix path-info --json {tarball_store_path}").strip() - path_info_dict = json.loads(path_info_json) - # nix path-info returns a dict with store paths as keys - tarball_hash_sri = path_info_dict[tarball_store_path]["narHash"] + path_info_json = substituter.succeed(f"nix path-info --json-format 2 --json {tarball_store_path}").strip() + path_info_dict = json.loads(path_info_json)["info"] + # narHash is already in SRI format + tarball_hash_sri = path_info_dict[os.path.basename(tarball_store_path)]["narHash"] print(f"Tarball NAR hash (SRI): {tarball_hash_sri}") # Also get the old format hash for fetchTarball (which uses sha256 parameter) diff --git a/tests/nixos/s3-binary-cache-store.nix b/tests/nixos/s3-binary-cache-store.nix index 1f79e8cf969..33e869d1954 100644 --- a/tests/nixos/s3-binary-cache-store.nix 
+++ b/tests/nixos/s3-binary-cache-store.nix @@ -1,25 +1,23 @@ { - lib, config, - nixpkgs, ... }: let pkgs = config.nodes.client.nixpkgs.pkgs; - pkgA = pkgs.cowsay; + # Test packages - minimal packages for fast copying + pkgA = pkgs.writeText "test-package-a" "test package a"; + pkgB = pkgs.writeText "test-package-b" "test package b"; + pkgC = pkgs.writeText "test-package-c" "test package c"; + # S3 configuration accessKey = "BKIKJAA5BMMU2RHO6IBB"; secretKey = "V7f1CwQqAcwo80UEIJEjc5gVQUSSx5ohQ9GSrr12"; - env = "AWS_ACCESS_KEY_ID=${accessKey} AWS_SECRET_ACCESS_KEY=${secretKey}"; - - storeUrl = "s3://my-cache?endpoint=http://server:9000®ion=eu-west-1"; - objectThatDoesNotExist = "s3://my-cache/foo-that-does-not-exist?endpoint=http://server:9000®ion=eu-west-1"; in { - name = "s3-binary-cache-store"; + name = "curl-s3-binary-cache-store"; nodes = { server = @@ -31,8 +29,15 @@ in }: { virtualisation.writableStore = true; - virtualisation.additionalPaths = [ pkgA ]; + virtualisation.cores = 2; + virtualisation.additionalPaths = [ + pkgA + pkgB + pkgC + pkgs.coreutils + ]; environment.systemPackages = [ pkgs.minio-client ]; + nix.nixPath = [ "nixpkgs=${pkgs.path}" ]; nix.extraOptions = '' substituters = ''; @@ -51,6 +56,7 @@ in { config, pkgs, ... }: { virtualisation.writableStore = true; + virtualisation.cores = 2; nix.extraOptions = '' substituters = ''; @@ -59,38 +65,739 @@ in testScript = { nodes }: + # python '' - # fmt: off - start_all() + import json + import random + import re + import uuid - # Create a binary cache. 
- server.wait_for_unit("minio") - server.wait_for_unit("network-addresses-eth1.service") - server.wait_for_open_port(9000) + # ============================================================================ + # Configuration + # ============================================================================ + + ACCESS_KEY = '${accessKey}' + SECRET_KEY = '${secretKey}' + ENDPOINT = 'http://server:9000' + REGION = 'eu-west-1' + + PKGS = { + 'A': '${pkgA}', + 'B': '${pkgB}', + 'C': '${pkgC}', + } + + ENV_WITH_CREDS = f"AWS_ACCESS_KEY_ID={ACCESS_KEY} AWS_SECRET_ACCESS_KEY={SECRET_KEY}" + + # ============================================================================ + # Helper Functions + # ============================================================================ + + def make_s3_url(bucket, path="", **params): + """Build S3 URL with optional path and query parameters""" + params.setdefault('endpoint', ENDPOINT) + params.setdefault('region', REGION) + query = '&'.join(f"{k}={v}" for k, v in params.items()) + bucket_and_path = f"{bucket}{path}" if path else bucket + return f"s3://{bucket_and_path}?{query}" + + def get_package_hash(pkg_path): + """Extract store hash from package path""" + return pkg_path.split("/")[-1].split("-")[0] + + def verify_content_encoding(machine, bucket, object_path, expected_encoding): + """Verify S3 object has expected Content-Encoding header""" + stat = machine.succeed(f"mc stat minio/{bucket}/{object_path}") + if "Content-Encoding" not in stat or expected_encoding not in stat: + print(f"mc stat output for {object_path}:") + print(stat) + raise Exception(f"Expected Content-Encoding: {expected_encoding} header on {object_path}") + + def verify_no_compression(machine, bucket, object_path): + """Verify S3 object has no compression headers""" + stat = machine.succeed(f"mc stat minio/{bucket}/{object_path}") + if "Content-Encoding" in stat and ("gzip" in stat or "xz" in stat): + print(f"mc stat output for {object_path}:") + print(stat) + raise 
Exception(f"Object {object_path} should not have compression Content-Encoding") + + def assert_count(output, pattern, expected, error_msg): + """Assert that pattern appears exactly expected times in output""" + actual = output.count(pattern) + if actual != expected: + print("Debug output:") + print(output) + raise Exception(f"{error_msg}: expected {expected}, got {actual}") + + def verify_packages_in_store(machine, pkg_paths, should_exist=True): + """ + Verify whether packages exist in the store. + + Args: + machine: The machine to check on + pkg_paths: List of package paths to check (or single path) + should_exist: If True, verify packages exist; if False, verify they don't + """ + paths = [pkg_paths] if isinstance(pkg_paths, str) else pkg_paths + for pkg in paths: + if should_exist: + machine.succeed(f"nix path-info {pkg}") + else: + machine.fail(f"nix path-info {pkg}") + + def setup_s3(populate_bucket=[], public=False, versioned=False): + """ + Decorator that creates/destroys a unique bucket for each test. + Optionally pre-populates bucket with specified packages. + Cleans up client store after test completion. + + Args: + populate_bucket: List of packages to upload before test runs + public: If True, make the bucket publicly accessible + versioned: If True, enable versioning on the bucket before populating + """ + def decorator(test_func): + def wrapper(): + bucket = str(uuid.uuid4()) + server.succeed(f"mc mb minio/{bucket}") + try: + if public: + server.succeed(f"mc anonymous set download minio/{bucket}") + if versioned: + server.succeed(f"mc version enable minio/{bucket}") + if populate_bucket: + store_url = make_s3_url(bucket) + for pkg in populate_bucket: + server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {pkg}") + test_func(bucket) + finally: + server.succeed(f"mc rb --force minio/{bucket}") + # Clean up client store - only delete if path exists + for pkg in PKGS.values(): + client.succeed(f"[ ! 
-e {pkg} ] || nix store delete --ignore-liveness {pkg}") + return wrapper + return decorator + + # ============================================================================ + # Test Functions + # ============================================================================ + + @setup_s3() + def test_credential_caching(bucket): + """Verify credential providers are cached and reused""" + print("\n=== Testing Credential Caching ===") + + store_url = make_s3_url(bucket) + output = server.succeed( + f"{ENV_WITH_CREDS} nix copy --debug --to '{store_url}' " + f"{PKGS['A']} {PKGS['B']} {PKGS['C']} 2>&1" + ) + + assert_count( + output, + "creating new AWS credential provider", + 1, + "Credential provider caching failed" + ) + + print("✓ Credential provider created once and cached") + + @setup_s3(populate_bucket=[PKGS['A']]) + def test_fetchurl_basic(bucket): + """Test builtins.fetchurl works with s3:// URLs""" + print("\n=== Testing builtins.fetchurl ===") + + client.wait_for_unit("network-addresses-eth1.service") + + cache_info_url = make_s3_url(bucket, path="/nix-cache-info") + + client.succeed( + f"{ENV_WITH_CREDS} nix eval --impure --expr " + f"'builtins.fetchurl {{ name = \"foo\"; url = \"{cache_info_url}\"; }}'" + ) + + print("✓ builtins.fetchurl works with s3:// URLs") + + @setup_s3() + def test_error_message_formatting(bucket): + """Verify error messages display URLs correctly""" + print("\n=== Testing Error Message Formatting ===") + + nonexistent_url = make_s3_url(bucket, path="/foo-that-does-not-exist") + expected_http_url = f"{ENDPOINT}/{bucket}/foo-that-does-not-exist" + + error_msg = client.fail( + f"{ENV_WITH_CREDS} nix eval --impure --expr " + f"'builtins.fetchurl {{ name = \"foo\"; url = \"{nonexistent_url}\"; }}' 2>&1" + ) + + if f"unable to download '{expected_http_url}': HTTP error 404" not in error_msg: + print("Actual error message:") + print(error_msg) + raise Exception("Error message formatting failed - should show actual URL, not %s placeholder") 
+ + print("✓ Error messages format URLs correctly") + + @setup_s3(populate_bucket=[PKGS['A']]) + def test_fork_credential_preresolution(bucket): + """Test credential pre-resolution in forked processes""" + print("\n=== Testing Fork Credential Pre-resolution ===") + + # Get hash of nix-cache-info for fixed-output derivation + cache_info_url = make_s3_url(bucket, path="/nix-cache-info") + + cache_info_path = client.succeed( + f"{ENV_WITH_CREDS} nix eval --impure --raw --expr " + f"'builtins.fetchurl {{ name = \"nix-cache-info\"; url = \"{cache_info_url}\"; }}'" + ).strip() + + cache_info_hash = client.succeed( + f"nix hash file --type sha256 --base32 {cache_info_path}" + ).strip() + + # Build derivation with unique test ID + test_id = random.randint(0, 10000) + test_url = make_s3_url(bucket, path="/nix-cache-info", test_id=test_id) + + fetchurl_expr = """ + import {{ + name = "s3-fork-test-{id}"; + url = "{url}"; + sha256 = "{hash}"; + }} + """.format(id=test_id, url=test_url, hash=cache_info_hash) + + output = client.succeed( + f"{ENV_WITH_CREDS} nix build --debug --impure --no-link --expr '{fetchurl_expr}' 2>&1" + ) + + # Verify fork behavior + if "builtin:fetchurl creating fresh FileTransfer instance" not in output: + print("Debug output:") + print(output) + raise Exception("Expected to find FileTransfer creation in forked process") + + print(" ✓ Forked process creates fresh FileTransfer") + + # Verify pre-resolution in parent + required_messages = [ + "Pre-resolving AWS credentials for S3 URL in builtin:fetchurl", + "Successfully pre-resolved AWS credentials in parent process", + ] + + for msg in required_messages: + if msg not in output: + print("Debug output:") + print(output) + raise Exception(f"Missing expected message: {msg}") + + print(" ✓ Parent pre-resolves credentials") + + # Verify child uses pre-resolved credentials + if "Using pre-resolved AWS credentials from parent process" not in output: + print("Debug output:") + print(output) + raise 
Exception("Child should use pre-resolved credentials") + + # Extract child PID and verify it doesn't create new providers + filetransfer_match = re.search( + r'\[pid=(\d+)\] builtin:fetchurl creating fresh FileTransfer instance', + output + ) + + if not filetransfer_match: + raise Exception("Could not extract child PID from debug output") + + child_pid = filetransfer_match.group(1) + child_provider_creation = f"[pid={child_pid}] creating new AWS credential provider" + + if child_provider_creation in output: + print("Debug output:") + print(output) + raise Exception(f"Child process (pid={child_pid}) should NOT create new credential providers") + + print(" ✓ Child uses pre-resolved credentials (no new providers)") + + @setup_s3(populate_bucket=[PKGS['A'], PKGS['B'], PKGS['C']]) + def test_store_operations(bucket): + """Test nix store info and copy operations""" + print("\n=== Testing Store Operations ===") + + store_url = make_s3_url(bucket) + + # Verify store info works + client.succeed(f"{ENV_WITH_CREDS} nix store info --store '{store_url}' >&2") + + # Get and validate store info JSON + info_json = client.succeed(f"{ENV_WITH_CREDS} nix store info --json --store '{store_url}'") + store_info = json.loads(info_json) + + if not store_info.get("url"): + raise Exception("Store should have a URL") + + print(f" ✓ Store URL: {store_info['url']}") + + # Test copy from store + verify_packages_in_store(client, PKGS['A'], should_exist=False) + + output = client.succeed( + f"{ENV_WITH_CREDS} nix copy --debug --no-check-sigs " + f"--from '{store_url}' {PKGS['A']} {PKGS['B']} {PKGS['C']} 2>&1" + ) + + assert_count( + output, + "creating new AWS credential provider", + 1, + "Client credential provider caching failed" + ) + + verify_packages_in_store(client, [PKGS['A'], PKGS['B'], PKGS['C']]) + + print(" ✓ nix copy works") + print(" ✓ Credentials cached on client") + + @setup_s3(populate_bucket=[PKGS['A'], PKGS['B']], public=True) + def test_public_bucket_operations(bucket): + 
"""Test store operations on public bucket without credentials""" + print("\n=== Testing Public Bucket Operations ===") + + store_url = make_s3_url(bucket) + + # Verify store info works without credentials + client.succeed(f"nix store info --store '{store_url}' >&2") + print(" ✓ nix store info works without credentials") + + # Get and validate store info JSON + info_json = client.succeed(f"nix store info --json --store '{store_url}'") + store_info = json.loads(info_json) + + if not store_info.get("url"): + raise Exception("Store should have a URL") + + print(f" ✓ Store URL: {store_info['url']}") + + # Verify packages are not yet in client store + verify_packages_in_store(client, [PKGS['A'], PKGS['B']], should_exist=False) + + # Test copy from public bucket without credentials + client.succeed( + f"nix copy --debug --no-check-sigs " + f"--from '{store_url}' {PKGS['A']} {PKGS['B']} 2>&1" + ) + + # Verify packages were copied successfully + verify_packages_in_store(client, [PKGS['A'], PKGS['B']]) + + print(" ✓ nix copy from public bucket works without credentials") + + @setup_s3(populate_bucket=[PKGS['A']]) + def test_url_format_variations(bucket): + """Test different S3 URL parameter combinations""" + print("\n=== Testing URL Format Variations ===") + + # Test parameter order variation (region before endpoint) + url1 = f"s3://{bucket}?region={REGION}&endpoint={ENDPOINT}" + client.succeed(f"{ENV_WITH_CREDS} nix store info --store '{url1}' >&2") + print(" ✓ Parameter order: region before endpoint works") + + # Test parameter order variation (endpoint before region) + url2 = f"s3://{bucket}?endpoint={ENDPOINT}®ion={REGION}" + client.succeed(f"{ENV_WITH_CREDS} nix store info --store '{url2}' >&2") + print(" ✓ Parameter order: endpoint before region works") + + @setup_s3(populate_bucket=[PKGS['A']]) + def test_concurrent_fetches(bucket): + """Validate thread safety with concurrent S3 operations""" + print("\n=== Testing Concurrent Fetches ===") + + # Get hash for test 
derivations + cache_info_url = make_s3_url(bucket, path="/nix-cache-info") + + cache_info_path = client.succeed( + f"{ENV_WITH_CREDS} nix eval --impure --raw --expr " + f"'builtins.fetchurl {{ name = \"nix-cache-info\"; url = \"{cache_info_url}\"; }}'" + ).strip() + + cache_info_hash = client.succeed( + f"nix hash file --type sha256 --base32 {cache_info_path}" + ).strip() - server.succeed("mc config host add minio http://localhost:9000 ${accessKey} ${secretKey} --api s3v4") - server.succeed("mc mb minio/my-cache") + # Create 5 concurrent fetch derivations + # Build base URL for concurrent test (we'll add fetch_id in Nix interpolation) + base_url = make_s3_url(bucket, path="/nix-cache-info") + concurrent_expr = """ + let + mkFetch = i: import {{ + name = "concurrent-s3-fetch-''${{toString i}}"; + url = "{url}&fetch_id=''${{toString i}}"; + sha256 = "{hash}"; + }}; + fetches = builtins.listToAttrs (map (i: {{ + name = "fetch''${{toString i}}"; + value = mkFetch i; + }}) (builtins.genList (i: i) 5)); + in fetches + """.format(url=base_url, hash=cache_info_hash) - server.succeed("${env} nix copy --to '${storeUrl}' ${pkgA}") + try: + output = client.succeed( + f"{ENV_WITH_CREDS} nix build --debug --impure --no-link " + f"--expr '{concurrent_expr}' --max-jobs 5 2>&1" + ) + except: + output = client.fail( + f"{ENV_WITH_CREDS} nix build --debug --impure --no-link " + f"--expr '{concurrent_expr}' --max-jobs 5 2>&1" + ) - client.wait_for_unit("network-addresses-eth1.service") + if "error:" in output.lower(): + print("Found error during concurrent fetches:") + print(output) - # Test fetchurl on s3:// URLs while we're at it. 
- client.succeed("${env} nix eval --impure --expr 'builtins.fetchurl { name = \"foo\"; url = \"s3://my-cache/nix-cache-info?endpoint=http://server:9000®ion=eu-west-1\"; }'") + providers_created = output.count("creating new AWS credential provider") + transfers_created = output.count("builtin:fetchurl creating fresh FileTransfer instance") - # Test that the format string in the error message is properly setup and won't display `%s` instead of the failed URI - msg = client.fail("${env} nix eval --impure --expr 'builtins.fetchurl { name = \"foo\"; url = \"${objectThatDoesNotExist}\"; }' 2>&1") - if "S3 object '${objectThatDoesNotExist}' does not exist" not in msg: - print(msg) # So that you can see the message that was improperly formatted - raise Exception("Error message formatting didn't work") + print(f" ✓ {providers_created} credential providers created") + print(f" ✓ {transfers_created} FileTransfer instances created") - # Copy a package from the binary cache. - client.fail("nix path-info ${pkgA}") + if transfers_created != 5: + print("Debug output:") + print(output) + raise Exception( + f"Expected 5 FileTransfer instances for 5 concurrent fetches, got {transfers_created}" + ) - client.succeed("${env} nix store info --store '${storeUrl}' >&2") + if providers_created != 1: + print("Debug output:") + print(output) + raise Exception( + f"Expected 1 credential provider for concurrent fetches, got {providers_created}" + ) + + @setup_s3() + def test_compression_narinfo_gzip(bucket): + """Test narinfo compression with gzip""" + print("\n=== Testing Compression: narinfo (gzip) ===") + + store_url = make_s3_url(bucket, **{'narinfo-compression': 'gzip'}) + server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKGS['B']}") + + pkg_hash = get_package_hash(PKGS['B']) + verify_content_encoding(server, bucket, f"{pkg_hash}.narinfo", "gzip") + + print(" ✓ .narinfo has Content-Encoding: gzip") + + # Verify client can download and decompress + 
client.succeed(f"{ENV_WITH_CREDS} nix copy --from '{store_url}' --no-check-sigs {PKGS['B']}") + verify_packages_in_store(client, PKGS['B']) + + print(" ✓ Client decompressed .narinfo successfully") + + @setup_s3() + def test_compression_mixed(bucket): + """Test mixed compression (narinfo=xz, ls=gzip)""" + print("\n=== Testing Compression: mixed (narinfo=xz, ls=gzip) ===") + + store_url = make_s3_url( + bucket, + **{'narinfo-compression': 'xz', 'write-nar-listing': 'true', 'ls-compression': 'gzip'} + ) + + server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKGS['C']}") + + pkg_hash = get_package_hash(PKGS['C']) + + # Verify .narinfo has xz compression + verify_content_encoding(server, bucket, f"{pkg_hash}.narinfo", "xz") + print(" ✓ .narinfo has Content-Encoding: xz") + + # Verify .ls has gzip compression + verify_content_encoding(server, bucket, f"{pkg_hash}.ls", "gzip") + print(" ✓ .ls has Content-Encoding: gzip") + + # Verify client can download with mixed compression + client.succeed(f"{ENV_WITH_CREDS} nix copy --from '{store_url}' --no-check-sigs {PKGS['C']}") + verify_packages_in_store(client, PKGS['C']) + + print(" ✓ Client downloaded package with mixed compression") + + @setup_s3() + def test_compression_disabled(bucket): + """Verify no compression by default""" + print("\n=== Testing Compression: disabled (default) ===") + + store_url = make_s3_url(bucket) + server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKGS['A']}") + + pkg_hash = get_package_hash(PKGS['A']) + verify_no_compression(server, bucket, f"{pkg_hash}.narinfo") + + print(" ✓ No compression applied by default") + + @setup_s3() + def test_nix_prefetch_url(bucket): + """Test that nix-prefetch-url retrieves actual file content from S3, not empty files (issue #8862)""" + print("\n=== Testing nix-prefetch-url S3 Content Retrieval (issue #8862) ===") + + # Create a test file with known content + test_content = "This is test content to verify S3 downloads work correctly!\n" + 
test_file_size = len(test_content) + + server.succeed(f"echo -n '{test_content}' > /tmp/test-file.txt") + + # Upload to S3 + server.succeed(f"mc cp /tmp/test-file.txt minio/{bucket}/test-file.txt") + + # Calculate expected hash + expected_hash = server.succeed( + "nix hash file --type sha256 --base32 /tmp/test-file.txt" + ).strip() + + print(f" ✓ Uploaded test file to S3 ({test_file_size} bytes)") + + # Use nix-prefetch-url to download from S3 + s3_url = make_s3_url(bucket, path="/test-file.txt") + + prefetch_output = client.succeed( + f"{ENV_WITH_CREDS} nix-prefetch-url --print-path '{s3_url}'" + ) + + # Extract hash and store path + # With --print-path, output is: \n + lines = prefetch_output.strip().split('\n') + prefetch_hash = lines[0] # First line is the hash + store_path = lines[1] # Second line is the store path + + # Verify hash matches + if prefetch_hash != expected_hash: + raise Exception( + f"Hash mismatch: expected {expected_hash}, got {prefetch_hash}" + ) + + print(" ✓ nix-prefetch-url completed with correct hash") + + # Verify the downloaded file is NOT empty (the bug in #8862) + file_size = int(client.succeed(f"stat -c %s {store_path}").strip()) + + if file_size == 0: + raise Exception("Downloaded file is EMPTY - issue #8862 regression detected!") + + if file_size != test_file_size: + raise Exception( + f"File size mismatch: expected {test_file_size}, got {file_size}" + ) + + print(f" ✓ File has correct size ({file_size} bytes, not empty)") + + # Verify actual content matches by comparing hashes instead of printing entire file + downloaded_hash = client.succeed(f"nix hash file --type sha256 --base32 {store_path}").strip() + + if downloaded_hash != expected_hash: + raise Exception(f"Content hash mismatch: expected {expected_hash}, got {downloaded_hash}") + + print(" ✓ File content verified correct (hash matches)") + + @setup_s3(populate_bucket=[PKGS['A']], versioned=True) + def test_versioned_urls(bucket): + """Test that versionId parameter is 
accepted in S3 URLs""" + print("\n=== Testing Versioned URLs ===") + + # Get the nix-cache-info file + cache_info_url = make_s3_url(bucket, path="/nix-cache-info") + + # Fetch without versionId should work + client.succeed( + f"{ENV_WITH_CREDS} nix eval --impure --expr " + f"'builtins.fetchurl {{ name = \"cache-info\"; url = \"{cache_info_url}\"; }}'" + ) + print(" ✓ Fetch without versionId works") + + # List versions to get a version ID + # MinIO output format: [timestamp] size tier versionId versionNumber method filename + versions_output = server.succeed(f"mc ls --versions minio/{bucket}/nix-cache-info") + + # Extract version ID from output (4th field after STANDARD) + import re + version_match = re.search(r'STANDARD\s+(\S+)\s+v\d+', versions_output) + if not version_match: + print(f"Debug: versions output: {versions_output}") + raise Exception("Could not extract version ID from MinIO output") + + version_id = version_match.group(1) + print(f" ✓ Found version ID: {version_id}") + + # Version ID should not be "null" since versioning was enabled before upload + if version_id == "null": + raise Exception("Version ID is 'null' - versioning may not be working correctly") + + # Fetch with versionId parameter + versioned_url = f"{cache_info_url}&versionId={version_id}" + client.succeed( + f"{ENV_WITH_CREDS} nix eval --impure --expr " + f"'builtins.fetchurl {{ name = \"cache-info-versioned\"; url = \"{versioned_url}\"; }}'" + ) + print(" ✓ Fetch with versionId parameter works") + + @setup_s3() + def test_multipart_upload_basic(bucket): + """Test basic multipart upload with a large file""" + print("\n--- Test: Multipart Upload Basic ---") + + large_file_size = 10 * 1024 * 1024 + large_pkg = server.succeed( + "nix-store --add $(dd if=/dev/urandom of=/tmp/large-file bs=1M count=10 2>/dev/null && echo /tmp/large-file)" + ).strip() + + chunk_size = 5 * 1024 * 1024 + expected_parts = 3 # 10 MB raw becomes ~10.5 MB compressed (NAR + xz overhead) + + store_url = make_s3_url( + 
bucket, + **{ + "multipart-upload": "true", + "multipart-threshold": str(5 * 1024 * 1024), + "multipart-chunk-size": str(chunk_size), + } + ) + + print(f" Uploading {large_file_size} byte file (expect {expected_parts} parts)") + output = server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {large_pkg} --debug 2>&1") + + if "using S3 multipart upload" not in output: + raise Exception("Expected multipart upload to be used") + + expected_msg = f"{expected_parts} parts uploaded" + if expected_msg not in output: + print("Debug output:") + print(output) + raise Exception(f"Expected '{expected_msg}' in output") + + print(f" ✓ Multipart upload used with {expected_parts} parts") + + client.succeed(f"{ENV_WITH_CREDS} nix copy --from '{store_url}' {large_pkg} --no-check-sigs") + verify_packages_in_store(client, large_pkg, should_exist=True) + + print(" ✓ Large file downloaded and verified") + + @setup_s3() + def test_multipart_threshold(bucket): + """Test that files below threshold use regular upload""" + print("\n--- Test: Multipart Threshold Behavior ---") + + store_url = make_s3_url( + bucket, + **{ + "multipart-upload": "true", + "multipart-threshold": str(1024 * 1024 * 1024), + } + ) + + print(" Uploading small file with high threshold") + output = server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKGS['A']} --debug 2>&1") + + if "using S3 multipart upload" in output: + raise Exception("Should not use multipart for file below threshold") + + if "using S3 regular upload" not in output: + raise Exception("Expected regular upload to be used") + + print(" ✓ Regular upload used for file below threshold") + + client.succeed(f"{ENV_WITH_CREDS} nix copy --no-check-sigs --from '{store_url}' {PKGS['A']}") + verify_packages_in_store(client, PKGS['A'], should_exist=True) + + print(" ✓ Small file uploaded and verified") + + @setup_s3() + def test_multipart_with_log_compression(bucket): + """Test multipart upload with compressed build logs""" + print("\n--- Test: 
Multipart Upload with Log Compression ---") + + # Create a derivation that produces a large text log (12 MB of base64 output) + drv_path = server.succeed( + """ + nix-instantiate --expr ' + let pkgs = import {}; + in derivation { + name = "large-log-builder"; + builder = "/bin/sh"; + args = ["-c" "$coreutils/bin/dd if=/dev/urandom bs=1M count=12 | $coreutils/bin/base64; echo success > $out"]; + coreutils = pkgs.coreutils; + system = builtins.currentSystem; + } + ' + """ + ).strip() + + print(" Building derivation to generate large log") + server.succeed(f"nix-store --realize {drv_path} &>/dev/null") + + # Upload logs with compression and multipart + store_url = make_s3_url( + bucket, + **{ + "multipart-upload": "true", + "multipart-threshold": str(5 * 1024 * 1024), + "multipart-chunk-size": str(5 * 1024 * 1024), + "log-compression": "xz", + } + ) + + print(" Uploading build log with compression and multipart") + output = server.succeed( + f"{ENV_WITH_CREDS} nix store copy-log --to '{store_url}' {drv_path} --debug 2>&1" + ) + + # Should use multipart for the compressed log + if "using S3 multipart upload" not in output or "log/" not in output: + print("Debug output:") + print(output) + raise Exception("Expected multipart upload to be used for compressed log") + + if "parts uploaded" not in output: + print("Debug output:") + print(output) + raise Exception("Expected multipart completion message") + + print(" ✓ Compressed log uploaded with multipart") + + # ============================================================================ + # Main Test Execution + # ============================================================================ + + print("\n" + "="*80) + print("S3 Binary Cache Store Tests") + print("="*80) + + start_all() + + # Initialize MinIO server + server.wait_for_unit("minio") + server.wait_for_unit("network-addresses-eth1.service") + server.wait_for_open_port(9000) + server.succeed(f"mc config host add minio http://localhost:9000 {ACCESS_KEY} 
{SECRET_KEY} --api s3v4") - client.succeed("${env} nix copy --no-check-sigs --from '${storeUrl}' ${pkgA}") + # Run tests (each gets isolated bucket via decorator) + test_credential_caching() + test_fetchurl_basic() + test_error_message_formatting() + test_fork_credential_preresolution() + test_store_operations() + test_public_bucket_operations() + test_url_format_variations() + test_concurrent_fetches() + test_compression_narinfo_gzip() + test_compression_mixed() + test_compression_disabled() + test_nix_prefetch_url() + test_versioned_urls() + test_multipart_upload_basic() + test_multipart_threshold() + test_multipart_with_log_compression() - client.succeed("nix path-info ${pkgA}") + print("\n" + "="*80) + print("✓ All S3 Binary Cache Store Tests Passed!") + print("="*80) ''; }