From cc78a23ce8b1a196a49f059cafdb5c932c8cc190 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Thu, 20 Mar 2025 17:14:02 +0100 Subject: [PATCH 01/44] Focus CI on building a single wheel for a start --- .github/workflows/circleci.yml | 25 -- .github/workflows/codeql.yml | 75 ---- .github/workflows/compiler_sanitizers.yml | 129 ------- .github/workflows/cygwin.yml | 81 ----- .github/workflows/dependency-review.yml | 24 -- .github/workflows/emscripten.yml | 85 ----- .github/workflows/labeler.yml | 19 - .github/workflows/linux.yml | 350 ------------------ .github/workflows/linux_blas.yml | 410 ---------------------- .github/workflows/linux_musl.yml | 69 ---- .github/workflows/linux_qemu.yml | 280 --------------- .github/workflows/linux_simd.yml | 289 --------------- .github/workflows/macos.yml | 164 --------- .github/workflows/mypy.yml | 74 ---- .github/workflows/mypy_primer.yml | 99 ------ .github/workflows/mypy_primer_comment.yml | 103 ------ .github/workflows/scorecards.yml | 55 --- .github/workflows/wheels.yml | 124 +------ .github/workflows/windows.yml | 133 ------- .github/workflows/windows_arm64.yml | 208 ----------- 20 files changed, 11 insertions(+), 2785 deletions(-) delete mode 100644 .github/workflows/circleci.yml delete mode 100644 .github/workflows/codeql.yml delete mode 100644 .github/workflows/compiler_sanitizers.yml delete mode 100644 .github/workflows/cygwin.yml delete mode 100644 .github/workflows/dependency-review.yml delete mode 100644 .github/workflows/emscripten.yml delete mode 100644 .github/workflows/labeler.yml delete mode 100644 .github/workflows/linux.yml delete mode 100644 .github/workflows/linux_blas.yml delete mode 100644 .github/workflows/linux_musl.yml delete mode 100644 .github/workflows/linux_qemu.yml delete mode 100644 .github/workflows/linux_simd.yml delete mode 100644 .github/workflows/macos.yml delete mode 100644 .github/workflows/mypy.yml delete mode 100644 .github/workflows/mypy_primer.yml delete mode 
100644 .github/workflows/mypy_primer_comment.yml delete mode 100644 .github/workflows/scorecards.yml delete mode 100644 .github/workflows/windows.yml delete mode 100644 .github/workflows/windows_arm64.yml diff --git a/.github/workflows/circleci.yml b/.github/workflows/circleci.yml deleted file mode 100644 index c0c8876b6bbe..000000000000 --- a/.github/workflows/circleci.yml +++ /dev/null @@ -1,25 +0,0 @@ -# To enable this workflow on a fork, comment out: -# -# if: github.repository == 'numpy/numpy' - -name: CircleCI artifact redirector - -on: [status] - -permissions: read-all - -jobs: - circleci_artifacts_redirector_job: - runs-on: ubuntu-latest - if: "github.repository == 'numpy/numpy' && !contains(github.event.head_commit.message, '[circle skip]') && !contains(github.event.head_commit.message, '[skip circle]') && github.event.context == 'ci/circleci: build'" - name: Run CircleCI artifacts redirector - permissions: - statuses: write - steps: - - name: GitHub Action step - uses: larsoner/circleci-artifacts-redirector-action@4e13a10d89177f4bfc8007a7064bdbeda848d8d1 # master - with: - repo-token: ${{ secrets.GITHUB_TOKEN }} - api-token: ${{ secrets.CIRCLE_TOKEN }} - artifact-path: 0/doc/build/html/index.html - circleci-jobs: build diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml deleted file mode 100644 index a6665adafed3..000000000000 --- a/.github/workflows/codeql.yml +++ /dev/null @@ -1,75 +0,0 @@ -# For most projects, this workflow file will not need changing; you simply need -# to commit it to your repository. -# -# You may wish to alter this file to override the set of languages analyzed, -# or to provide custom queries or build logic. -# -# ******** NOTE ******** -# We have attempted to detect the languages in your repository. Please check -# the `language` matrix defined below to confirm you have the correct set of -# supported CodeQL languages. 
-# -name: "CodeQL" - -on: - push: - branches: ["main"] - pull_request: - # The branches below must be a subset of the branches above - branches: ["main"] - schedule: - - cron: "0 0 * * 1" - -permissions: - contents: read - -jobs: - analyze: - name: Analyze - runs-on: ubuntu-latest - permissions: - actions: read - contents: read - security-events: write - - strategy: - fail-fast: false - matrix: - language: ["python"] - # CodeQL supports [ $supported-codeql-languages ] - # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support - - steps: - - name: Checkout repository - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - persist-credentials: false - - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@5f8171a638ada777af81d42b55959a643bb29017 # v3.28.12 - with: - languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. - - # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). - # If this step fails, then you should remove it and run the build manually (see below) - - name: Autobuild - uses: github/codeql-action/autobuild@5f8171a638ada777af81d42b55959a643bb29017 # v3.28.12 - - # â„šī¸ Command-line programs to run using the OS shell. - # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun - - # If the Autobuild fails above, remove it and uncomment the following three lines. - # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. 
- - # - run: | - # echo "Run, Build Application using script" - # ./location_of_script_within_repo/buildscript.sh - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@5f8171a638ada777af81d42b55959a643bb29017 # v3.28.12 - with: - category: "/language:${{matrix.language}}" diff --git a/.github/workflows/compiler_sanitizers.yml b/.github/workflows/compiler_sanitizers.yml deleted file mode 100644 index 9452289239bc..000000000000 --- a/.github/workflows/compiler_sanitizers.yml +++ /dev/null @@ -1,129 +0,0 @@ -name: Test with compiler sanitizers - -on: - push: - branches: - - main - pull_request: - branches: - - main - - maintenance/** - -defaults: - run: - shell: bash - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - -permissions: - contents: read # to fetch code (actions/checkout) - -jobs: - clang_ASAN: - # To enable this workflow on a fork, comment out: - if: github.repository == 'numpy/numpy' - runs-on: macos-latest - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - name: Set up pyenv - run: | - git clone https://github.com/pyenv/pyenv.git "$HOME/.pyenv" - PYENV_ROOT="$HOME/.pyenv" - PYENV_BIN="$PYENV_ROOT/bin" - PYENV_SHIMS="$PYENV_ROOT/shims" - echo "$PYENV_BIN" >> $GITHUB_PATH - echo "$PYENV_SHIMS" >> $GITHUB_PATH - echo "PYENV_ROOT=$PYENV_ROOT" >> $GITHUB_ENV - - name: Check pyenv is working - run: - pyenv --version - - name: Set up LLVM - run: | - brew install llvm@19 - LLVM_PREFIX=$(brew --prefix llvm@19) - echo CC="$LLVM_PREFIX/bin/clang" >> $GITHUB_ENV - echo CXX="$LLVM_PREFIX/bin/clang++" >> $GITHUB_ENV - echo LDFLAGS="-L$LLVM_PREFIX/lib" >> $GITHUB_ENV - echo CPPFLAGS="-I$LLVM_PREFIX/include" >> $GITHUB_ENV - - name: Build Python with address sanitizer - run: | - CONFIGURE_OPTS="--with-address-sanitizer" pyenv install 3.13t - pyenv global 3.13t - - name: 
Install dependencies - run: | - # TODO: remove when a released cython supports free-threaded python - pip install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple cython - pip install -r requirements/build_requirements.txt - pip install -r requirements/ci_requirements.txt - pip install -r requirements/test_requirements.txt - # xdist captures stdout/stderr, but we want the ASAN output - pip uninstall -y pytest-xdist - - name: Build - run: - python -m spin build -j2 -- -Db_sanitize=address - - name: Test - run: | - # pass -s to pytest to see ASAN errors and warnings, otherwise pytest captures them - ASAN_OPTIONS=detect_leaks=0:symbolize=1:strict_init_order=true:allocator_may_return_null=1 \ - python -m spin test -- -v -s --timeout=600 --durations=10 - - clang_TSAN: - # To enable this workflow on a fork, comment out: - if: github.repository == 'numpy/numpy' - runs-on: macos-latest - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - name: Set up pyenv - run: | - git clone https://github.com/pyenv/pyenv.git "$HOME/.pyenv" - PYENV_ROOT="$HOME/.pyenv" - PYENV_BIN="$PYENV_ROOT/bin" - PYENV_SHIMS="$PYENV_ROOT/shims" - echo "$PYENV_BIN" >> $GITHUB_PATH - echo "$PYENV_SHIMS" >> $GITHUB_PATH - echo "PYENV_ROOT=$PYENV_ROOT" >> $GITHUB_ENV - - name: Check pyenv is working - run: - pyenv --version - - name: Set up LLVM - run: | - brew install llvm@19 - LLVM_PREFIX=$(brew --prefix llvm@19) - echo CC="$LLVM_PREFIX/bin/clang" >> $GITHUB_ENV - echo CXX="$LLVM_PREFIX/bin/clang++" >> $GITHUB_ENV - echo LDFLAGS="-L$LLVM_PREFIX/lib" >> $GITHUB_ENV - echo CPPFLAGS="-I$LLVM_PREFIX/include" >> $GITHUB_ENV - - name: Build Python with thread sanitizer support - run: | - # free-threaded Python is much more likely to trigger races - CONFIGURE_OPTS="--with-thread-sanitizer" pyenv install 3.13t - pyenv global 3.13t - - name: Install dependencies - run: | - # 
TODO: remove when a released cython supports free-threaded python - pip install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple cython - pip install -r requirements/build_requirements.txt - pip install -r requirements/ci_requirements.txt - pip install -r requirements/test_requirements.txt - # xdist captures stdout/stderr, but we want the TSAN output - pip uninstall -y pytest-xdist - - name: Build - run: - python -m spin build -j2 -- -Db_sanitize=thread - - name: Test - run: | - # These tests are slow, so only run tests in files that do "import threading" to make them count - TSAN_OPTIONS="allocator_may_return_null=1:suppressions=$GITHUB_WORKSPACE/tools/ci/tsan_suppressions.txt" \ - python -m spin test \ - `find numpy -name "test*.py" | xargs grep -l "import threading" | tr '\n' ' '` \ - -- -v -s --timeout=600 --durations=10 diff --git a/.github/workflows/cygwin.yml b/.github/workflows/cygwin.yml deleted file mode 100644 index 174d04efb567..000000000000 --- a/.github/workflows/cygwin.yml +++ /dev/null @@ -1,81 +0,0 @@ -name: Test on Cygwin -on: - pull_request: - branches: - - main - - maintenance/** - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - -permissions: - contents: read # to fetch code (actions/checkout) - -jobs: - cygwin_build_test: - runs-on: windows-latest - # To enable this workflow on a fork, comment out: - if: github.repository == 'numpy/numpy' - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - name: Install Cygwin - uses: egor-tensin/setup-cygwin@d2c752bab416d4b0662591bd366fc2686297c82d # v4 - with: - platform: x86_64 - install-dir: 'C:\tools\cygwin' - packages: >- - python39=3.9.16-1 python39-devel=3.9.16-1 python39-pip python-pip-wheel - python-setuptools-wheel liblapack-devel liblapack0 gcc-fortran - gcc-g++ git dash cmake ninja - - name: Set 
Windows PATH - uses: egor-tensin/cleanup-path@f04bc953e6823bf491cc0bdcff959c630db1b458 # v4.0.1 - with: - dirs: 'C:\tools\cygwin\bin;C:\tools\cygwin\lib\lapack' - - name: Verify that bash is Cygwin bash - run: | - command bash - bash -c "uname -svrmo" - - name: Tell Cygwin's git about this repository. - run: | - dash -c "which git; /usr/bin/git config --system --add safe.directory /cygdrive/d/a/numpy/numpy" - - name: Verify python version - # Make sure it's the Cygwin one, not a Windows one - run: | - dash -c "which python3.9; /usr/bin/python3.9 --version -V" - - name: Build NumPy wheel - run: | - dash -c "/usr/bin/python3.9 -m pip install build pytest hypothesis pytest-xdist Cython meson" - dash -c "/usr/bin/python3.9 -m build . --wheel -Csetup-args=-Dblas=blas -Csetup-args=-Dlapack=lapack -Csetup-args=-Dcpu-dispatch=none -Csetup-args=-Dcpu-baseline=native" - - name: Install NumPy from wheel - run: | - bash -c "/usr/bin/python3.9 -m pip install dist/numpy-*cp39*.whl" - - name: Rebase NumPy compiled extensions - run: | - dash "tools/rebase_installed_dlls_cygwin.sh" 3.9 - - name: Run NumPy test suite - shell: "C:\\tools\\cygwin\\bin\\bash.exe -o igncr -eo pipefail {0}" - run: | - cd tools - /usr/bin/python3.9 -m pytest --pyargs numpy -n2 -m "not slow" - - name: Upload wheel if tests fail - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 - if: failure() - with: - name: numpy-cygwin-wheel - path: dist/numpy-*cp39*.whl - - name: Check the extension modules on failure - if: failure() - run: | - dash -c "/usr/bin/python3.9 -m pip show numpy" - dash -c "/usr/bin/python3.9 -m pip show -f numpy | grep .dll" - dash -c "/bin/tr -d '\r' list_dlls_unix.sh" - dash "list_dlls_unix.sh" 3.9 - - name: Print installed package versions on failure - if: failure() - run: | - cygcheck -c diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml deleted file mode 100644 index 58bf4a40055b..000000000000 --- 
a/.github/workflows/dependency-review.yml +++ /dev/null @@ -1,24 +0,0 @@ -# Dependency Review Action -# -# This Action will scan dependency manifest files that change as part of a Pull Request, surfacing known-vulnerable versions of the packages declared or updated in the PR. Once installed, if the workflow run is marked as required, PRs introducing known-vulnerable packages will be blocked from merging. -# -# Source repository: https://github.com/actions/dependency-review-action -# Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement -name: 'Dependency Review' -on: [pull_request] - -permissions: - contents: read - -jobs: - dependency-review: - runs-on: ubuntu-latest - steps: - - name: 'Checkout Repository' - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - persist-credentials: false - - name: 'Dependency Review' - uses: actions/dependency-review-action@3b139cfc5fae8b618d3eae3675e383bb1769c019 # v4.5.0 - with: - allow-ghsas: GHSA-cx63-2mw6-8hw5 diff --git a/.github/workflows/emscripten.yml b/.github/workflows/emscripten.yml deleted file mode 100644 index fe8d5376bd96..000000000000 --- a/.github/workflows/emscripten.yml +++ /dev/null @@ -1,85 +0,0 @@ -name: Test Emscripten/Pyodide build - -on: - pull_request: - branches: - - main - - maintenance/** - # Note: this workflow gets triggered on the same schedule as the - # wheels.yml workflow to upload WASM wheels to Anaconda.org. - schedule: - # ┌───────────── minute (0 - 59) - # │ ┌───────────── hour (0 - 23) - # │ │ ┌───────────── day of the month (1 - 31) - # │ │ │ ┌───────────── month (1 - 12 or JAN-DEC) - # │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT) - # │ │ │ │ │ - - cron: "42 2 * * SUN,WED" - workflow_dispatch: - inputs: - push_wheels: - # Can be 'true' or 'false'. Default is 'false'. - # Warning: this will overwrite existing wheels. 
- description: > - Push wheels to Anaconda.org if the build succeeds - required: false - default: 'false' - -env: - FORCE_COLOR: 3 - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - - -jobs: - build-wasm-emscripten: - permissions: - contents: read # to fetch code (actions/checkout) - name: Build NumPy distribution for Pyodide - runs-on: ubuntu-22.04 - # To enable this workflow on a fork, comment out: - if: github.repository == 'numpy/numpy' - steps: - - name: Checkout NumPy - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - - uses: pypa/cibuildwheel@42728e866bbc80d544a70825bd9990b9a26f1a50 # 2.23.1 - env: - CIBW_PLATFORM: pyodide - - - name: Upload wheel artifact(s) - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 - with: - name: cp312-pyodide_wasm32 - path: ./wheelhouse/*.whl - if-no-files-found: error - - # Push to https://anaconda.org/scientific-python-nightly-wheels/numpy - # WARNING: this job will overwrite any existing WASM wheels. 
- upload-wheels: - name: Upload NumPy WASM wheels to Anaconda.org - runs-on: ubuntu-22.04 - permissions: {} - needs: [build-wasm-emscripten] - if: >- - (github.repository == 'numpy/numpy') && - (github.event_name == 'workflow_dispatch' && github.event.inputs.push_wheels == 'true') || - (github.event_name == 'schedule') - steps: - - name: Download wheel artifact(s) - uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1 - with: - path: wheelhouse/ - merge-multiple: true - - - name: Push to Anaconda PyPI index - uses: scientific-python/upload-nightly-action@82396a2ed4269ba06c6b2988bb4fd568ef3c3d6b # v0.6.1 - with: - artifacts_path: wheelhouse/ - anaconda_nightly_upload_token: ${{ secrets.NUMPY_NIGHTLY_UPLOAD_TOKEN }} diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml deleted file mode 100644 index 7d2edc869893..000000000000 --- a/.github/workflows/labeler.yml +++ /dev/null @@ -1,19 +0,0 @@ -name: "Pull Request Labeler" -on: - pull_request_target: - types: [opened] - -permissions: {} - -jobs: - pr-labeler: - runs-on: ubuntu-latest - permissions: - pull-requests: write # to add labels - steps: - - name: Label the PR - uses: gerrymanoim/pr-prefix-labeler@c8062327f6de59a9ae1c19f7f07cacd0b976b6fa # v3 - continue-on-error: true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - if: github.repository == 'numpy/numpy' diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml deleted file mode 100644 index b5bd098b7edd..000000000000 --- a/.github/workflows/linux.yml +++ /dev/null @@ -1,350 +0,0 @@ -name: Linux tests - -# This file is meant for testing across supported Python versions, build types -# and interpreters (PyPy, python-dbg, a pre-release Python in summer time), -# build-via-sdist, run benchmarks, measure code coverage, and other build -# options. 
- -on: - push: - branches: - # coverage comparison in the "full" step needs to run on main after merges - - main - pull_request: - branches: - - main - - maintenance/** - -defaults: - run: - shell: bash - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - -permissions: - contents: read # to fetch code (actions/checkout) - -jobs: - lint: - # To enable this job and subsequent jobs on a fork, comment out: - if: github.repository == 'numpy/numpy' && github.event_name != 'push' - runs-on: ubuntu-latest - continue-on-error: true - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-depth: 0 - persist-credentials: false - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: '3.11' - - name: Install linter requirements - run: - python -m pip install -r requirements/linter_requirements.txt - - name: Run linter on PR - env: - BASE_REF: ${{ github.base_ref }} - run: - python tools/linter.py - - smoke_test: - # To enable this job on a fork, comment out: - if: github.repository == 'numpy/numpy' - runs-on: ubuntu-latest - env: - MESON_ARGS: "-Dallow-noblas=true -Dcpu-baseline=none -Dcpu-dispatch=none" - strategy: - matrix: - version: ["3.11", "3.12", "3.13", "3.13t"] - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - uses: astral-sh/setup-uv@f94ec6bedd8674c4426838e6b50417d36b6ab231 - with: - python-version: ${{ matrix.version }} - enable-cache: false - - run: - uv pip install --python=${{ matrix.version }} pip - # TODO: remove cython nightly install when cython does a release - - name: Install nightly Cython - if: matrix.version == '3.13t' - run: | - pip install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple cython - - uses: ./.github/meson_actions - - 
pypy: - needs: [smoke_test] - runs-on: ubuntu-latest - if: github.event_name != 'push' - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: 'pypy3.11-v7.3.19' - - name: Setup using scipy-openblas - run: | - python -m pip install -r requirements/ci_requirements.txt - spin config-openblas --with-scipy-openblas=32 - - uses: ./.github/meson_actions - - debug: - needs: [smoke_test] - runs-on: ubuntu-24.04 - if: github.event_name != 'push' - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - name: Install debug Python - run: | - sudo apt-get update - sudo apt-get install python3-dbg ninja-build - - name: Build NumPy and install into venv - run: | - python3-dbg -m venv venv - source venv/bin/activate - pip install -U pip - pip install . 
-v -Csetup-args=-Dbuildtype=debug -Csetup-args=-Dallow-noblas=true - - name: Install test dependencies - run: | - source venv/bin/activate - pip install -r requirements/test_requirements.txt - - name: Run test suite - run: | - source venv/bin/activate - cd tools - pytest --timeout=600 --durations=10 --pyargs numpy -m "not slow" - - full: - # Install as editable, then run the full test suite with code coverage - needs: [smoke_test] - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: '3.11' - - name: Install build and test dependencies from PyPI - run: | - pip install -r requirements/build_requirements.txt - pip install -r requirements/test_requirements.txt - - name: Install gfortran and setup OpenBLAS (MacPython build) - run: | - set -xe - sudo apt update - sudo apt install gfortran libgfortran5 - python -m pip install -r requirements/ci32_requirements.txt - mkdir -p ./.openblas - python -c"import scipy_openblas32 as ob32; print(ob32.get_pkg_config())" > ./.openblas/scipy-openblas.pc - - - name: Install as editable - env: - PKG_CONFIG_PATH: ${{ github.workspace }}/.openblas - run: | - pip install -e . 
--no-build-isolation - - name: Run full test suite - run: | - pytest numpy --durations=10 --timeout=600 --cov-report=html:build/coverage - # TODO: gcov - env: - PYTHONOPTIMIZE: 2 - - - aarch64_test: - needs: [smoke_test] - if: github.repository == 'numpy/numpy' - runs-on: ubuntu-22.04-arm - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: '3.11' - - - name: Install Python dependencies - run: | - python -m pip install -r requirements/build_requirements.txt - python -m pip install -r requirements/test_requirements.txt - python -m pip install -r requirements/ci32_requirements.txt - mkdir -p ./.openblas - python -c"import scipy_openblas32 as ob32; print(ob32.get_pkg_config())" > ./.openblas/scipy-openblas.pc - - - name: Build - env: - PKG_CONFIG_PATH: ${{ github.workspace }}/.openblas - run: | - spin build - - - name: Test - run: | - spin test -j2 -m full -- --timeout=600 --durations=10 - - benchmark: - needs: [smoke_test] - runs-on: ubuntu-latest - if: github.event_name != 'push' - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: '3.11' - - name: Install build and benchmarking dependencies - run: | - sudo apt-get update - sudo apt-get install libopenblas-dev ninja-build - pip install asv virtualenv packaging -r requirements/build_requirements.txt - - name: Install NumPy - run: | - spin build -- -Dcpu-dispatch=none - # Ensure to keep the below steps as single-line bash commands (it's a - # workaround for asv#1333, and it may have side-effects on multi-line commands) - - name: Appease asv's need for machine info - shell: 'script -q 
-e -c "bash --noprofile --norc -eo pipefail {0}"' - run: | - asv machine --yes --config benchmarks/asv.conf.json - - name: Run benchmarks - shell: 'script -q -e -c "bash --noprofile --norc -eo pipefail {0}"' - run: | - spin bench --quick - # These are run on CircleCI - # - name: Check docstests - # shell: 'script -q -e -c "bash --noprofile --norc -eo pipefail {0}"' - # run: | - # pip install scipy-doctest==1.6.0 hypothesis==6.104.1 matplotlib scipy pytz pandas - # spin check-docs -v - # spin check-tutorials -v - - sdist: - needs: [smoke_test] - runs-on: ubuntu-latest - if: github.event_name != 'push' - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: '3.11' - - name: Install gfortran and setup OpenBLAS (sdist build) - run: | - set -xe - python -m pip install -r requirements/ci_requirements.txt - mkdir -p ./.openblas - python -c"import scipy_openblas64 as ob64; print(ob64.get_pkg_config())" > ./.openblas/scipy-openblas.pc - - name: Build a wheel via an sdist - env: - PKG_CONFIG_PATH: ${{ github.workspace }}/.openblas - run: | - pip install build - python -m build - pip install dist/numpy*.whl - - name: Install test dependencies - run: | - pip install -r requirements/test_requirements.txt - - name: Run test suite - run: | - cd tools - pytest --pyargs numpy -m "not slow" - - array_api_tests: - needs: [smoke_test] - runs-on: ubuntu-latest - if: github.event_name != 'push' - steps: - - name: Checkout NumPy - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - name: Checkout array-api-tests - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - repository: data-apis/array-api-tests - ref: 
'827edd804bcace9d64176b8115138d29ae3e8dec' # Latest commit as of 2024-07-30 - submodules: 'true' - path: 'array-api-tests' - persist-credentials: false - - name: Set up Python - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: '3.11' - - name: Install build and test dependencies from PyPI - run: | - python -m pip install -r requirements/build_requirements.txt - python -m pip install -r requirements/test_requirements.txt - python -m pip install -r array-api-tests/requirements.txt - - name: Build and install NumPy - run: | - python -m pip install . -v -Csetup-args=-Dallow-noblas=true -Csetup-args=-Dcpu-baseline=none -Csetup-args=-Dcpu-dispatch=none - - name: Run the test suite - env: - ARRAY_API_TESTS_MODULE: numpy - PYTHONWARNINGS: 'ignore::UserWarning::,ignore::DeprecationWarning::,ignore::RuntimeWarning::' - run: | - cd ${GITHUB_WORKSPACE}/array-api-tests - pytest array_api_tests -v -c pytest.ini --ci --max-examples=100 --derandomize --disable-deadline --xfails-file ${GITHUB_WORKSPACE}/tools/ci/array-api-xfails.txt - - custom_checks: - needs: [smoke_test] - runs-on: ubuntu-latest - if: github.event_name != 'push' - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: '3.11' - - name: Install build and test dependencies from PyPI - run: | - pip install -r requirements/build_requirements.txt - pip install -r requirements/test_requirements.txt - pip install vulture - - name: Build and install NumPy - run: | - # Install using the fastest way to build (no BLAS, no SIMD) - spin build -j2 -- -Dallow-noblas=true -Dcpu-baseline=none -Dcpu-dispatch=none - - name: Check build-internal dependencies - run: | - ninja -C build -t missingdeps - - name: Check installed test and stub files - run: | - python 
tools/check_installed_files.py $(find ./build-install -path '*/site-packages/numpy') - - name: Check for unreachable code paths in Python modules - run: | - # Need the explicit `bash -c` here because `grep` returns exit code 1 for no matches - bash -c "! vulture . --min-confidence 100 --exclude doc/,numpy/distutils/,vendored-meson/ | grep 'unreachable'" - - name: Check usage of install_tag - run: | - rm -rf build-install - ./vendored-meson/meson/meson.py install -C build --destdir ../build-install --tags=runtime,python-runtime,devel - python tools/check_installed_files.py $(find ./build-install -path '*/site-packages/numpy') --no-tests diff --git a/.github/workflows/linux_blas.yml b/.github/workflows/linux_blas.yml deleted file mode 100644 index a2c5e56eaa9b..000000000000 --- a/.github/workflows/linux_blas.yml +++ /dev/null @@ -1,410 +0,0 @@ -name: BLAS tests (Linux) - -# This file is meant for testing different BLAS/LAPACK flavors and build -# options on Linux. All other yml files for Linux will only test without BLAS -# (mostly because that's easier and faster to build) or with the same 64-bit -# OpenBLAS build that is used in the wheel jobs. -# -# Jobs and their purpose: -# -# - openblas32_stable_nightly: -# Uses the 32-bit OpenBLAS builds, both the latest stable release -# and a nightly build. -# - openblas_no_pkgconfig_fedora: -# Test OpenBLAS on Fedora. Fedora doesn't ship .pc files for OpenBLAS, -# hence this exercises the "system dependency" detection method. -# - flexiblas_fedora: -# Tests FlexiBLAS (the default on Fedora for its own packages), via -# pkg-config. FlexiBLAS allows runtime switching of BLAS/LAPACK -# libraries, which is a useful capability (not tested in this job). -# - openblas_cmake: -# Tests whether OpenBLAS LP64 is detected correctly when only CMake -# and not pkg-config is installed. -# - netlib-debian: -# Installs libblas/liblapack, which in Debian contains libcblas within -# libblas. 
-# - netlib-split: -# Installs vanilla Netlib blas/lapack with separate libcblas, which is -# the last option tried in auto-detection. -# - mkl: -# Tests MKL installed from PyPI (because easiest/fastest, if broken) in -# 3 ways: both LP64 and ILP64 via pkg-config, and then using the -# Single Dynamic Library (SDL, or `libmkl_rt`). -# - blis: -# Simple test for LP64 via pkg-config -# - atlas: -# Simple test for LP64 via pkg-config - -on: - pull_request: - branches: - - main - - maintenance/** - -defaults: - run: - shell: bash - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - -permissions: - contents: read # to fetch code (actions/checkout) - -jobs: - openblas32_stable_nightly: - # To enable this workflow on a fork, comment out: - if: github.repository == 'numpy/numpy' - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - USE_NIGHTLY_OPENBLAS: [false, true] - env: - USE_NIGHTLY_OPENBLAS: ${{ matrix.USE_NIGHTLY_OPENBLAS }} - name: "Test Linux (${{ matrix.USE_NIGHTLY_OPENBLAS && 'nightly' || 'stable' }} OpenBLAS)" - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: '3.11' - - - name: Install dependencies - run: | - pip install -r requirements/build_requirements.txt - # Install OpenBLAS - if [[ $USE_NIGHTLY_OPENBLAS == "true" ]]; then - python -m pip install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple scipy-openblas32 - else - python -m pip install -r requirements/ci32_requirements.txt - fi - mkdir -p ./.openblas - python -c"import scipy_openblas32 as ob32; print(ob32.get_pkg_config())" > ./.openblas/scipy-openblas.pc - echo "PKG_CONFIG_PATH=${{ github.workspace }}/.openblas" >> $GITHUB_ENV - ld_library_path=$(python -c"import scipy_openblas32 as 
ob32; print(ob32.get_lib_dir())") - echo "LD_LIBRARY_PATH=$ld_library_path" >> $GITHUB_ENV - - - name: Build - shell: 'script -q -e -c "bash --noprofile --norc -eo pipefail {0}"' - env: - TERM: xterm-256color - run: - spin build -- --werror -Dallow-noblas=false - - - name: Check build-internal dependencies - run: - ninja -C build -t missingdeps - - - name: Check installed test and stub files - run: - python tools/check_installed_files.py $(find ./build-install -path '*/site-packages/numpy') - - name: Ensure scipy-openblas - run: | - set -ex - spin python tools/check_openblas_version.py 0.3.26 - - - name: Test - shell: 'script -q -e -c "bash --noprofile --norc -eo pipefail {0}"' - env: - TERM: xterm-256color - run: | - pip install pytest pytest-xdist hypothesis typing_extensions pytest-timeout - spin test -j auto -- --timeout=600 --durations=10 - - - openblas_no_pkgconfig_fedora: - if: github.repository == 'numpy/numpy' - runs-on: ubuntu-latest - container: fedora:39 - name: "OpenBLAS (Fedora, no pkg-config, LP64/ILP64)" - steps: - - name: Install system dependencies - run: | - dnf install git gcc-gfortran g++ python3-devel openblas-devel -y - - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - - name: Install dependencies - run: | - pip install -r requirements/build_requirements.txt - pip install pytest hypothesis typing_extensions pytest-timeout - - - name: Build (LP64) - run: spin build -- -Dblas=openblas -Dlapack=openblas -Ddisable-optimization=true -Dallow-noblas=false - - - name: Test - run: spin test -- numpy/linalg --timeout=600 --durations=10 - - - name: Build (ILP64) - run: | - rm -rf build - spin build -- -Duse-ilp64=true -Ddisable-optimization=true -Dallow-noblas=false - - - name: Test - run: spin test -- numpy/linalg --timeout=600 --durations=10 - - - flexiblas_fedora: - if: github.repository == 'numpy/numpy' - runs-on: ubuntu-latest - container: 
fedora:39 - name: "FlexiBLAS (LP64, ILP64 on Fedora)" - steps: - - name: Install system dependencies - run: | - dnf install git gcc-gfortran g++ python3-devel flexiblas-devel -y - - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - - name: Install dependencies - run: | - pip install -r requirements/build_requirements.txt - pip install pytest hypothesis typing_extensions pytest-timeout - - - name: Build - run: spin build -- -Ddisable-optimization=true -Dallow-noblas=false - - - name: Test - run: spin test -- numpy/linalg --timeout=600 --durations=10 - - - name: Build (ILP64) - run: | - rm -rf build - spin build -- -Ddisable-optimization=true -Duse-ilp64=true -Dallow-noblas=false - - - name: Test (ILP64) - run: spin test -- numpy/linalg --timeout=600 --durations=10 - - - openblas_cmake: - if: github.repository == 'numpy/numpy' - runs-on: ubuntu-latest - name: "OpenBLAS with CMake" - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: '3.11' - - - name: Install dependencies - run: | - pip install -r requirements/build_requirements.txt - pip install pytest pytest-xdist hypothesis typing_extensions pytest-timeout - sudo apt-get update - sudo apt-get install libopenblas-dev cmake - sudo apt-get remove pkg-config - - - name: Build - run: spin build -- -Ddisable-optimization=true -Dallow-noblas=false - - - name: Test - run: spin test -j auto -- numpy/linalg --timeout=600 --durations=10 - - - netlib-debian: - if: github.repository == 'numpy/numpy' - runs-on: ubuntu-latest - name: "Debian libblas/liblapack" - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - 
persist-credentials: false - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: '3.11' - - - name: Install dependencies - run: | - pip install -r requirements/build_requirements.txt - sudo apt-get update - sudo apt-get install liblapack-dev pkg-config - - - name: Build - run: | - spin build -- -Ddisable-optimization=true -Dallow-noblas=false - - - name: Test - run: | - pip install pytest pytest-xdist hypothesis typing_extensions pytest-timeout - spin test -j auto -- numpy/linalg --timeout=600 --durations=10 - - - netlib-split: - if: github.repository == 'numpy/numpy' - runs-on: ubuntu-latest - container: opensuse/tumbleweed - name: "OpenSUSE Netlib BLAS/LAPACK" - steps: - - name: Install system dependencies - run: | - # No blas.pc on OpenSUSE as of Nov 2023, so no need to install pkg-config. - # If it is needed in the future, use install name `pkgconf-pkg-config` - zypper install -y git gcc-c++ python3-pip python3-devel blas cblas lapack - - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - - name: Install PyPI dependencies - run: | - pip install --break-system-packages -r requirements/build_requirements.txt - - - name: Build - run: | - spin build -- -Dblas=blas -Dlapack=lapack -Ddisable-optimization=true -Dallow-noblas=false - - - name: Test - run: | - pip install --break-system-packages pytest pytest-xdist hypothesis typing_extensions pytest-timeout - spin test -j auto -- numpy/linalg --timeout=600 --durations=10 - - - mkl: - if: github.repository == 'numpy/numpy' - runs-on: ubuntu-latest - name: "MKL (LP64, ILP64, SDL)" - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: '3.11' - - - 
name: Install dependencies - run: | - pip install -r requirements/build_requirements.txt - pip install pytest pytest-xdist hypothesis typing_extensions pytest-timeout - pip install mkl mkl-devel - - - name: Repair MKL pkg-config files and symlinks - run: | - # MKL 2023.2 works when installed from conda-forge (except for `-iomp` - # and `-tbb` pkg-config files), Spack, or with the standalone Intel - # installer. The standalone installer is the worst option, since it's - # large and clumsy to install and requires running a setvars.sh script - # before things work. The PyPI MKL packages are broken and need the - # fixes in this step. For details, see - # https://github.com/conda-forge/intel_repack-feedstock/issues/34 - cd $Python3_ROOT_DIR/lib/pkgconfig - sed -i 's/\/intel64//g' mkl*.pc - # add the expected .so -> .so.2 symlinks to fix linking - cd .. - for i in $( ls libmkl*.so.2 ); do ln -s $i ${i%.*}; done - - - name: Build with defaults (LP64) - run: | - pkg-config --libs mkl-dynamic-lp64-seq # check link flags - spin build -- -Ddisable-optimization=true -Dallow-noblas=false - - - name: Test - run: spin test -- numpy/linalg --timeout=600 --durations=10 - - - name: Build with ILP64 - run: | - git clean -xdf > /dev/null - pkg-config --libs mkl-dynamic-ilp64-seq - spin build -- -Duse-ilp64=true -Ddisable-optimization=true -Dallow-noblas=false - - - name: Test - run: spin test -- numpy/linalg --timeout=600 --durations=10 - - - name: Build without pkg-config (default options, SDL) - run: | - git clean -xdf > /dev/null - pushd $Python3_ROOT_DIR/lib/pkgconfig - rm mkl*.pc - popd - export MKLROOT=$Python3_ROOT_DIR - spin build -- -Ddisable-optimization=true -Dallow-noblas=false - - - name: Test - run: spin test -- numpy/linalg --timeout=600 --durations=10 - - blis: - if: github.repository == 'numpy/numpy' - runs-on: ubuntu-latest - name: "BLIS" - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - 
fetch-tags: true - persist-credentials: false - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: '3.11' - - - name: Install dependencies - run: | - pip install -r requirements/build_requirements.txt - pip install pytest pytest-xdist hypothesis typing_extensions pytest-timeout - sudo apt-get update - sudo apt-get install libblis-dev libopenblas-dev pkg-config - - - name: Add BLIS pkg-config file - run: | - # Needed because blis.pc missing in Debian: - # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=989076 - # The alternative here would be to use another distro or Miniforge - sudo cp tools/ci/_blis_debian.pc /usr/lib/x86_64-linux-gnu/pkgconfig/blis.pc - # Check if the patch works: - pkg-config --libs blis - pkg-config --cflags blis - - - name: Build - run: spin build -- -Dblas=blis -Ddisable-optimization=true -Dallow-noblas=false - - - name: Test - run: spin test -- numpy/linalg --timeout=600 --durations=10 - - atlas: - if: github.repository == 'numpy/numpy' - runs-on: ubuntu-latest - name: "ATLAS" - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: '3.11' - - - name: Install dependencies - run: | - pip install -r requirements/build_requirements.txt - pip install pytest pytest-xdist hypothesis typing_extensions pytest-timeout - sudo apt-get update - sudo apt-get install libatlas-base-dev pkg-config - - - name: Build - run: spin build -- -Dblas=blas-atlas -Dlapack=lapack-atlas -Ddisable-optimization=true -Dallow-noblas=false - - - name: Test - run: spin test -- numpy/linalg - diff --git a/.github/workflows/linux_musl.yml b/.github/workflows/linux_musl.yml deleted file mode 100644 index 547c031bc84b..000000000000 --- a/.github/workflows/linux_musl.yml +++ /dev/null @@ -1,69 +0,0 @@ -name: Test 
musllinux_x86_64 - -on: - pull_request: - branches: - - main - - maintenance/** - - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - - -permissions: - contents: read # to fetch code (actions/checkout) - - -jobs: - musllinux_x86_64: - runs-on: ubuntu-latest - # To enable this workflow on a fork, comment out: - if: github.repository == 'numpy/numpy' - container: - # Use container used for building musllinux wheels - # it has git installed, all the pythons, etc - image: quay.io/pypa/musllinux_1_2_x86_64 - - steps: - - name: setup - run: | - apk update --quiet - - # using git commands to clone because versioneer doesn't work when - # actions/checkout is used for the clone step in a container - - git config --global --add safe.directory $PWD - - if [ $GITHUB_EVENT_NAME != pull_request ]; then - git clone --recursive --branch=$GITHUB_REF_NAME https://github.com/${GITHUB_REPOSITORY}.git $GITHUB_WORKSPACE - git reset --hard $GITHUB_SHA - else - git clone --recursive https://github.com/${GITHUB_REPOSITORY}.git $GITHUB_WORKSPACE - git fetch origin $GITHUB_REF:my_ref_name - git checkout $GITHUB_BASE_REF - git -c user.email="you@example.com" merge --no-commit my_ref_name - fi - git submodule update --init - - ln -s /usr/local/bin/python3.11 /usr/local/bin/python - - - name: test-musllinux_x86_64 - env: - PKG_CONFIG_PATH: ${{ github.workspace }}/.openblas - run: | - python -m venv test_env - source test_env/bin/activate - - pip install -r requirements/ci_requirements.txt - pip install -r requirements/build_requirements.txt -r requirements/test_requirements.txt - - # use meson to build and test - spin build --with-scipy-openblas=64 - spin test -j auto -- --timeout=600 --durations=10 - - - name: Meson Log - shell: bash - run: | - cat build/meson-logs/meson-log.txt diff --git a/.github/workflows/linux_qemu.yml b/.github/workflows/linux_qemu.yml deleted file mode 100644 index 6324de0ac85c..000000000000 --- 
a/.github/workflows/linux_qemu.yml +++ /dev/null @@ -1,280 +0,0 @@ -# Meson's Python module doesn't support crosscompiling, -# and python dependencies may be another potential hurdle. -# There might also be a need to run runtime tests during configure time. -# -# The recommended practice is to rely on Docker to provide the x86_64 crosscompile toolchain, -# enabling native execution via binfmt. -# -# In simpler terms, everything except the crosscompile toolchain will be emulated. - -name: Linux Qemu tests - -on: - pull_request: - branches: - - main - - maintenance/** - workflow_dispatch: - -defaults: - run: - shell: bash - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - -permissions: - contents: read - -jobs: - linux_qemu: - # Only workflow_dispatch is enabled on forks. - # To enable this job and subsequent jobs on a fork for other events, comment out: - if: github.repository == 'numpy/numpy' || github.event_name == 'workflow_dispatch' - runs-on: ubuntu-22.04 - continue-on-error: true - strategy: - fail-fast: false - matrix: - BUILD_PROP: - - [ - "armhf", - "arm-linux-gnueabihf", - "arm32v7/ubuntu:22.04", - "-Dallow-noblas=true", - # test_unary_spurious_fpexception is currently skipped - # FIXME(@seiko2plus): Requires confirmation for the following issue: - # The presence of an FP invalid exception caused by sqrt. Unsure if this is a qemu bug or not. 
- "(test_kind or test_multiarray or test_simd or test_umath or test_ufunc) and not test_unary_spurious_fpexception", - "arm" - ] - - [ - "ppc64le", - "powerpc64le-linux-gnu", - "ppc64le/ubuntu:22.04", - "-Dallow-noblas=true", - "test_kind or test_multiarray or test_simd or test_umath or test_ufunc", - "ppc64le" - ] - - [ - "ppc64le - baseline(Power9)", - "powerpc64le-linux-gnu", - "ppc64le/ubuntu:22.04", - "-Dallow-noblas=true -Dcpu-baseline=vsx3", - "test_kind or test_multiarray or test_simd or test_umath or test_ufunc", - "ppc64le" - ] - - [ - "s390x", - "s390x-linux-gnu", - "s390x/ubuntu:22.04", - "-Dallow-noblas=true", - # Skipping TestRationalFunctions.test_gcd_overflow test - # because of a possible qemu bug that appears to be related to int64 overflow in absolute operation. - # TODO(@seiko2plus): Confirm the bug and provide a minimal reproducer, then report it to upstream. - "(test_kind or test_multiarray or test_simd or test_umath or test_ufunc) and not test_gcd_overflow", - "s390x" - ] - - [ - "s390x - baseline(Z13)", - "s390x-linux-gnu", - "s390x/ubuntu:22.04", - "-Dallow-noblas=true -Dcpu-baseline=vx", - "(test_kind or test_multiarray or test_simd or test_umath or test_ufunc) and not test_gcd_overflow", - "s390x" - ] - - [ - "riscv64", - "riscv64-linux-gnu", - "riscv64/ubuntu:22.04", - "-Dallow-noblas=true", - "test_kind or test_multiarray or test_simd or test_umath or test_ufunc", - "riscv64" - ] - env: - TOOLCHAIN_NAME: ${{ matrix.BUILD_PROP[1] }} - DOCKER_CONTAINER: ${{ matrix.BUILD_PROP[2] }} - MESON_OPTIONS: ${{ matrix.BUILD_PROP[3] }} - RUNTIME_TEST_FILTER: ${{ matrix.BUILD_PROP[4] }} - ARCH: ${{ matrix.BUILD_PROP[5] }} - TERM: xterm-256color - - name: "${{ matrix.BUILD_PROP[0] }}" - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - - name: Initialize binfmt_misc for qemu-user-static - run: | - # see 
https://hub.docker.com/r/tonistiigi/binfmt for available versions - docker run --rm --privileged tonistiigi/binfmt:qemu-v9.2.2-52 --install all - - - name: Install GCC cross-compilers - run: | - sudo apt update - sudo apt install -y ninja-build gcc-${TOOLCHAIN_NAME} g++-${TOOLCHAIN_NAME} gfortran-${TOOLCHAIN_NAME} - - - name: Cache docker container - uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4.2.2 - id: container-cache - with: - path: ~/docker_${{ matrix.BUILD_PROP[1] }} - key: container-${{ runner.os }}-${{ matrix.BUILD_PROP[1] }}-${{ matrix.BUILD_PROP[2] }}-${{ hashFiles('requirements/build_requirements.txt') }} - - - name: Creates new container - if: steps.container-cache.outputs.cache-hit != 'true' - run: | - docker run --platform=linux/${ARCH} --name the_container --interactive \ - -v /:/host -v $(pwd):/numpy ${DOCKER_CONTAINER} /bin/bash -c " - apt update && - apt install -y cmake git python3 python-is-python3 python3-dev python3-pip && - mkdir -p /lib64 && ln -s /host/lib64/ld-* /lib64/ && - ln -s /host/lib/x86_64-linux-gnu /lib/x86_64-linux-gnu && - rm -rf /usr/${TOOLCHAIN_NAME} && ln -s /host/usr/${TOOLCHAIN_NAME} /usr/${TOOLCHAIN_NAME} && - rm -rf /usr/lib/gcc/${TOOLCHAIN_NAME} && ln -s /host/usr/lib/gcc-cross/${TOOLCHAIN_NAME} /usr/lib/gcc/${TOOLCHAIN_NAME} && - rm -f /usr/bin/gcc && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-gcc /usr/bin/gcc && - rm -f /usr/bin/g++ && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-g++ /usr/bin/g++ && - rm -f /usr/bin/gfortran && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-gfortran /usr/bin/gfortran && - rm -f /usr/bin/ar && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-ar /usr/bin/ar && - rm -f /usr/bin/as && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-as /usr/bin/as && - rm -f /usr/bin/ld && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-ld /usr/bin/ld && - rm -f /usr/bin/ld.bfd && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-ld.bfd /usr/bin/ld.bfd && - rm -f /usr/bin/ninja && ln -s /host/usr/bin/ninja /usr/bin/ninja && - git config --global 
--add safe.directory /numpy && - # No need to build ninja from source, the host ninja is used for the build - grep -v ninja /numpy/requirements/build_requirements.txt > /tmp/build_requirements.txt && - python -m pip install -r /tmp/build_requirements.txt && - python -m pip install pytest pytest-xdist hypothesis typing_extensions pytest-timeout && - rm -f /usr/local/bin/ninja && mkdir -p /usr/local/bin && ln -s /host/usr/bin/ninja /usr/local/bin/ninja - " - docker commit the_container the_container - mkdir -p "~/docker_${TOOLCHAIN_NAME}" - docker save -o "~/docker_${TOOLCHAIN_NAME}/the_container.tar" the_container - - - name: Load container from cache - if: steps.container-cache.outputs.cache-hit == 'true' - run: docker load -i "~/docker_${TOOLCHAIN_NAME}/the_container.tar" - - - name: Meson Build - run: | - docker run --rm --platform=linux/${ARCH} -e "TERM=xterm-256color" \ - -v $(pwd):/numpy -v /:/host the_container \ - /bin/script -e -q -c "/bin/bash --noprofile --norc -eo pipefail -c ' - cd /numpy && spin build --clean -- ${MESON_OPTIONS} - '" - - - name: Meson Log - if: always() - run: 'cat build/meson-logs/meson-log.txt' - - - name: Run Tests - run: | - docker run --rm --platform=linux/${ARCH} -e "TERM=xterm-256color" \ - -v $(pwd):/numpy -v /:/host the_container \ - /bin/script -e -q -c "/bin/bash --noprofile --norc -eo pipefail -c ' - export F90=/usr/bin/gfortran - cd /numpy && spin test -- --timeout=600 --durations=10 -k \"${RUNTIME_TEST_FILTER}\" - '" - - - linux_loongarch64_qemu: - # Only workflow_dispatch is enabled on forks. 
- # To enable this job and subsequent jobs on a fork for other events, comment out: - if: github.repository == 'numpy/numpy' || github.event_name == 'workflow_dispatch' - runs-on: ubuntu-24.04 - continue-on-error: true - strategy: - fail-fast: false - matrix: - BUILD_PROP: - - [ - "loongarch64", - "loongarch64-linux-gnu", - "cnclarechen/numpy-loong64-debian:v1", - "-Dallow-noblas=true", - "test_kind or test_multiarray or test_simd or test_umath or test_ufunc", - "loong64" - ] - env: - TOOLCHAIN_NAME: ${{ matrix.BUILD_PROP[1] }} - DOCKER_CONTAINER: ${{ matrix.BUILD_PROP[2] }} - MESON_OPTIONS: ${{ matrix.BUILD_PROP[3] }} - RUNTIME_TEST_FILTER: ${{ matrix.BUILD_PROP[4] }} - ARCH: ${{ matrix.BUILD_PROP[5] }} - TERM: xterm-256color - - name: "${{ matrix.BUILD_PROP[0] }}" - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - - - name: Initialize binfmt_misc for qemu-user-static - run: | - docker run --rm --privileged loongcr.lcpu.dev/multiarch/archlinux --reset -p yes - - - name: Install GCC cross-compilers - run: | - sudo apt update - sudo apt install -y ninja-build gcc-14-${TOOLCHAIN_NAME} g++-14-${TOOLCHAIN_NAME} gfortran-14-${TOOLCHAIN_NAME} - - - name: Cache docker container - uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4.2.2 - id: container-cache - with: - path: ~/docker_${{ matrix.BUILD_PROP[1] }} - key: container-${{ runner.os }}-${{ matrix.BUILD_PROP[1] }}-${{ matrix.BUILD_PROP[2] }}-${{ hashFiles('requirements/build_requirements.txt') }} - - - name: Creates new container - if: steps.container-cache.outputs.cache-hit != 'true' - run: | - docker run --platform=linux/${ARCH} --name the_container --interactive \ - -v /:/host -v $(pwd):/numpy ${DOCKER_CONTAINER} /bin/bash -c " - mkdir -p /lib64 && ln -s /host/lib64/ld-* /lib64/ && - ln -s /host/lib/x86_64-linux-gnu /lib/x86_64-linux-gnu && - ln -s /host/usr/${TOOLCHAIN_NAME} /usr/${TOOLCHAIN_NAME} && - ln -s 
/host/usr/lib/gcc-cross/${TOOLCHAIN_NAME} /usr/lib/gcc/${TOOLCHAIN_NAME} && - rm -f /usr/bin/gcc && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-gcc-14 /usr/bin/gcc && - rm -f /usr/bin/g++ && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-g++-14 /usr/bin/g++ && - rm -f /usr/bin/gfortran && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-gfortran-14 /usr/bin/gfortran && - rm -f /usr/bin/ar && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-ar /usr/bin/ar && - rm -f /usr/bin/as && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-as /usr/bin/as && - rm -f /usr/bin/ld && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-ld /usr/bin/ld && - rm -f /usr/bin/ld.bfd && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-ld.bfd /usr/bin/ld.bfd && - rm -f /usr/bin/ninja && ln -s /host/usr/bin/ninja /usr/bin/ninja && - git config --global --add safe.directory /numpy && - python -m pip install --break-system-packages -r /numpy/requirements/build_requirements.txt && - python -m pip install --break-system-packages pytest pytest-xdist hypothesis typing_extensions - " - docker commit the_container the_container - mkdir -p "~/docker_${TOOLCHAIN_NAME}" - docker save -o "~/docker_${TOOLCHAIN_NAME}/the_container.tar" the_container - - - name: Load container from cache - if: steps.container-cache.outputs.cache-hit == 'true' - run: docker load -i "~/docker_${TOOLCHAIN_NAME}/the_container.tar" - - - name: Meson Build - run: | - docker run --rm --platform=linux/${ARCH} -e "TERM=xterm-256color" \ - -v $(pwd):/numpy -v /:/host the_container \ - /bin/script -e -q -c "/bin/bash --noprofile --norc -eo pipefail -c ' - cd /numpy/ && spin build --clean -- ${MESON_OPTIONS} - '" - - - name: Meson Log - if: always() - run: 'cat build/meson-logs/meson-log.txt' - - - name: Run Tests - run: | - docker run --rm --platform=linux/${ARCH} -e "TERM=xterm-256color" \ - -v $(pwd):/numpy -v /:/host the_container \ - /bin/script -e -q -c "/bin/bash --noprofile --norc -eo pipefail -c ' - cd /numpy && spin test -- -k \"${RUNTIME_TEST_FILTER}\" - '" diff --git 
a/.github/workflows/linux_simd.yml b/.github/workflows/linux_simd.yml deleted file mode 100644 index 265261603a6f..000000000000 --- a/.github/workflows/linux_simd.yml +++ /dev/null @@ -1,289 +0,0 @@ -name: Linux SIMD tests - -# This file is meant for testing different SIMD-related build options and -# optimization levels. See `meson_options.txt` for the available build options. -# -# Jobs and their purposes: -# -# - baseline_only: -# Focuses on completing as quickly as possible and acts as a filter for other, more resource-intensive jobs. -# Utilizes only the default baseline targets (e.g., SSE3 on X86_64) without enabling any runtime dispatched features. -# -# - old_gcc: -# Tests the oldest supported GCC version with default CPU/baseline/dispatch settings. -# -# - without_optimizations: -# Completely disables all SIMD optimizations and other compiler optimizations such as loop unrolling. -# -# - native: -# Tests against the host CPU features set as the baseline without enabling any runtime dispatched features. -# Intended to assess the entire NumPy codebase against host flags, even for code sections lacking handwritten SIMD intrinsics. -# -# - without_avx512/avx2/fma3: -# Uses runtime SIMD dispatching but disables AVX2, FMA3, and AVX512. -# Intended to evaluate 128-bit SIMD extensions without FMA support. -# -# - without_avx512: -# Uses runtime SIMD dispatching but disables AVX512. -# Intended to evaluate 128-bit/256-bit SIMD extensions. -# -# - intel_sde: -# Executes only the SIMD tests for various AVX512 SIMD extensions under the Intel Software Development Emulator (SDE). 
-# -on: - pull_request: - branches: - - main - - maintenance/** - -defaults: - run: - shell: 'script -q -e -c "bash --noprofile --norc -eo pipefail {0}"' - -env: - TERM: xterm-256color - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - -permissions: - contents: read # to fetch code (actions/checkout) - -jobs: - baseline_only: - # To enable this workflow on a fork, comment out: - if: github.repository == 'numpy/numpy' - runs-on: ubuntu-latest - env: - MESON_ARGS: "-Dallow-noblas=true -Dcpu-dispatch=none" - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: '3.11' - - uses: ./.github/meson_actions - name: Build/Test - - old_gcc: - if: github.event_name != 'push' - needs: [baseline_only] - runs-on: ubuntu-latest - env: - MESON_ARGS: "-Dallow-noblas=true" - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: '3.11' - - - name: Install GCC9/10 - run: | - echo "deb http://archive.ubuntu.com/ubuntu focal main universe" | sudo tee /etc/apt/sources.list.d/focal.list - sudo apt update - sudo apt install -y g++-9 g++-10 - - - name: Enable gcc-9 - run: | - sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-9 1 - sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-9 1 - - - uses: ./.github/meson_actions - name: Build/Test against gcc-9 - - - name: Enable gcc-10 - run: | - sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 2 - sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-10 2 - - - uses: ./.github/meson_actions - name: 
Build/Test against gcc-10 - - arm64_simd: - if: github.repository == 'numpy/numpy' - needs: [baseline_only] - runs-on: ubuntu-22.04-arm - strategy: - fail-fast: false - matrix: - config: - - name: "baseline only" - args: "-Dallow-noblas=true -Dcpu-dispatch=none" - - name: "with ASIMD" - args: "-Dallow-noblas=true -Dcpu-baseline=asimd" - - name: "native" - args: "-Dallow-noblas=true -Dcpu-baseline=native -Dcpu-dispatch=none" - name: "ARM64 SIMD - ${{ matrix.config.name }}" - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: '3.11' - - name: Install dependencies - run: | - python -m pip install -r requirements/build_requirements.txt - python -m pip install pytest pytest-xdist hypothesis typing_extensions pytest-timeout - - name: Build - run: | - spin build -- ${{ matrix.config.args }} - - name: Test - run: | - spin test -- --timeout=600 --durations=10 - - specialize: - needs: [baseline_only] - runs-on: ubuntu-latest - if: github.event_name != 'push' - continue-on-error: true - strategy: - fail-fast: false - matrix: - BUILD_PROP: - - [ - "without optimizations", - "-Dallow-noblas=true -Ddisable-optimization=true", - "3.12" - ] - - [ - "native", - "-Dallow-noblas=true -Dcpu-baseline=native -Dcpu-dispatch=none", - "3.11" - ] - - [ - "without avx512", - "-Dallow-noblas=true -Dcpu-dispatch=SSSE3,SSE41,POPCNT,SSE42,AVX,F16C,AVX2,FMA3", - "3.11" - ] - - [ - "without avx512/avx2/fma3", - "-Dallow-noblas=true -Dcpu-dispatch=SSSE3,SSE41,POPCNT,SSE42,AVX,F16C", - "3.11" - ] - - env: - MESON_ARGS: ${{ matrix.BUILD_PROP[1] }} - - name: "${{ matrix.BUILD_PROP[0] }}" - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - uses: 
actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: "${{ matrix.BUILD_PROP[2] }}" - - uses: ./.github/meson_actions - name: Build/Test - - intel_sde_avx512: - needs: [baseline_only] - runs-on: ubuntu-24.04 - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: '3.11' - - - name: Install Intel SDE - run: | - curl -o /tmp/sde.tar.xz https://downloadmirror.intel.com/788820/sde-external-9.27.0-2023-09-13-lin.tar.xz - mkdir /tmp/sde && tar -xvf /tmp/sde.tar.xz -C /tmp/sde/ - sudo mv /tmp/sde/* /opt/sde && sudo ln -s /opt/sde/sde64 /usr/bin/sde - - - name: Install dependencies - run: | - python -m pip install -r requirements/build_requirements.txt - python -m pip install pytest pytest-xdist hypothesis typing_extensions - - - name: Build - run: CC=gcc-13 CXX=g++-13 spin build -- -Dallow-noblas=true -Dcpu-baseline=avx512_skx -Dtest-simd='BASELINE,AVX512_KNL,AVX512_KNM,AVX512_SKX,AVX512_CLX,AVX512_CNL,AVX512_ICL,AVX512_SPR' - - - name: Meson Log - if: always() - run: cat build/meson-logs/meson-log.txt - - - name: SIMD tests (SKX) - run: | - export NUMPY_SITE=$(realpath build-install/usr/lib/python*/site-packages/) - export PYTHONPATH="$PYTHONPATH:$NUMPY_SITE" - cd build-install && - sde -skx -- python -c "import numpy; numpy.show_config()" && - sde -skx -- python -m pytest $NUMPY_SITE/numpy/_core/tests/test_simd* - - - name: linalg/ufunc/umath tests (TGL) - run: | - export NUMPY_SITE=$(realpath build-install/usr/lib/python*/site-packages/) - export PYTHONPATH="$PYTHONPATH:$NUMPY_SITE" - cd build-install && - sde -tgl -- python -c "import numpy; numpy.show_config()" && - sde -tgl -- python -m pytest $NUMPY_SITE/numpy/_core/tests/test_umath* \ - $NUMPY_SITE/numpy/_core/tests/test_ufunc.py \ - 
$NUMPY_SITE/numpy/_core/tests/test_multiarray.py \ - $NUMPY_SITE/numpy/linalg/tests/test_* - - - intel_sde_spr: - needs: [baseline_only] - runs-on: ubuntu-24.04 - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: '3.11' - - - name: Install Intel SDE - run: | - curl -o /tmp/sde.tar.xz https://downloadmirror.intel.com/788820/sde-external-9.27.0-2023-09-13-lin.tar.xz - mkdir /tmp/sde && tar -xvf /tmp/sde.tar.xz -C /tmp/sde/ - sudo mv /tmp/sde/* /opt/sde && sudo ln -s /opt/sde/sde64 /usr/bin/sde - - - name: Install dependencies - run: | - python -m pip install -r requirements/build_requirements.txt - python -m pip install pytest pytest-xdist hypothesis typing_extensions - - - name: Build - run: CC=gcc-13 CXX=g++-13 spin build -- -Dallow-noblas=true -Dcpu-baseline=avx512_spr - - - name: Meson Log - if: always() - run: cat build/meson-logs/meson-log.txt - - - name: SIMD tests (SPR) - run: | - export NUMPY_SITE=$(realpath build-install/usr/lib/python*/site-packages/) - export PYTHONPATH="$PYTHONPATH:$NUMPY_SITE" - cd build-install && - sde -spr -- python -c "import numpy; numpy.show_config()" && - sde -spr -- python -m pytest $NUMPY_SITE/numpy/_core/tests/test_simd* - - - name: linalg/ufunc/umath tests on Intel SPR - run: | - export NUMPY_SITE=$(realpath build-install/usr/lib/python*/site-packages/) - export PYTHONPATH="$PYTHONPATH:$NUMPY_SITE" - cd build-install && - sde -spr -- python -c "import numpy; numpy.show_config()" && - sde -spr -- python -m pytest $NUMPY_SITE/numpy/_core/tests/test_umath* \ - $NUMPY_SITE/numpy/_core/tests/test_ufunc.py \ - $NUMPY_SITE/numpy/_core/tests/test_multiarray.py \ - $NUMPY_SITE/numpy/linalg/tests/test_* diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml deleted file mode 100644 index 
3a401379c03e..000000000000 --- a/.github/workflows/macos.yml +++ /dev/null @@ -1,164 +0,0 @@ -name: macOS tests - -on: - pull_request: - branches: - - main - - maintenance/** - - -permissions: - contents: read # to fetch code (actions/checkout) - -env: - CCACHE_DIR: "${{ github.workspace }}/.ccache" - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - -jobs: - x86_conda: - name: macOS x86-64 conda - # To enable this workflow on a fork, comment out: - if: github.repository == 'numpy/numpy' - runs-on: macos-13 - strategy: - fail-fast: false - matrix: - python-version: ["3.12"] - - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - - name: Prepare cache dirs and timestamps - id: prep-ccache - shell: bash -l {0} - run: | - mkdir -p "${CCACHE_DIR}" - echo "dir=$CCACHE_DIR" >> $GITHUB_OUTPUT - NOW=$(date -u +"%F-%T") - echo "timestamp=${NOW}" >> $GITHUB_OUTPUT - echo "today=$(/bin/date -u '+%Y%m%d')" >> $GITHUB_OUTPUT - - - name: Setup compiler cache - uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4.2.2 - id: cache-ccache - with: - path: ${{ steps.prep-ccache.outputs.dir }} - key: ${{ github.workflow }}-${{ matrix.python-version }}-ccache-macos-${{ steps.prep-ccache.outputs.timestamp }} - restore-keys: | - ${{ github.workflow }}-${{ matrix.python-version }}-ccache-macos- - - - name: Setup Miniforge - uses: conda-incubator/setup-miniconda@505e6394dae86d6a5c7fbb6e3fb8938e3e863830 # v3.1.1 - with: - python-version: ${{ matrix.python-version }} - channels: conda-forge - channel-priority: true - activate-environment: numpy-dev - use-only-tar-bz2: false - miniforge-variant: Miniforge3 - miniforge-version: latest - use-mamba: true - - # Updates if `environment.yml` or the date changes. The latter is needed to - # ensure we re-solve once a day (since we don't lock versions). 
Could be - # replaced by a conda-lock based approach in the future. - - name: Cache conda environment - uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4.2.2 - env: - # Increase this value to reset cache if environment.yml has not changed - CACHE_NUMBER: 1 - with: - path: ${{ env.CONDA }}/envs/numpy-dev - key: - ${{ runner.os }}--${{ steps.prep-ccache.outputs.today }}-conda-${{ env.CACHE_NUMBER }}-${{ hashFiles('environment.yml') }} - id: envcache - - - name: Update Conda Environment - run: mamba env update -n numpy-dev -f environment.yml - if: steps.envcache.outputs.cache-hit != 'true' - - - name: Build and Install NumPy - shell: bash -l {0} - run: | - conda activate numpy-dev - CC="ccache $CC" spin build -j2 -- -Dallow-noblas=false - - - name: Run test suite (full) - shell: bash -l {0} - run: | - conda activate numpy-dev - export OMP_NUM_THREADS=2 - spin test -j2 -m full - - - name: Ccache statistics - shell: bash -l {0} - run: | - conda activate numpy-dev - ccache -s - - - accelerate: - name: Accelerate - ${{ matrix.build_runner[1] }} - ${{ matrix.version }} - # To enable this workflow on a fork, comment out: - if: github.repository == 'numpy/numpy' - runs-on: ${{ matrix.build_runner[0] }} - strategy: - fail-fast: false - matrix: - build_runner: - - [ macos-13, "macos_x86_64" ] - - [ macos-14, "macos_arm64" ] - version: ["3.11", "3.13t"] - - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - - uses: astral-sh/setup-uv@f94ec6bedd8674c4426838e6b50417d36b6ab231 - with: - python-version: ${{ matrix.version }} - enable-cache: false - - - run: - uv pip install --python=${{ matrix.version }} pip - - - uses: maxim-lobanov/setup-xcode@60606e260d2fc5762a71e64e74b2174e8ea3c8bd # v1.6.0 - if: ${{ matrix.build_runner[0] == 'macos-13' }} - with: - xcode-version: '14.3' - - # TODO: remove cython nightly install when cython does a release - - 
name: Install nightly Cython - if: matrix.version == '3.13t' - run: | - pip install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple cython - - - name: Install dependencies - run: | - pip install -r requirements/build_requirements.txt - pip install -r requirements/setuptools_requirement.txt - pip install pytest pytest-xdist pytest-timeout hypothesis - - - name: Build against Accelerate (LP64) - run: spin build -- -Ddisable-optimization=true -Dallow-noblas=false - - - name: Test (linalg only) - run: spin test -j2 -- numpy/linalg --timeout=600 --durations=10 - - - name: Build NumPy against Accelerate (ILP64) - run: | - rm -r build build-install - spin build -- -Duse-ilp64=true -Ddisable-optimization=true -Dallow-noblas=false - - - name: Test (fast tests) - run: spin test -j2 -- --timeout=600 --durations=10 diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml deleted file mode 100644 index 446fb899e308..000000000000 --- a/.github/workflows/mypy.yml +++ /dev/null @@ -1,74 +0,0 @@ -name: Run MyPy - -# Mypy is too slow to run as part of regular CI. The purpose of the jobs in -# this file is to cover running Mypy across: -# -# - OSes: Linux, Windows and macOS -# - Python versions: lowest/highest supported versions, and an intermediate one -# -# The build matrix aims for sparse coverage across those two dimensions. -# Use of BLAS/LAPACK and SIMD is disabled on purpose, because those things -# don't matter for static typing and this speeds up the builds. -# -# This is a separate job file so it's easy to trigger by hand. 
- -on: - pull_request: - branches: - - main - - maintenance/** - paths-ignore: - - 'benchmarks/' - - '.circlecl/' - - 'docs/' - - 'meson_cpu/' - - 'tools/' - workflow_dispatch: - -defaults: - run: - shell: bash - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - -permissions: - contents: read # to fetch code (actions/checkout) - -jobs: - mypy: - # To enable this workflow on a fork, comment out: - if: github.repository == 'numpy/numpy' - name: "MyPy" - runs-on: ${{ matrix.os_python[0] }} - strategy: - fail-fast: false - matrix: - os_python: - - [ubuntu-latest, '3.12'] - - [windows-latest, '3.11'] - - [macos-latest, '3.11'] - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: ${{ matrix.os_python[1] }} - - name: Install dependencies - run: | - pip install -r requirements/build_requirements.txt - # orjson makes mypy faster but the default requirements.txt - # can't install it because orjson doesn't support 32 bit Linux - pip install orjson - pip install -r requirements/test_requirements.txt - - name: Build - run: | - spin build -j2 -- -Dallow-noblas=true -Ddisable-optimization=true --vsenv - - name: Run Mypy - run: | - spin mypy diff --git a/.github/workflows/mypy_primer.yml b/.github/workflows/mypy_primer.yml deleted file mode 100644 index 7e6d7dba9cb4..000000000000 --- a/.github/workflows/mypy_primer.yml +++ /dev/null @@ -1,99 +0,0 @@ -name: Run mypy_primer - -on: - # Only run on PR, since we diff against main - pull_request: - paths: - - "**/*.pyi" - - ".github/workflows/mypy_primer.yml" - - ".github/workflows/mypy_primer_comment.yml" - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - -permissions: - contents: read - 
-jobs: - mypy_primer: - name: Run - runs-on: ubuntu-latest - strategy: - matrix: - shard-index: [0] # e.g. change this to [0, 1, 2] and --num-shards below to 3 - fail-fast: false - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - path: numpy_to_test - fetch-depth: 0 - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: "3.12" - - name: Install dependencies - run: pip install git+https://github.com/hauntsaninja/mypy_primer.git - - name: Run mypy_primer - shell: bash - run: | - cd numpy_to_test - MYPY_VERSION=$(grep mypy== requirements/test_requirements.txt | sed -n 's/mypy==\([^;]*\).*/\1/p') - - echo "new commit" - git checkout $GITHUB_SHA - git rev-list --format=%s --max-count=1 HEAD - - MERGE_BASE=$(git merge-base $GITHUB_SHA origin/$GITHUB_BASE_REF) - git worktree add ../numpy_base $MERGE_BASE - cd ../numpy_base - - echo "base commit" - git rev-list --format=%s --max-count=1 HEAD - - echo '' - cd .. - # fail action if exit code isn't zero or one - # TODO: note that we don't build numpy, so if a project attempts to use the - # numpy mypy plugin, we may see some issues involving version skew. - ( - mypy_primer \ - --new v${MYPY_VERSION} --old v${MYPY_VERSION} \ - --known-dependency-selector numpy \ - --old-prepend-path numpy_base --new-prepend-path numpy_to_test \ - --num-shards 1 --shard-index ${{ matrix.shard-index }} \ - --debug \ - --output concise \ - | tee diff_${{ matrix.shard-index }}.txt - ) || [ $? 
-eq 1 ] - - if: ${{ matrix.shard-index == 0 }} - name: Save PR number - run: | - echo ${{ github.event.pull_request.number }} | tee pr_number.txt - - name: Upload mypy_primer diff + PR number - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 - if: ${{ matrix.shard-index == 0 }} - with: - name: mypy_primer_diffs-${{ matrix.shard-index }} - path: | - diff_${{ matrix.shard-index }}.txt - pr_number.txt - - name: Upload mypy_primer diff - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 - if: ${{ matrix.shard-index != 0 }} - with: - name: mypy_primer_diffs-${{ matrix.shard-index }} - path: diff_${{ matrix.shard-index }}.txt - - join_artifacts: - name: Join artifacts - runs-on: ubuntu-latest - needs: [mypy_primer] - permissions: - contents: read - steps: - - name: Merge artifacts - uses: actions/upload-artifact/merge@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 - with: - name: mypy_primer_diffs - pattern: mypy_primer_diffs-* - delete-merged: true diff --git a/.github/workflows/mypy_primer_comment.yml b/.github/workflows/mypy_primer_comment.yml deleted file mode 100644 index be0dda7f7dec..000000000000 --- a/.github/workflows/mypy_primer_comment.yml +++ /dev/null @@ -1,103 +0,0 @@ -name: Comment with mypy_primer diff - -on: - workflow_run: - workflows: - - Run mypy_primer - types: - - completed - -permissions: - contents: read - pull-requests: write - -jobs: - comment: - name: Comment PR from mypy_primer - runs-on: ubuntu-latest - if: ${{ github.event.workflow_run.conclusion == 'success' }} - steps: - - name: Download diffs - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 - with: - script: | - const fs = require('fs'); - const artifacts = await github.rest.actions.listWorkflowRunArtifacts({ - owner: context.repo.owner, - repo: context.repo.repo, - run_id: ${{ github.event.workflow_run.id }}, - }); - const [matchArtifact] = artifacts.data.artifacts.filter((artifact) => - 
artifact.name == "mypy_primer_diffs"); - - const download = await github.rest.actions.downloadArtifact({ - owner: context.repo.owner, - repo: context.repo.repo, - artifact_id: matchArtifact.id, - archive_format: "zip", - }); - fs.writeFileSync("diff.zip", Buffer.from(download.data)); - - - run: unzip diff.zip - - - name: Get PR number - id: get-pr-number - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 - with: - script: | - const fs = require('fs'); - return parseInt(fs.readFileSync("pr_number.txt", { encoding: "utf8" })) - - - name: Hide old comments - uses: kanga333/comment-hider@c12bb20b48aeb8fc098e35967de8d4f8018fffdf # v0.4.0 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - issue_number: ${{ steps.get-pr-number.outputs.result }} - - - run: cat diff_*.txt | tee fulldiff.txt - - - name: Post comment - id: post-comment - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - script: | - const MAX_CHARACTERS = 50000 - const MAX_CHARACTERS_PER_PROJECT = MAX_CHARACTERS / 3 - - const fs = require('fs') - let data = fs.readFileSync('fulldiff.txt', { encoding: 'utf8' }) - - function truncateIfNeeded(original, maxLength) { - if (original.length <= maxLength) { - return original - } - let truncated = original.substring(0, maxLength) - // further, remove last line that might be truncated - truncated = truncated.substring(0, truncated.lastIndexOf('\n')) - let lines_truncated = original.split('\n').length - truncated.split('\n').length - return `${truncated}\n\n... 
(truncated ${lines_truncated} lines) ...` - } - - const projects = data.split('\n\n') - // don't let one project dominate - data = projects.map(project => truncateIfNeeded(project, MAX_CHARACTERS_PER_PROJECT)).join('\n\n') - // posting comment fails if too long, so truncate - data = truncateIfNeeded(data, MAX_CHARACTERS) - - console.log("Diff from mypy_primer:") - console.log(data) - - let body - if (data.trim()) { - body = 'Diff from [mypy_primer](https://github.com/hauntsaninja/mypy_primer), ' - body += 'showing the effect of this PR on type check results on a corpus of open source code:\n```diff\n' - body += data + '```' - const prNumber = parseInt(fs.readFileSync("pr_number.txt", { encoding: "utf8" })) - await github.rest.issues.createComment({ - issue_number: prNumber, - owner: context.repo.owner, - repo: context.repo.repo, - body - }) - } diff --git a/.github/workflows/scorecards.yml b/.github/workflows/scorecards.yml deleted file mode 100644 index 0a11922b0877..000000000000 --- a/.github/workflows/scorecards.yml +++ /dev/null @@ -1,55 +0,0 @@ -name: Scorecards supply-chain security -on: - # For Branch-Protection check. Only the default branch is supported. See - # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection - branch_protection_rule: - # To guarantee Maintained check is occasionally updated. See - # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained - schedule: - - cron: "19 23 * * 5" - push: - branches: ["main"] - -# Declare default permissions as read only. -permissions: {} - -jobs: - analysis: - name: Scorecards analysis - runs-on: ubuntu-latest - permissions: - # Needed to upload the results to code-scanning dashboard. - security-events: write - # Needed to publish results and get a badge (see publish_results below). 
- id-token: write - - steps: - - name: "Checkout code" - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v3.1.0 - with: - persist-credentials: false - - - name: "Run analysis" - uses: ossf/scorecard-action@f49aabe0b5af0936a0987cfb85d86b75731b0186 # v2.4.1 - with: - results_file: results.sarif - results_format: sarif - # Publish results to OpenSSF REST API for easy access by consumers. - # Allows the repository to include the Scorecard badge. - # See https://github.com/ossf/scorecard-action#publishing-results. - publish_results: true - - # Upload the results as artifacts (optional). Commenting out will disable - # uploads of run results in SARIF format to the repository Actions tab. - - name: "Upload artifact" - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 - with: - name: SARIF file - path: results.sarif - retention-days: 5 - - # Upload the results to GitHub's code scanning dashboard. - - name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@5f8171a638ada777af81d42b55959a643bb29017 # v2.1.27 - with: - sarif_file: results.sarif diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index bcd888cd8047..0dc274388577 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -14,22 +14,7 @@ name: Wheel builder on: - schedule: - # ┌───────────── minute (0 - 59) - # │ ┌───────────── hour (0 - 23) - # │ │ ┌───────────── day of the month (1 - 31) - # │ │ │ ┌───────────── month (1 - 12 or JAN-DEC) - # │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT) - # │ │ │ │ │ - - cron: "42 2 * * SUN,WED" - pull_request: - branches: - - main - - maintenance/** push: - tags: - - v* - workflow_dispatch: concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} @@ -42,9 +27,6 @@ jobs: get_commit_message: name: Get commit message runs-on: ubuntu-latest - # Only workflow_dispatch is enabled on forks. 
- # To enable this job and subsequent jobs on a fork for other events, comment out: - if: github.repository == 'numpy/numpy' || github.event_name == 'workflow_dispatch' outputs: message: ${{ steps.commit_message.outputs.message }} steps: @@ -67,11 +49,6 @@ jobs: build_wheels: name: Build wheel ${{ matrix.python }}-${{ matrix.buildplat[1] }}-${{ matrix.buildplat[2] }} needs: get_commit_message - if: >- - contains(needs.get_commit_message.outputs.message, '[wheel build]') || - github.event_name == 'schedule' || - github.event_name == 'workflow_dispatch' || - (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && ( ! endsWith(github.ref, 'dev0'))) runs-on: ${{ matrix.buildplat[0] }} strategy: # Ensure that a wheel builder finishes even if another fails @@ -81,17 +58,17 @@ jobs: # https://github.com/github/feedback/discussions/7835#discussioncomment-1769026 buildplat: - [ubuntu-22.04, manylinux_x86_64, ""] - - [ubuntu-22.04, musllinux_x86_64, ""] - - [ubuntu-22.04-arm, manylinux_aarch64, ""] - - [ubuntu-22.04-arm, musllinux_aarch64, ""] - - [macos-13, macosx_x86_64, openblas] - - # targeting macos >= 14. Could probably build on macos-14, but it would be a cross-compile - - [macos-13, macosx_x86_64, accelerate] - - [macos-14, macosx_arm64, accelerate] # always use accelerate - - [windows-2019, win_amd64, ""] - - [windows-2019, win32, ""] - python: ["cp311", "cp312", "cp313", "cp313t", "pp311"] +# - [ubuntu-22.04, musllinux_x86_64, ""] +# - [ubuntu-22.04-arm, manylinux_aarch64, ""] +# - [ubuntu-22.04-arm, musllinux_aarch64, ""] +# - [macos-13, macosx_x86_64, openblas] +# +# # targeting macos >= 14. 
Could probably build on macos-14, but it would be a cross-compile +# - [macos-13, macosx_x86_64, accelerate] +# - [macos-14, macosx_arm64, accelerate] # always use accelerate +# - [windows-2019, win_amd64, ""] +# - [windows-2019, win32, ""] + python: ["cp312"] # ["cp311", "cp312", "cp313", "cp313t", "pp311"] exclude: # Don't build PyPy 32-bit windows - buildplat: [windows-2019, win32, ""] @@ -211,82 +188,3 @@ jobs: # https://anaconda.org/multibuild-wheels-staging/numpy # The tokens were originally generated at anaconda.org upload_wheels - - build_sdist: - name: Build sdist - needs: get_commit_message - if: >- - contains(needs.get_commit_message.outputs.message, '[wheel build]') || - github.event_name == 'schedule' || - github.event_name == 'workflow_dispatch' || - (github.event_name == 'pull_request' && - (contains(github.event.pull_request.labels.*.name, '36 - Build') || - contains(github.event.pull_request.labels.*.name, '14 - Release'))) || - (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && ( ! 
endsWith(github.ref, 'dev0'))) - runs-on: ubuntu-latest - env: - IS_PUSH: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') }} - # commented out so the sdist doesn't upload to nightly - # IS_SCHEDULE_DISPATCH: ${{ github.event_name == 'schedule' || github.event_name == 'workflow_dispatch' }} - steps: - - name: Checkout numpy - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: true - persist-credentials: false - # Used to push the built wheels - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - # Build sdist on lowest supported Python - python-version: "3.11" - - name: Build sdist - run: | - python -m pip install -U pip build - python -m build --sdist -Csetup-args=-Dallow-noblas=true - - name: Test the sdist - run: | - # TODO: Don't run test suite, and instead build wheels from sdist - # Depends on pypa/cibuildwheel#1020 - python -m pip install dist/*.gz -Csetup-args=-Dallow-noblas=true - pip install -r requirements/test_requirements.txt - cd .. 
# Can't import numpy within numpy src directory - python -c "import numpy, sys; print(numpy.__version__); sys.exit(numpy.test() is False)" - - - name: Check README rendering for PyPI - run: | - python -mpip install twine - twine check dist/* - - - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 - with: - name: sdist - path: ./dist/* - - - uses: conda-incubator/setup-miniconda@505e6394dae86d6a5c7fbb6e3fb8938e3e863830 # v3.1.1 - with: - # for installation of anaconda-client, required for upload to - # anaconda.org - # default (and activated) environment name is test - # Note that this step is *after* specific pythons have been used to - # build and test - auto-update-conda: true - python-version: "3.11" - - - name: Upload sdist - if: success() && github.repository == 'numpy/numpy' - shell: bash -el {0} - env: - NUMPY_STAGING_UPLOAD_TOKEN: ${{ secrets.NUMPY_STAGING_UPLOAD_TOKEN }} - # commented out so the sdist doesn't upload to nightly - # NUMPY_NIGHTLY_UPLOAD_TOKEN: ${{ secrets.NUMPY_NIGHTLY_UPLOAD_TOKEN }} - run: | - conda install -y anaconda-client - source tools/wheels/upload_wheels.sh - set_upload_vars - # trigger an upload to - # https://anaconda.org/scientific-python-nightly-wheels/numpy - # for cron jobs or "Run workflow" (restricted to main branch). 
- # Tags will upload to - # https://anaconda.org/multibuild-wheels-staging/numpy - # The tokens were originally generated at anaconda.org - upload_wheels diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml deleted file mode 100644 index 5d9f5f4db7c9..000000000000 --- a/.github/workflows/windows.yml +++ /dev/null @@ -1,133 +0,0 @@ -name: Windows tests - -on: - pull_request: - branches: - - main - - maintenance/** - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - -permissions: - contents: read # to fetch code (actions/checkout) - -jobs: - python64bit_openblas: - name: x86-64, LP64 OpenBLAS - runs-on: windows-2019 - # To enable this job on a fork, comment out: - if: github.repository == 'numpy/numpy' - strategy: - fail-fast: false - matrix: - compiler-pyversion: - - ["MSVC", "3.11"] - - ["Clang-cl", "3.13t"] - - steps: - - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - - name: Setup Python - uses: astral-sh/setup-uv@f94ec6bedd8674c4426838e6b50417d36b6ab231 - with: - python-version: ${{ matrix.compiler-pyversion[1] }} - enable-cache: false - - - run: - uv pip install --python=${{ matrix.version }} pip - - # TODO: remove cython nightly install when cython does a release - - name: Install nightly Cython - if: matrix.compiler-pyversion[1] == '3.13t' - run: | - pip install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple cython - - - name: Install build dependencies from PyPI - run: | - pip install -r requirements/build_requirements.txt - - - name: Install pkg-config - run: | - choco install -y --stoponfirstfailure --checksum 6004DF17818F5A6DBF19CB335CC92702 pkgconfiglite - echo "PKG_CONFIG_PATH=${{ github.workspace }}/.openblas" >> $env:GITHUB_ENV - - - - name: Install Clang-cl - if: matrix.compiler-pyversion[0] == 'Clang-cl' - run: | - # 
llvm is preinstalled, but leave - # this here in case we need to pin the - # version at some point. - #choco install llvm -y - - - name: Install NumPy (MSVC) - if: matrix.compiler-pyversion[0] == 'MSVC' - run: | - pip install -r requirements/ci_requirements.txt - spin build --with-scipy-openblas=32 -j2 -- --vsenv - - - name: Install NumPy (Clang-cl) - if: matrix.compiler-pyversion[0] == 'Clang-cl' - run: | - "[binaries]","c = 'clang-cl'","cpp = 'clang-cl'","ar = 'llvm-lib'","c_ld = 'lld-link'","cpp_ld = 'lld-link'" | Out-File $PWD/clang-cl-build.ini -Encoding ascii - pip install -r requirements/ci_requirements.txt - spin build --with-scipy-openblas=32 -j2 -- --vsenv --native-file=$PWD/clang-cl-build.ini - - - name: Meson Log - shell: bash - if: ${{ failure() }} - run: | - cat build/meson-logs/meson-log.txt - - - name: Install test dependencies - run: | - python -m pip install -r requirements/test_requirements.txt - python -m pip install threadpoolctl - - - name: Run test suite - run: | - spin test -- --timeout=600 --durations=10 - - msvc_32bit_python_no_openblas: - name: MSVC, 32-bit Python, no BLAS - runs-on: windows-2019 - # To enable this job on a fork, comment out: - if: github.repository == 'numpy/numpy' - steps: - - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - - name: Setup Python (32-bit) - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: '3.11' - architecture: 'x86' - - - name: Setup MSVC (32-bit) - uses: bus1/cabuild/action/msdevshell@e22aba57d6e74891d059d66501b6b5aed8123c4d # v1 - with: - architecture: 'x86' - - - name: Build and install - run: | - python -m pip install . 
-v -Ccompile-args="-j2" -Csetup-args="-Dallow-noblas=true" - - - name: Install test dependencies - run: | - python -m pip install -r requirements/test_requirements.txt - - - name: Run test suite (fast) - run: | - cd tools - python -m pytest --pyargs numpy -m "not slow" -n2 --timeout=600 --durations=10 diff --git a/.github/workflows/windows_arm64.yml b/.github/workflows/windows_arm64.yml deleted file mode 100644 index 42d96aa1989d..000000000000 --- a/.github/workflows/windows_arm64.yml +++ /dev/null @@ -1,208 +0,0 @@ -name: Windows Arm64 - -on: - workflow_dispatch: - -env: - python_version: 3.12 - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - -permissions: - contents: read # to fetch code (actions/checkout) - -jobs: - windows_arm: - runs-on: windows-2019 - - # To enable this job on a fork, comment out: - if: github.repository == 'numpy/numpy' - steps: - - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - submodules: recursive - fetch-tags: true - persist-credentials: false - - - name: Setup Python - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: ${{env.python_version}} - architecture: x64 - - - name: Install build dependencies from PyPI - run: | - python -m pip install -r requirements/build_requirements.txt - - - name: Prepare python - shell: powershell - run: | - $ErrorActionPreference = "Stop" - - #Detecting python location and version - $PythonDir = (Split-Path -Parent (get-command python).Path) - $PythonVersionParts = ( -split (python -V)) - $PythonVersion = $PythonVersionParts[1] - - #Downloading the package for appropriate python version from nuget - $PythonARM64NugetLink = "https://www.nuget.org/api/v2/package/pythonarm64/$PythonVersion" - $PythonARM64NugetZip = "nuget_python.zip" - $PythonARM64NugetDir = "temp_nuget" - Invoke-WebRequest $PythonARM64NugetLink -OutFile 
$PythonARM64NugetZip - - #Changing the libs folder to enable python libraries to be linked for arm64 - Expand-Archive $PythonARM64NugetZip $PythonARM64NugetDir - Copy-Item $PythonARM64NugetDir\tools\libs\* $PythonDir\libs - Remove-Item -Force -Recurse $PythonARM64NugetDir - Remove-Item -Force $PythonARM64NugetZip - - if ((Test-Path -LiteralPath variable:\LASTEXITCODE)) { exit $LASTEXITCODE } - - - name: Prepare Licence - shell: powershell - run: | - $ErrorActionPreference = "Stop" - - $CurrentDir = (get-location).Path - $LicenseFile = "$CurrentDir\LICENSE.txt" - Set-Content $LicenseFile ([Environment]::NewLine) - Add-Content $LicenseFile "----" - Add-Content $LicenseFile ([Environment]::NewLine) - Add-Content $LicenseFile (Get-Content "$CurrentDir\LICENSES_bundled.txt") - Add-Content $LicenseFile (Get-Content "$CurrentDir\tools\wheels\LICENSE_win32.txt") - - if ((Test-Path -LiteralPath variable:\LASTEXITCODE)) { exit $LASTEXITCODE } - - - name: Wheel build - shell: powershell - run: | - $ErrorActionPreference = "Stop" - - #Creating cross compile script for messon subsystem - $CurrentDir = (get-location) - $CrossScript = "$CurrentDir\arm64_w64.txt" - $CrossScriptContent = - { - [host_machine] - system = 'windows' - subsystem = 'windows' - kernel = 'nt' - cpu_family = 'aarch64' - cpu = 'aarch64' - endian = 'little' - - [binaries] - c='cl.exe' - cpp = 'cl.exe' - - [properties] - sizeof_short = 2 - sizeof_int = 4 - sizeof_long = 4 - sizeof_long_long = 8 - sizeof_float = 4 - sizeof_double = 8 - sizeof_long_double = 8 - sizeof_size_t = 8 - sizeof_wchar_t = 2 - sizeof_off_t = 4 - sizeof_Py_intptr_t = 8 - sizeof_PY_LONG_LONG = 8 - longdouble_format = 'IEEE_DOUBLE_LE' - } - Set-Content $CrossScript $CrossScriptContent.ToString() - - #Setting up cross compilers from MSVC - $Products = 'Community', 'Professional', 'Enterprise', 'BuildTools' | % { "Microsoft.VisualStudio.Product.$_" } - $VsInstallPath = (vswhere -products $Products -latest -format json | 
ConvertFrom-Json).installationPath - $VSVars = (Get-ChildItem -Path $VsInstallPath -Recurse -Filter "vcvarsamd64_arm64.bat").FullName - $ScriptingObj = New-Object -ComObject Scripting.FileSystemObject - $VSVarsShort = $ScriptingObj.GetFile($VSVars).ShortPath - cmd /c "$VSVarsShort && set" | - ForEach-Object { - if ($_ -match "=") { - $Var = $_.split("=") - set-item -force -path "ENV:\$($Var[0])" -value "$($Var[1])" - } - } - - #Building the wheel - pip wheel . --config-settings=setup-args="--cross-file=$CrossScript" - - if ((Test-Path -LiteralPath variable:\LASTEXITCODE)) { exit $LASTEXITCODE } - - - name: Fix wheel - shell: powershell - run: | - $ErrorActionPreference = "Stop" - - #Finding whl file - $CurrentDir = (get-location) - $WhlName = ((Get-ChildItem -Filter "*.whl").FullName) - $ZipWhlName = "$CurrentDir\ZipWhlName.zip" - $UnzippedWhl = "$CurrentDir\unzipedWhl" - - #Expanding whl file - Rename-Item -Path $WhlName $ZipWhlName - if (Test-Path $UnzippedWhl) { - Remove-Item -Force -Recurse $UnzippedWhl - } - Expand-Archive -Force -Path $ZipWhlName $UnzippedWhl - - #Renaming all files to show that their arch is arm64 - Get-ChildItem -Recurse -Path $UnzippedWhl *win_amd64* | Rename-Item -NewName { $_.Name -replace 'win_amd64', 'win_arm64' } - $DIST_DIR = (Get-ChildItem -Recurse -Path $UnzippedWhl *dist-info).FullName - - #Changing amd64 references from metafiles - (GET-Content $DIST_DIR/RECORD) -replace 'win_amd64', 'win_arm64' | Set-Content $DIST_DIR/RECORD - (GET-Content $DIST_DIR/WHEEL) -replace 'win_amd64', 'win_arm64' | Set-Content $DIST_DIR/WHEEL - - #Packing whl file - Compress-Archive -Path $UnzippedWhl\* -DestinationPath $ZipWhlName -Force - $WhlName = $WhlName.Replace("win_amd64", "win_arm64") - Rename-Item -Path $ZipWhlName $WhlName - - if ((Test-Path -LiteralPath variable:\LASTEXITCODE)) { exit $LASTEXITCODE } - - - name: Upload Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 - with: - name: ${{ 
env.python_version }}-win_arm64 - path: ./*.whl - - - name: Setup Mamba - uses: mamba-org/setup-micromamba@0dea6379afdaffa5d528b3d1dabc45da37f443fc - with: - # for installation of anaconda-client, required for upload to - # anaconda.org - # Note that this step is *after* specific pythons have been used to - # build and test the wheel - # for installation of anaconda-client, for upload to anaconda.org - # environment will be activated after creation, and in future bash steps - init-shell: bash - environment-name: upload-env - create-args: >- - anaconda-client - - # - name: Upload wheels - # if: success() - # shell: bash -el {0} - # # see https://github.com/marketplace/actions/setup-miniconda for why - # # `-el {0}` is required. - # env: - # NUMPY_STAGING_UPLOAD_TOKEN: ${{ secrets.NUMPY_STAGING_UPLOAD_TOKEN }} - # NUMPY_NIGHTLY_UPLOAD_TOKEN: ${{ secrets.NUMPY_NIGHTLY_UPLOAD_TOKEN }} - # run: | - # source tools/wheels/upload_wheels.sh - # set_upload_vars - # # trigger an upload to - # # https://anaconda.org/scientific-python-nightly-wheels/numpy - # # for cron jobs or "Run workflow" (restricted to main branch). 
- # # Tags will upload to - # # https://anaconda.org/multibuild-wheels-staging/numpy - # # The tokens were originally generated at anaconda.org - # upload_wheels - From 9890df801193e7556ae8908748bcf0211a76e184 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Thu, 20 Mar 2025 17:15:07 +0100 Subject: [PATCH 02/44] trigger From ffd4145c763fb6d5565bc2f17bede7c2cb060a4f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Thu, 20 Mar 2025 17:25:10 +0100 Subject: [PATCH 03/44] disable testing for the time being --- pyproject.toml | 2 -- 1 file changed, 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index eb7015acc347..9186cc9ff833 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -146,8 +146,6 @@ skip = "*_i686 *_ppc64le *_s390x *_universal2" before-build = "bash {project}/tools/wheels/cibw_before_build.sh {project}" # The build will use openblas64 everywhere, except on arm64 macOS >=14.0 (uses Accelerate) config-settings = "setup-args=-Duse-ilp64=true setup-args=-Dallow-noblas=false build-dir=build" -before-test = "pip install -r {project}/requirements/test_requirements.txt" -test-command = "bash {project}/tools/wheels/cibw_test_command.sh {project}" enable = ["cpython-freethreading", "pypy", "cpython-prerelease"] [tool.cibuildwheel.linux] From 628cc837022577a788a1bb92fc53dfbecd89d3f8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Thu, 20 Mar 2025 17:47:28 +0100 Subject: [PATCH 04/44] Depend on variant-capable meson-python --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 9186cc9ff833..0ef63ed99510 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [build-system] build-backend = "mesonpy" requires = [ - "meson-python>=0.15.0", + "meson-python @ https://github.com/mgorny/meson-python/archive/wheel-variants.tar.gz", "Cython>=3.0.6", # keep in sync with version check in meson.build ] From 
264452a8a329a4267523710fb4d25687a6b15978 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Thu, 20 Mar 2025 18:18:57 +0100 Subject: [PATCH 05/44] Let's see if cibuildwheel can handle variant wheels --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 0ef63ed99510..47b0fcc4fa7e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -145,7 +145,7 @@ build-frontend = "build" skip = "*_i686 *_ppc64le *_s390x *_universal2" before-build = "bash {project}/tools/wheels/cibw_before_build.sh {project}" # The build will use openblas64 everywhere, except on arm64 macOS >=14.0 (uses Accelerate) -config-settings = "setup-args=-Duse-ilp64=true setup-args=-Dallow-noblas=false build-dir=build" +config-settings = "setup-args=-Duse-ilp64=true setup-args=-Dallow-noblas=false build-dir=build variant-name=foo::bar::baz variant-name=a::b::c" enable = ["cpython-freethreading", "pypy", "cpython-prerelease"] [tool.cibuildwheel.linux] From be6a7a86c57f2382d4792fba7e728e6c4f38102e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Thu, 20 Mar 2025 18:33:07 +0100 Subject: [PATCH 06/44] Try disabling repair --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 47b0fcc4fa7e..47d0ad20a05a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -149,6 +149,7 @@ config-settings = "setup-args=-Duse-ilp64=true setup-args=-Dallow-noblas=false b enable = ["cpython-freethreading", "pypy", "cpython-prerelease"] [tool.cibuildwheel.linux] +repair-wheel-command = "" manylinux-x86_64-image = "manylinux_2_28" manylinux-aarch64-image = "manylinux_2_28" musllinux-x86_64-image = "musllinux_1_2" From d13fbfdb29cc341fd90dccaa16d0dab17bd99987 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Thu, 20 Mar 2025 18:42:50 +0100 Subject: [PATCH 07/44] Try building different blas variants --- .github/workflows/wheels.yml | 2 ++ 1 file 
changed, 2 insertions(+) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 0dc274388577..876983d93e71 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -54,6 +54,7 @@ jobs: # Ensure that a wheel builder finishes even if another fails fail-fast: false matrix: + blas: [mkl, openblas] # Github Actions doesn't support pairing matrix values together, let's improvise # https://github.com/github/feedback/discussions/7835#discussioncomment-1769026 buildplat: @@ -151,6 +152,7 @@ jobs: uses: pypa/cibuildwheel@42728e866bbc80d544a70825bd9990b9a26f1a50 # v2.23.1 env: CIBW_BUILD: ${{ matrix.python }}-${{ matrix.buildplat[1] }} + CIBW_CONFIG_SETTINGS: "setup-args=-Duse-ilp64=true setup-args=-Dallow-noblas=false build-dir=build variant=blas::variant::${{ matrix.blas }} setup-args=-Dblas=${{ matrix.blas }}" - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: From 1980f611011358085d15cca6f67bbc37ab4fab5f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Thu, 20 Mar 2025 18:44:32 +0100 Subject: [PATCH 08/44] s/variant/variant-name/ --- .github/workflows/wheels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 876983d93e71..95a02c156bdb 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -152,7 +152,7 @@ jobs: uses: pypa/cibuildwheel@42728e866bbc80d544a70825bd9990b9a26f1a50 # v2.23.1 env: CIBW_BUILD: ${{ matrix.python }}-${{ matrix.buildplat[1] }} - CIBW_CONFIG_SETTINGS: "setup-args=-Duse-ilp64=true setup-args=-Dallow-noblas=false build-dir=build variant=blas::variant::${{ matrix.blas }} setup-args=-Dblas=${{ matrix.blas }}" + CIBW_CONFIG_SETTINGS: "setup-args=-Duse-ilp64=true setup-args=-Dallow-noblas=false build-dir=build variant-name=blas::variant::${{ matrix.blas }} setup-args=-Dblas=${{ matrix.blas }}" - uses: 
actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: From 806b0934d4f7e4f427763709460d8262a9531ede Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Fri, 21 Mar 2025 13:48:12 +0100 Subject: [PATCH 09/44] Try blis instead --- .github/workflows/wheels.yml | 2 +- tools/wheels/cibw_before_build.sh | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 95a02c156bdb..3bac03e1ec54 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -54,7 +54,7 @@ jobs: # Ensure that a wheel builder finishes even if another fails fail-fast: false matrix: - blas: [mkl, openblas] + blas: [openblas, blis] # Github Actions doesn't support pairing matrix values together, let's improvise # https://github.com/github/feedback/discussions/7835#discussioncomment-1769026 buildplat: diff --git a/tools/wheels/cibw_before_build.sh b/tools/wheels/cibw_before_build.sh index e2f464d32a2a..bd7bb8f60948 100644 --- a/tools/wheels/cibw_before_build.sh +++ b/tools/wheels/cibw_before_build.sh @@ -61,3 +61,5 @@ if [[ $FREE_THREADED_BUILD == "True" ]]; then python -m pip install meson-python ninja python -m pip install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple cython fi + +sudo dnf install blis From 31a473197ee8863d0768e8983f562d15db5196c5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Fri, 21 Mar 2025 13:49:35 +0100 Subject: [PATCH 10/44] no sudo? 
--- tools/wheels/cibw_before_build.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/wheels/cibw_before_build.sh b/tools/wheels/cibw_before_build.sh index bd7bb8f60948..b4013170dfff 100644 --- a/tools/wheels/cibw_before_build.sh +++ b/tools/wheels/cibw_before_build.sh @@ -62,4 +62,4 @@ if [[ $FREE_THREADED_BUILD == "True" ]]; then python -m pip install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple cython fi -sudo dnf install blis +dnf install blis From afbb68fa4713de36fc17bd8f04b9971e64980f56 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Fri, 21 Mar 2025 14:00:01 +0100 Subject: [PATCH 11/44] gotta -y --- tools/wheels/cibw_before_build.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/wheels/cibw_before_build.sh b/tools/wheels/cibw_before_build.sh index b4013170dfff..9adc5c4e1d18 100644 --- a/tools/wheels/cibw_before_build.sh +++ b/tools/wheels/cibw_before_build.sh @@ -62,4 +62,4 @@ if [[ $FREE_THREADED_BUILD == "True" ]]; then python -m pip install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple cython fi -dnf install blis +dnf install -y blis From 3740d72cc7d808ed167164e2dd32a4a3228d3fc3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Fri, 21 Mar 2025 14:21:06 +0100 Subject: [PATCH 12/44] blis-devel --- tools/wheels/cibw_before_build.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/wheels/cibw_before_build.sh b/tools/wheels/cibw_before_build.sh index 9adc5c4e1d18..c49a54e9d3b3 100644 --- a/tools/wheels/cibw_before_build.sh +++ b/tools/wheels/cibw_before_build.sh @@ -62,4 +62,4 @@ if [[ $FREE_THREADED_BUILD == "True" ]]; then python -m pip install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple cython fi -dnf install -y blis +dnf install -y blis-devel From 44e4677645c347a5f2c63f6e021492a3ecac13ad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Fri, 
21 Mar 2025 15:52:54 +0100 Subject: [PATCH 13/44] Try building without cibw --- .github/workflows/wheels.yml | 151 ++--------------------------------- 1 file changed, 5 insertions(+), 146 deletions(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 3bac03e1ec54..0bb0838bcaf8 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -16,39 +16,12 @@ name: Wheel builder on: push: -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - permissions: contents: read # to fetch code (actions/checkout) jobs: - get_commit_message: - name: Get commit message - runs-on: ubuntu-latest - outputs: - message: ${{ steps.commit_message.outputs.message }} - steps: - - name: Checkout numpy - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - # Gets the correct commit message for pull request - with: - ref: ${{ github.event.pull_request.head.sha }} - persist-credentials: false - - name: Get commit message - id: commit_message - env: - HEAD: ${{ github.ref }} - run: | - set -xe - COMMIT_MSG=$(git log --no-merges -1 --oneline) - echo "message=$COMMIT_MSG" >> $GITHUB_OUTPUT - echo github.ref "$HEAD" - build_wheels: - name: Build wheel ${{ matrix.python }}-${{ matrix.buildplat[1] }}-${{ matrix.buildplat[2] }} - needs: get_commit_message + name: Build wheel ${{ matrix.python }}-${{ matrix.mkl }} runs-on: ${{ matrix.buildplat[0] }} strategy: # Ensure that a wheel builder finishes even if another fails @@ -59,33 +32,8 @@ jobs: # https://github.com/github/feedback/discussions/7835#discussioncomment-1769026 buildplat: - [ubuntu-22.04, manylinux_x86_64, ""] -# - [ubuntu-22.04, musllinux_x86_64, ""] -# - [ubuntu-22.04-arm, manylinux_aarch64, ""] -# - [ubuntu-22.04-arm, musllinux_aarch64, ""] -# - [macos-13, macosx_x86_64, openblas] -# -# # targeting macos >= 14. 
Could probably build on macos-14, but it would be a cross-compile -# - [macos-13, macosx_x86_64, accelerate] -# - [macos-14, macosx_arm64, accelerate] # always use accelerate -# - [windows-2019, win_amd64, ""] -# - [windows-2019, win32, ""] - python: ["cp312"] # ["cp311", "cp312", "cp313", "cp313t", "pp311"] - exclude: - # Don't build PyPy 32-bit windows - - buildplat: [windows-2019, win32, ""] - python: "pp311" - # No PyPy on musllinux images - - buildplat: [ ubuntu-22.04, musllinux_x86_64, "" ] - python: "pp311" - - buildplat: [ ubuntu-22.04-arm, musllinux_aarch64, "" ] - python: "pp311" - - buildplat: [ macos13, macosx_x86_64, openblas ] - python: "cp313t" + python: ["3.12"] - env: - IS_32_BIT: ${{ matrix.buildplat[1] == 'win32' }} - IS_PUSH: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') }} - IS_SCHEDULE_DISPATCH: ${{ github.event_name == 'schedule' || github.event_name == 'workflow_dispatch' }} steps: - name: Checkout numpy uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 @@ -93,100 +41,11 @@ jobs: submodules: true persist-credentials: false - - name: Setup MSVC (32-bit) - if: ${{ matrix.buildplat[1] == 'win32' }} - uses: bus1/cabuild/action/msdevshell@e22aba57d6e74891d059d66501b6b5aed8123c4d # v1 - with: - architecture: 'x86' - - - name: pkg-config-for-win - run: | - choco install -y --no-progress --stoponfirstfailure --checksum 6004DF17818F5A6DBF19CB335CC92702 pkgconfiglite - $CIBW = "${{ github.workspace }}/.openblas" - # pkgconfig needs a complete path, and not just "./openblas since the - # build is run in a tmp dir (?) 
- # It seems somewhere in the env passing, `\` is not - # passed through, so convert it to '/' - $CIBW = $CIBW.replace("\","/") - echo "CIBW_ENVIRONMENT_WINDOWS=PKG_CONFIG_PATH=$CIBW" >> $env:GITHUB_ENV - if: runner.os == 'windows' - # Used to push the built wheels - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: - python-version: "3.x" - - - name: Setup macOS - if: matrix.buildplat[0] == 'macos-13' || matrix.buildplat[0] == 'macos-14' - run: | - # Needed due to https://github.com/actions/runner-images/issues/3371 - # Supported versions: https://github.com/actions/runner-images/blob/main/images/macos/macos-14-arm64-Readme.md - echo "FC=gfortran-13" >> "$GITHUB_ENV" - echo "F77=gfortran-13" >> "$GITHUB_ENV" - echo "F90=gfortran-13" >> "$GITHUB_ENV" - if [[ ${{ matrix.buildplat[2] }} == 'accelerate' ]]; then - # macosx_arm64 and macosx_x86_64 with accelerate - # only target Sonoma onwards - CIBW="MACOSX_DEPLOYMENT_TARGET=14.0 INSTALL_OPENBLAS=false RUNNER_OS=macOS" - echo "CIBW_ENVIRONMENT_MACOS=$CIBW" >> "$GITHUB_ENV" - - # the macos-13 image that's used for building the x86_64 wheel can't test - # a wheel with deployment target >= 14 without further work - echo "CIBW_TEST_SKIP=*-macosx_x86_64" >> "$GITHUB_ENV" - else - # macosx_x86_64 with OpenBLAS - # if INSTALL_OPENBLAS isn't specified then scipy-openblas is automatically installed - CIBW="RUNNER_OS=macOS" - PKG_CONFIG_PATH="$PWD/.openblas" - DYLD="$DYLD_LIBRARY_PATH:/$PWD/.openblas/lib" - echo "CIBW_ENVIRONMENT_MACOS=$CIBW PKG_CONFIG_PATH=$PKG_CONFIG_PATH DYLD_LIBRARY_PATH=$DYLD" >> "$GITHUB_ENV" - fi - - - name: Set up free-threaded build - if: matrix.python == 'cp313t' - shell: bash -el {0} - run: | - echo "CIBW_BUILD_FRONTEND=pip; args: --no-build-isolation" >> "$GITHUB_ENV" + python-version: ${{ matrix.python }} - name: Build wheels - uses: pypa/cibuildwheel@42728e866bbc80d544a70825bd9990b9a26f1a50 # v2.23.1 - env: - CIBW_BUILD: ${{ matrix.python }}-${{ matrix.buildplat[1] 
}} - CIBW_CONFIG_SETTINGS: "setup-args=-Duse-ilp64=true setup-args=-Dallow-noblas=false build-dir=build variant-name=blas::variant::${{ matrix.blas }} setup-args=-Dblas=${{ matrix.blas }}" - - - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 - with: - name: ${{ matrix.python }}-${{ matrix.buildplat[1] }}-${{ matrix.buildplat[2] }} - path: ./wheelhouse/*.whl - - - uses: mamba-org/setup-micromamba@0dea6379afdaffa5d528b3d1dabc45da37f443fc - with: - # for installation of anaconda-client, required for upload to - # anaconda.org - # Note that this step is *after* specific pythons have been used to - # build and test the wheel - # for installation of anaconda-client, for upload to anaconda.org - # environment will be activated after creation, and in future bash steps - init-shell: bash - environment-name: upload-env - create-args: >- - anaconda-client - - - name: Upload wheels - if: success() && github.repository == 'numpy/numpy' - shell: bash -el {0} - # see https://github.com/marketplace/actions/setup-miniconda for why - # `-el {0}` is required. - env: - NUMPY_STAGING_UPLOAD_TOKEN: ${{ secrets.NUMPY_STAGING_UPLOAD_TOKEN }} - NUMPY_NIGHTLY_UPLOAD_TOKEN: ${{ secrets.NUMPY_NIGHTLY_UPLOAD_TOKEN }} - run: | - source tools/wheels/upload_wheels.sh - set_upload_vars - # trigger an upload to - # https://anaconda.org/scientific-python-nightly-wheels/numpy - # for cron jobs or "Run workflow" (restricted to main branch). 
- # Tags will upload to - # https://anaconda.org/multibuild-wheels-staging/numpy - # The tokens were originally generated at anaconda.org - upload_wheels + run: + - python -m build -w -Csetup-args=-Duse-ilp64=true -Csetup-args=-Dallow-noblas=false From 6f478df12b773e5764598672dbe9aa77e5e97b89 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Fri, 21 Mar 2025 15:53:55 +0100 Subject: [PATCH 14/44] fix --- .github/workflows/wheels.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 0bb0838bcaf8..1bc2c9c87ffe 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -47,5 +47,4 @@ jobs: python-version: ${{ matrix.python }} - name: Build wheels - run: - - python -m build -w -Csetup-args=-Duse-ilp64=true -Csetup-args=-Dallow-noblas=false + run: python -m build -w -Csetup-args=-Duse-ilp64=true -Csetup-args=-Dallow-noblas=false From 543e30366940c01379982292bf9bb01bb19b6770 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Fri, 21 Mar 2025 15:55:06 +0100 Subject: [PATCH 15/44] install build --- .github/workflows/wheels.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 1bc2c9c87ffe..cc77aba93b59 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -46,5 +46,8 @@ jobs: with: python-version: ${{ matrix.python }} + - name: Install deps + run: pip install build + - name: Build wheels run: python -m build -w -Csetup-args=-Duse-ilp64=true -Csetup-args=-Dallow-noblas=false From 15a882c13136e136c8e0eda433419318af20668b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Fri, 21 Mar 2025 15:56:40 +0100 Subject: [PATCH 16/44] enable blas variants again --- .github/workflows/wheels.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml 
index cc77aba93b59..721e1b6202a3 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -21,7 +21,7 @@ permissions: jobs: build_wheels: - name: Build wheel ${{ matrix.python }}-${{ matrix.mkl }} + name: Build wheel ${{ matrix.python }}-${{ matrix.blas }} runs-on: ${{ matrix.buildplat[0] }} strategy: # Ensure that a wheel builder finishes even if another fails @@ -50,4 +50,4 @@ jobs: run: pip install build - name: Build wheels - run: python -m build -w -Csetup-args=-Duse-ilp64=true -Csetup-args=-Dallow-noblas=false + run: python -m build -w -Csetup-args=-Duse-ilp64=true -Csetup-args=-Dallow-noblas=false -Cblas=${{ matrix.blas }} -Cvariant-name=blas::variant::${{ matrix.blas }} From af08dcc08fc5bc534d6dca99bf8ff3341effc4a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Fri, 21 Mar 2025 15:57:01 +0100 Subject: [PATCH 17/44] back to mkl --- .github/workflows/wheels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 721e1b6202a3..a0571f09f953 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -27,7 +27,7 @@ jobs: # Ensure that a wheel builder finishes even if another fails fail-fast: false matrix: - blas: [openblas, blis] + blas: [openblas, mkl] # Github Actions doesn't support pairing matrix values together, let's improvise # https://github.com/github/feedback/discussions/7835#discussioncomment-1769026 buildplat: From 5c8a36de9ca96987ce3e5f9ab7d686b1941b4495 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Fri, 21 Mar 2025 15:58:14 +0100 Subject: [PATCH 18/44] fix setup-args --- .github/workflows/wheels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index a0571f09f953..d10175053bee 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -50,4 +50,4 @@ jobs: run: pip install 
build - name: Build wheels - run: python -m build -w -Csetup-args=-Duse-ilp64=true -Csetup-args=-Dallow-noblas=false -Cblas=${{ matrix.blas }} -Cvariant-name=blas::variant::${{ matrix.blas }} + run: python -m build -w -Csetup-args=-Duse-ilp64=true -Csetup-args=-Dallow-noblas=false -Csetup-args=-Dblas=${{ matrix.blas }} -Cvariant-name=blas::variant::${{ matrix.blas }} From a846c30fc3e16a0472fff021a2c32905112111f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Fri, 21 Mar 2025 16:02:46 +0100 Subject: [PATCH 19/44] deps --- .github/workflows/wheels.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index d10175053bee..f9df669326bf 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -46,8 +46,11 @@ jobs: with: python-version: ${{ matrix.python }} - - name: Install deps + - name: Install build run: pip install build + - name: Install deps + run: apt install libmkl-full-dev libopenblas-dev + - name: Build wheels run: python -m build -w -Csetup-args=-Duse-ilp64=true -Csetup-args=-Dallow-noblas=false -Csetup-args=-Dblas=${{ matrix.blas }} -Cvariant-name=blas::variant::${{ matrix.blas }} From 2faaa705bdbd62ec3e1a5248767f6656698d959f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Fri, 21 Mar 2025 16:03:54 +0100 Subject: [PATCH 20/44] sudo --- .github/workflows/wheels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index f9df669326bf..93b365c03b0b 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -50,7 +50,7 @@ jobs: run: pip install build - name: Install deps - run: apt install libmkl-full-dev libopenblas-dev + run: sudo apt install -y libmkl-full-dev libopenblas-dev - name: Build wheels run: python -m build -w -Csetup-args=-Duse-ilp64=true -Csetup-args=-Dallow-noblas=false -Csetup-args=-Dblas=${{ 
matrix.blas }} -Cvariant-name=blas::variant::${{ matrix.blas }} From 149b0def06f47dff7502fc38cb73d82ebcbf06f9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Fri, 21 Mar 2025 16:39:42 +0100 Subject: [PATCH 21/44] use pixi instead --- .gitattributes | 2 ++ .github/workflows/wheels.yml | 13 +++---------- .gitignore | 4 ++++ pixi.toml | 24 ++++++++++++++++++++++++ 4 files changed, 33 insertions(+), 10 deletions(-) create mode 100644 pixi.toml diff --git a/.gitattributes b/.gitattributes index a7807beb597c..6a4c305b253d 100644 --- a/.gitattributes +++ b/.gitattributes @@ -106,3 +106,5 @@ numpy/version.py linguist-generated ./doc/source/reference/simd/*.inc text ./numpy/_core/src/_simd/*.inc text diff=c +# SCM syntax highlighting +pixi.lock linguist-language=YAML linguist-generated=true diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 93b365c03b0b..953804f999f4 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -41,16 +41,9 @@ jobs: submodules: true persist-credentials: false - # Used to push the built wheels - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + - uses: prefix-dev/setup-pixi@v0.8.3 with: - python-version: ${{ matrix.python }} - - - name: Install build - run: pip install build - - - name: Install deps - run: sudo apt install -y libmkl-full-dev libopenblas-dev + cache: true - name: Build wheels - run: python -m build -w -Csetup-args=-Duse-ilp64=true -Csetup-args=-Dallow-noblas=false -Csetup-args=-Dblas=${{ matrix.blas }} -Cvariant-name=blas::variant::${{ matrix.blas }} + run: pixi run -e ${{ matrix.blas }} build -Csetup-args=-Duse-ilp64=true -Csetup-args=-Dallow-noblas=false -Csetup-args=-Dblas=${{ matrix.blas }} -Cvariant-name=blas::variant::${{ matrix.blas }} diff --git a/.gitignore b/.gitignore index df7f084e3645..1ae65c7b0bd2 100644 --- a/.gitignore +++ b/.gitignore @@ -150,3 +150,7 @@ tools/swig/test/Vector_wrap.cxx 
tools/swig/test/Array.py .openblas numpy/_distributor_init_local.py + +# pixi environments +.pixi +*.egg-info diff --git a/pixi.toml b/pixi.toml new file mode 100644 index 000000000000..ac11f17b0399 --- /dev/null +++ b/pixi.toml @@ -0,0 +1,24 @@ +[workspace] +channels = ["conda-forge"] +name = "numpy-wheel-build" +platforms = ["linux-64"] + +[tasks] +build = "python -m build -w" + +[dependencies] +c-compiler = "*" +cxx-compiler = "*" +fortran-compiler = "*" +python-build = "*" +pkg-config = "*" + +[feature.mkl.dependencies] +mkl-devel = "*" + +[feature.openblas.dependencies] +openblas = "*" + +[environments] +mkl = ["mkl"] +openblas = ["openblas"] From 92ea83f3196730f44208c6eee59e03979f90ec93 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Fri, 21 Mar 2025 16:41:27 +0100 Subject: [PATCH 22/44] no cache --- .github/workflows/wheels.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 953804f999f4..6be10be0e007 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -42,8 +42,6 @@ jobs: persist-credentials: false - uses: prefix-dev/setup-pixi@v0.8.3 - with: - cache: true - name: Build wheels run: pixi run -e ${{ matrix.blas }} build -Csetup-args=-Duse-ilp64=true -Csetup-args=-Dallow-noblas=false -Csetup-args=-Dblas=${{ matrix.blas }} -Cvariant-name=blas::variant::${{ matrix.blas }} From e1f32b90931c335f285b842662c404dadb0643fc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Fri, 21 Mar 2025 19:47:51 +0100 Subject: [PATCH 23/44] Switch to using -Dvariant --- meson.options | 2 ++ numpy/meson.build | 12 ++++++++++++ 2 files changed, 14 insertions(+) diff --git a/meson.options b/meson.options index 1be05d324756..e2d19a595df2 100644 --- a/meson.options +++ b/meson.options @@ -40,3 +40,5 @@ option('test-simd', type: 'array', description: 'Specify a list of CPU features to be tested against NumPy SIMD interface') 
option('test-simd-args', type: 'string', value: '', description: 'Extra args to be passed to the `_simd` module that is used for testing the NumPy SIMD interface') +option('variant', type: 'array', value: [], + description: 'Wheel variant keys') diff --git a/numpy/meson.build b/numpy/meson.build index 7fcafa9c8184..8d5e2d98e4eb 100644 --- a/numpy/meson.build +++ b/numpy/meson.build @@ -67,6 +67,18 @@ else blas_interface = ['interface: lp64'] endif +foreach variant_meta : get_option('variant') + split_meta = variant_meta.split('::') + if split_meta.length() != 3 + error('Invalid variant key: ' + variant_meta) + endif + if split_meta[0].strip() == 'blas' and split_meta[1].strip() == 'variant' + blas_name = split_meta[2].strip() + lapack_name = blas_name + else + error('Unsupported variant key: ' + variant_meta) + endif +endforeach blas_order = get_option('blas-order') if blas_order == ['auto'] From 713d88f08e1bacb8e3e12ab4bf844bb9884de917 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Sat, 22 Mar 2025 15:21:12 +0100 Subject: [PATCH 24/44] Wire allow_noblas too --- numpy/meson.build | 1 + 1 file changed, 1 insertion(+) diff --git a/numpy/meson.build b/numpy/meson.build index 8d5e2d98e4eb..fccfcb81aa48 100644 --- a/numpy/meson.build +++ b/numpy/meson.build @@ -75,6 +75,7 @@ foreach variant_meta : get_option('variant') if split_meta[0].strip() == 'blas' and split_meta[1].strip() == 'variant' blas_name = split_meta[2].strip() lapack_name = blas_name + allow_noblas = false else error('Unsupported variant key: ' + variant_meta) endif From 794c39c0513de55266759cd3ddcb802bee0a8d36 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Sat, 22 Mar 2025 15:29:02 +0100 Subject: [PATCH 25/44] Move variant processing top-level --- meson.build | 14 ++++++++++++++ numpy/meson.build | 21 +++++++-------------- 2 files changed, 21 insertions(+), 14 deletions(-) diff --git a/meson.build b/meson.build index 141e662f40b0..14fb9f02c434 100644 
--- a/meson.build +++ b/meson.build @@ -88,5 +88,19 @@ if host_machine.system() == 'darwin' and cc.has_link_argument('-Wl,-ld_classic') add_project_link_arguments('-Wl,-ld_classic', language : ['c', 'cpp']) endif +blas_variant = '' + +foreach variant_meta : get_option('variant') + split_meta = variant_meta.split('::') + if split_meta.length() != 3 + error('Invalid variant key: ' + variant_meta) + endif + if split_meta[0].strip() == 'blas' and split_meta[1].strip() == 'variant' + blas_variant = split_meta[2].strip() + else + error('Unsupported variant key: ' + variant_meta) + endif +endforeach + subdir('meson_cpu') subdir('numpy') diff --git a/numpy/meson.build b/numpy/meson.build index fccfcb81aa48..3a5bf6e5bdbb 100644 --- a/numpy/meson.build +++ b/numpy/meson.build @@ -60,6 +60,13 @@ allow_noblas = get_option('allow-noblas') # (see cibuildwheel settings in pyproject.toml), but used by CI jobs already blas_symbol_suffix = get_option('blas-symbol-suffix') +# Variant overrides options directly specified +if blas_variant != '' + blas_name = blas_variant + lapack_name = blas_variant + allow_noblas = false +endif + use_ilp64 = get_option('use-ilp64') if use_ilp64 blas_interface = ['interface: ilp64'] @@ -67,20 +74,6 @@ else blas_interface = ['interface: lp64'] endif -foreach variant_meta : get_option('variant') - split_meta = variant_meta.split('::') - if split_meta.length() != 3 - error('Invalid variant key: ' + variant_meta) - endif - if split_meta[0].strip() == 'blas' and split_meta[1].strip() == 'variant' - blas_name = split_meta[2].strip() - lapack_name = blas_name - allow_noblas = false - else - error('Unsupported variant key: ' + variant_meta) - endif -endforeach - blas_order = get_option('blas-order') if blas_order == ['auto'] blas_order = [] From e91ffc4cf3c86b5c90509238a15c1ca1afa086ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Sat, 22 Mar 2025 15:48:00 +0100 Subject: [PATCH 26/44] Support x86_64-vN variants --- 
.github/workflows/wheels.yml | 3 ++- meson.build | 6 ++++++ meson_cpu/x86/meson.build | 14 ++++++++++++++ 3 files changed, 22 insertions(+), 1 deletion(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 6be10be0e007..9fe23fc146c3 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -33,6 +33,7 @@ jobs: buildplat: - [ubuntu-22.04, manylinux_x86_64, ""] python: ["3.12"] + x86_64: [v1, v2, v3, v4] steps: - name: Checkout numpy @@ -44,4 +45,4 @@ jobs: - uses: prefix-dev/setup-pixi@v0.8.3 - name: Build wheels - run: pixi run -e ${{ matrix.blas }} build -Csetup-args=-Duse-ilp64=true -Csetup-args=-Dallow-noblas=false -Csetup-args=-Dblas=${{ matrix.blas }} -Cvariant-name=blas::variant::${{ matrix.blas }} + run: pixi run -e ${{ matrix.blas }} build -Csetup-args=-Duse-ilp64=true -Cvariant=blas::variant::${{ matrix.blas }} -Cvariant=x86_64::baseline::${{ matrix.x86_64 }} diff --git a/meson.build b/meson.build index 14fb9f02c434..b2f3289eafa8 100644 --- a/meson.build +++ b/meson.build @@ -89,6 +89,7 @@ if host_machine.system() == 'darwin' and cc.has_link_argument('-Wl,-ld_classic') endif blas_variant = '' +x86_64_variant = '' foreach variant_meta : get_option('variant') split_meta = variant_meta.split('::') @@ -97,6 +98,11 @@ foreach variant_meta : get_option('variant') endif if split_meta[0].strip() == 'blas' and split_meta[1].strip() == 'variant' blas_variant = split_meta[2].strip() + elif split_meta[0].strip() == 'x86_64' and split_meta[1].strip() == 'baseline' + if host_machine.cpu_family() != 'x86_64' + error('Variant valid only on x86_64: ' + variant_meta) + endif + x86_64_variant = split_meta[2].strip() else error('Unsupported variant key: ' + variant_meta) endif diff --git a/meson_cpu/x86/meson.build b/meson_cpu/x86/meson.build index 8c7a0fb59a57..02aa0f1f6f9f 100644 --- a/meson_cpu/x86/meson.build +++ b/meson_cpu/x86/meson.build @@ -1,6 +1,20 @@ source_root = meson.project_source_root() mod_features = 
import('features') +if x86_64_variant != '' + if x86_64_variant == 'v1' + CPU_CONF_BASELINE = 'min' + elif x86_64_variant == 'v2' + CPU_CONF_BASELINE = 'SSE42' + elif x86_64_variant == 'v3' + CPU_CONF_BASELINE = 'AVX2' + elif x86_64_variant == 'v4' + CPU_CONF_BASELINE = 'AVX512_SKX' + else + error('Unknown x86_64 variant: ' + x86_64_variant) + endif +endif + SSE = mod_features.new( 'SSE', 1, args: '-msse', test_code: files(source_root + '/numpy/distutils/checks/cpu_sse.c')[0] From 4df469574874c39171a32223a2b7ea07e0494081 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Sat, 22 Mar 2025 15:48:46 +0100 Subject: [PATCH 27/44] Include x86_64 variant in job name --- .github/workflows/wheels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 9fe23fc146c3..41bace7984a7 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -21,7 +21,7 @@ permissions: jobs: build_wheels: - name: Build wheel ${{ matrix.python }}-${{ matrix.blas }} + name: Build wheel ${{ matrix.python }}-${{ matrix.blas }}-x86_64-${{ matrix.x86_64 }} runs-on: ${{ matrix.buildplat[0] }} strategy: # Ensure that a wheel builder finishes even if another fails From bb7ff743fc6dcc1b81b60e4a120771030cf9c4ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Sat, 22 Mar 2025 15:51:53 +0100 Subject: [PATCH 28/44] Compile verbosely --- .github/workflows/wheels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 41bace7984a7..b9826092de17 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -45,4 +45,4 @@ jobs: - uses: prefix-dev/setup-pixi@v0.8.3 - name: Build wheels - run: pixi run -e ${{ matrix.blas }} build -Csetup-args=-Duse-ilp64=true -Cvariant=blas::variant::${{ matrix.blas }} -Cvariant=x86_64::baseline::${{ matrix.x86_64 }} + run: pixi run -e ${{ 
matrix.blas }} build -Csetup-args=-Duse-ilp64=true -Cvariant=blas::variant::${{ matrix.blas }} -Cvariant=x86_64::baseline::${{ matrix.x86_64 }} -Ccompile-args=-v From a7a3c0ee17f3b3d498dc167b9e8a36c9b93a7bc6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Sat, 22 Mar 2025 15:58:53 +0100 Subject: [PATCH 29/44] Try setting -march --- .github/workflows/wheels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index b9826092de17..935e6c4dd572 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -45,4 +45,4 @@ jobs: - uses: prefix-dev/setup-pixi@v0.8.3 - name: Build wheels - run: pixi run -e ${{ matrix.blas }} build -Csetup-args=-Duse-ilp64=true -Cvariant=blas::variant::${{ matrix.blas }} -Cvariant=x86_64::baseline::${{ matrix.x86_64 }} -Ccompile-args=-v + run: pixi run -e ${{ matrix.blas }} build -Csetup-args=-Duse-ilp64=true -Cvariant=blas::variant::${{ matrix.blas }} -Cvariant=x86_64::baseline::${{ matrix.x86_64 }} -Ccompile-args=-v -Csetup-args=-Dc_args=-march=x86-64-${{ matrix.x86_64}} -Csetup-args=-Dcpp_args=-march=x86-64-${{ matrix.x86_64}} From fe69754ce48c55ae09db460ddecb805c679fcc93 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Sat, 22 Mar 2025 16:01:09 +0100 Subject: [PATCH 30/44] Fix v1 flag --- .github/workflows/wheels.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 935e6c4dd572..019562237c8a 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -33,7 +33,11 @@ jobs: buildplat: - [ubuntu-22.04, manylinux_x86_64, ""] python: ["3.12"] - x86_64: [v1, v2, v3, v4] + x86_64: + - [v1, -march=x86-64] + - [v2, -march=x86-64-v2] + - [v3, -march=x86-64-v3] + - [v4, -march=x86-64-v4] steps: - name: Checkout numpy @@ -45,4 +49,4 @@ jobs: - uses: prefix-dev/setup-pixi@v0.8.3 - name: Build 
wheels - run: pixi run -e ${{ matrix.blas }} build -Csetup-args=-Duse-ilp64=true -Cvariant=blas::variant::${{ matrix.blas }} -Cvariant=x86_64::baseline::${{ matrix.x86_64 }} -Ccompile-args=-v -Csetup-args=-Dc_args=-march=x86-64-${{ matrix.x86_64}} -Csetup-args=-Dcpp_args=-march=x86-64-${{ matrix.x86_64}} + run: pixi run -e ${{ matrix.blas }} build -Csetup-args=-Duse-ilp64=true -Cvariant=blas::variant::${{ matrix.blas }} -Cvariant=x86_64::baseline::${{ matrix.x86_64[0] }} -Ccompile-args=-v -Csetup-args=-Dc_args=-march=x86-64-${{ matrix.x86_64[1] }} -Csetup-args=-Dcpp_args=-march=x86-64-${{ matrix.x86_64[1] }} From 96e645c38169133f2eb0e16b854fca75ec65fc3d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Sat, 22 Mar 2025 16:01:48 +0100 Subject: [PATCH 31/44] fix workflow name --- .github/workflows/wheels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 019562237c8a..2bc72f3e3ae5 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -21,7 +21,7 @@ permissions: jobs: build_wheels: - name: Build wheel ${{ matrix.python }}-${{ matrix.blas }}-x86_64-${{ matrix.x86_64 }} + name: Build wheel ${{ matrix.python }}-${{ matrix.blas }}-x86_64-${{ matrix.x86_64[0] }} runs-on: ${{ matrix.buildplat[0] }} strategy: # Ensure that a wheel builder finishes even if another fails From a6652296ff4bc31d766980470de03a1c5d3ada3a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Sat, 22 Mar 2025 16:02:28 +0100 Subject: [PATCH 32/44] fix -march --- .github/workflows/wheels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 2bc72f3e3ae5..e5e63d262ed0 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -49,4 +49,4 @@ jobs: - uses: prefix-dev/setup-pixi@v0.8.3 - name: Build wheels - run: pixi run -e ${{ matrix.blas }} build 
-Csetup-args=-Duse-ilp64=true -Cvariant=blas::variant::${{ matrix.blas }} -Cvariant=x86_64::baseline::${{ matrix.x86_64[0] }} -Ccompile-args=-v -Csetup-args=-Dc_args=-march=x86-64-${{ matrix.x86_64[1] }} -Csetup-args=-Dcpp_args=-march=x86-64-${{ matrix.x86_64[1] }} + run: pixi run -e ${{ matrix.blas }} build -Csetup-args=-Duse-ilp64=true -Cvariant=blas::variant::${{ matrix.blas }} -Cvariant=x86_64::baseline::${{ matrix.x86_64[0] }} -Ccompile-args=-v -Csetup-args=-Dc_args=-march=${{ matrix.x86_64[1] }} -Csetup-args=-Dcpp_args=-march=${{ matrix.x86_64[1] }} From 4994632f8e894aacfedb6ebec732b1d8051bd824 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Sat, 22 Mar 2025 16:04:07 +0100 Subject: [PATCH 33/44] fix march again --- .github/workflows/wheels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index e5e63d262ed0..fe90b656d6e1 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -49,4 +49,4 @@ jobs: - uses: prefix-dev/setup-pixi@v0.8.3 - name: Build wheels - run: pixi run -e ${{ matrix.blas }} build -Csetup-args=-Duse-ilp64=true -Cvariant=blas::variant::${{ matrix.blas }} -Cvariant=x86_64::baseline::${{ matrix.x86_64[0] }} -Ccompile-args=-v -Csetup-args=-Dc_args=-march=${{ matrix.x86_64[1] }} -Csetup-args=-Dcpp_args=-march=${{ matrix.x86_64[1] }} + run: pixi run -e ${{ matrix.blas }} build -Csetup-args=-Duse-ilp64=true -Cvariant=blas::variant::${{ matrix.blas }} -Cvariant=x86_64::baseline::${{ matrix.x86_64[0] }} -Ccompile-args=-v -Csetup-args=-Dc_args=${{ matrix.x86_64[1] }} -Csetup-args=-Dcpp_args=${{ matrix.x86_64[1] }} From d00f3199d12e701f7b05f6fc51451e8b956b296a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Wed, 26 Mar 2025 20:11:34 +0100 Subject: [PATCH 34/44] Revert some irrelevant differences --- .gitattributes | 2 - .github/workflows/circleci.yml | 25 ++ .github/workflows/codeql.yml | 75 ++++ 
.github/workflows/compiler_sanitizers.yml | 129 +++++++ .github/workflows/cygwin.yml | 81 +++++ .github/workflows/dependency-review.yml | 24 ++ .github/workflows/emscripten.yml | 85 +++++ .github/workflows/labeler.yml | 19 + .github/workflows/linux.yml | 350 ++++++++++++++++++ .github/workflows/linux_blas.yml | 410 ++++++++++++++++++++++ .github/workflows/linux_musl.yml | 69 ++++ .github/workflows/linux_qemu.yml | 280 +++++++++++++++ .github/workflows/linux_simd.yml | 289 +++++++++++++++ .github/workflows/macos.yml | 164 +++++++++ .github/workflows/mypy.yml | 74 ++++ .github/workflows/mypy_primer.yml | 99 ++++++ .github/workflows/mypy_primer_comment.yml | 103 ++++++ .github/workflows/scorecards.yml | 55 +++ .github/workflows/variant-wheels.yml | 52 +++ .github/workflows/wheels.yml | 260 +++++++++++++- .github/workflows/windows.yml | 133 +++++++ .github/workflows/windows_arm64.yml | 208 +++++++++++ .gitignore | 4 - pyproject.toml | 5 +- tools/wheels/cibw_before_build.sh | 2 - 25 files changed, 2977 insertions(+), 20 deletions(-) create mode 100644 .github/workflows/circleci.yml create mode 100644 .github/workflows/codeql.yml create mode 100644 .github/workflows/compiler_sanitizers.yml create mode 100644 .github/workflows/cygwin.yml create mode 100644 .github/workflows/dependency-review.yml create mode 100644 .github/workflows/emscripten.yml create mode 100644 .github/workflows/labeler.yml create mode 100644 .github/workflows/linux.yml create mode 100644 .github/workflows/linux_blas.yml create mode 100644 .github/workflows/linux_musl.yml create mode 100644 .github/workflows/linux_qemu.yml create mode 100644 .github/workflows/linux_simd.yml create mode 100644 .github/workflows/macos.yml create mode 100644 .github/workflows/mypy.yml create mode 100644 .github/workflows/mypy_primer.yml create mode 100644 .github/workflows/mypy_primer_comment.yml create mode 100644 .github/workflows/scorecards.yml create mode 100644 .github/workflows/variant-wheels.yml create mode 100644 
.github/workflows/windows.yml create mode 100644 .github/workflows/windows_arm64.yml diff --git a/.gitattributes b/.gitattributes index 6a4c305b253d..a7807beb597c 100644 --- a/.gitattributes +++ b/.gitattributes @@ -106,5 +106,3 @@ numpy/version.py linguist-generated ./doc/source/reference/simd/*.inc text ./numpy/_core/src/_simd/*.inc text diff=c -# SCM syntax highlighting -pixi.lock linguist-language=YAML linguist-generated=true diff --git a/.github/workflows/circleci.yml b/.github/workflows/circleci.yml new file mode 100644 index 000000000000..c0c8876b6bbe --- /dev/null +++ b/.github/workflows/circleci.yml @@ -0,0 +1,25 @@ +# To enable this workflow on a fork, comment out: +# +# if: github.repository == 'numpy/numpy' + +name: CircleCI artifact redirector + +on: [status] + +permissions: read-all + +jobs: + circleci_artifacts_redirector_job: + runs-on: ubuntu-latest + if: "github.repository == 'numpy/numpy' && !contains(github.event.head_commit.message, '[circle skip]') && !contains(github.event.head_commit.message, '[skip circle]') && github.event.context == 'ci/circleci: build'" + name: Run CircleCI artifacts redirector + permissions: + statuses: write + steps: + - name: GitHub Action step + uses: larsoner/circleci-artifacts-redirector-action@4e13a10d89177f4bfc8007a7064bdbeda848d8d1 # master + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + api-token: ${{ secrets.CIRCLE_TOKEN }} + artifact-path: 0/doc/build/html/index.html + circleci-jobs: build diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 000000000000..a6665adafed3 --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,75 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. 
+# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: "CodeQL" + +on: + push: + branches: ["main"] + pull_request: + # The branches below must be a subset of the branches above + branches: ["main"] + schedule: + - cron: "0 0 * * 1" + +permissions: + contents: read + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: ["python"] + # CodeQL supports [ $supported-codeql-languages ] + # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support + + steps: + - name: Checkout repository + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + persist-credentials: false + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@5f8171a638ada777af81d42b55959a643bb29017 # v3.28.12 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@5f8171a638ada777af81d42b55959a643bb29017 # v3.28.12 + + # â„šī¸ Command-line programs to run using the OS shell. + # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + + # If the Autobuild fails above, remove it and uncomment the following three lines. 
+ # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. + + # - run: | + # echo "Run, Build Application using script" + # ./location_of_script_within_repo/buildscript.sh + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@5f8171a638ada777af81d42b55959a643bb29017 # v3.28.12 + with: + category: "/language:${{matrix.language}}" diff --git a/.github/workflows/compiler_sanitizers.yml b/.github/workflows/compiler_sanitizers.yml new file mode 100644 index 000000000000..9452289239bc --- /dev/null +++ b/.github/workflows/compiler_sanitizers.yml @@ -0,0 +1,129 @@ +name: Test with compiler sanitizers + +on: + push: + branches: + - main + pull_request: + branches: + - main + - maintenance/** + +defaults: + run: + shell: bash + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read # to fetch code (actions/checkout) + +jobs: + clang_ASAN: + # To enable this workflow on a fork, comment out: + if: github.repository == 'numpy/numpy' + runs-on: macos-latest + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + - name: Set up pyenv + run: | + git clone https://github.com/pyenv/pyenv.git "$HOME/.pyenv" + PYENV_ROOT="$HOME/.pyenv" + PYENV_BIN="$PYENV_ROOT/bin" + PYENV_SHIMS="$PYENV_ROOT/shims" + echo "$PYENV_BIN" >> $GITHUB_PATH + echo "$PYENV_SHIMS" >> $GITHUB_PATH + echo "PYENV_ROOT=$PYENV_ROOT" >> $GITHUB_ENV + - name: Check pyenv is working + run: + pyenv --version + - name: Set up LLVM + run: | + brew install llvm@19 + LLVM_PREFIX=$(brew --prefix llvm@19) + echo CC="$LLVM_PREFIX/bin/clang" >> $GITHUB_ENV + echo CXX="$LLVM_PREFIX/bin/clang++" >> $GITHUB_ENV + echo LDFLAGS="-L$LLVM_PREFIX/lib" >> $GITHUB_ENV + echo CPPFLAGS="-I$LLVM_PREFIX/include" >> $GITHUB_ENV + - name: Build Python with address 
sanitizer + run: | + CONFIGURE_OPTS="--with-address-sanitizer" pyenv install 3.13t + pyenv global 3.13t + - name: Install dependencies + run: | + # TODO: remove when a released cython supports free-threaded python + pip install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple cython + pip install -r requirements/build_requirements.txt + pip install -r requirements/ci_requirements.txt + pip install -r requirements/test_requirements.txt + # xdist captures stdout/stderr, but we want the ASAN output + pip uninstall -y pytest-xdist + - name: Build + run: + python -m spin build -j2 -- -Db_sanitize=address + - name: Test + run: | + # pass -s to pytest to see ASAN errors and warnings, otherwise pytest captures them + ASAN_OPTIONS=detect_leaks=0:symbolize=1:strict_init_order=true:allocator_may_return_null=1 \ + python -m spin test -- -v -s --timeout=600 --durations=10 + + clang_TSAN: + # To enable this workflow on a fork, comment out: + if: github.repository == 'numpy/numpy' + runs-on: macos-latest + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + - name: Set up pyenv + run: | + git clone https://github.com/pyenv/pyenv.git "$HOME/.pyenv" + PYENV_ROOT="$HOME/.pyenv" + PYENV_BIN="$PYENV_ROOT/bin" + PYENV_SHIMS="$PYENV_ROOT/shims" + echo "$PYENV_BIN" >> $GITHUB_PATH + echo "$PYENV_SHIMS" >> $GITHUB_PATH + echo "PYENV_ROOT=$PYENV_ROOT" >> $GITHUB_ENV + - name: Check pyenv is working + run: + pyenv --version + - name: Set up LLVM + run: | + brew install llvm@19 + LLVM_PREFIX=$(brew --prefix llvm@19) + echo CC="$LLVM_PREFIX/bin/clang" >> $GITHUB_ENV + echo CXX="$LLVM_PREFIX/bin/clang++" >> $GITHUB_ENV + echo LDFLAGS="-L$LLVM_PREFIX/lib" >> $GITHUB_ENV + echo CPPFLAGS="-I$LLVM_PREFIX/include" >> $GITHUB_ENV + - name: Build Python with thread sanitizer support + run: | + # free-threaded Python is much more likely to trigger races + 
CONFIGURE_OPTS="--with-thread-sanitizer" pyenv install 3.13t + pyenv global 3.13t + - name: Install dependencies + run: | + # TODO: remove when a released cython supports free-threaded python + pip install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple cython + pip install -r requirements/build_requirements.txt + pip install -r requirements/ci_requirements.txt + pip install -r requirements/test_requirements.txt + # xdist captures stdout/stderr, but we want the TSAN output + pip uninstall -y pytest-xdist + - name: Build + run: + python -m spin build -j2 -- -Db_sanitize=thread + - name: Test + run: | + # These tests are slow, so only run tests in files that do "import threading" to make them count + TSAN_OPTIONS="allocator_may_return_null=1:suppressions=$GITHUB_WORKSPACE/tools/ci/tsan_suppressions.txt" \ + python -m spin test \ + `find numpy -name "test*.py" | xargs grep -l "import threading" | tr '\n' ' '` \ + -- -v -s --timeout=600 --durations=10 diff --git a/.github/workflows/cygwin.yml b/.github/workflows/cygwin.yml new file mode 100644 index 000000000000..174d04efb567 --- /dev/null +++ b/.github/workflows/cygwin.yml @@ -0,0 +1,81 @@ +name: Test on Cygwin +on: + pull_request: + branches: + - main + - maintenance/** + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read # to fetch code (actions/checkout) + +jobs: + cygwin_build_test: + runs-on: windows-latest + # To enable this workflow on a fork, comment out: + if: github.repository == 'numpy/numpy' + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + - name: Install Cygwin + uses: egor-tensin/setup-cygwin@d2c752bab416d4b0662591bd366fc2686297c82d # v4 + with: + platform: x86_64 + install-dir: 'C:\tools\cygwin' + packages: >- + python39=3.9.16-1 python39-devel=3.9.16-1 python39-pip 
python-pip-wheel + python-setuptools-wheel liblapack-devel liblapack0 gcc-fortran + gcc-g++ git dash cmake ninja + - name: Set Windows PATH + uses: egor-tensin/cleanup-path@f04bc953e6823bf491cc0bdcff959c630db1b458 # v4.0.1 + with: + dirs: 'C:\tools\cygwin\bin;C:\tools\cygwin\lib\lapack' + - name: Verify that bash is Cygwin bash + run: | + command bash + bash -c "uname -svrmo" + - name: Tell Cygwin's git about this repository. + run: | + dash -c "which git; /usr/bin/git config --system --add safe.directory /cygdrive/d/a/numpy/numpy" + - name: Verify python version + # Make sure it's the Cygwin one, not a Windows one + run: | + dash -c "which python3.9; /usr/bin/python3.9 --version -V" + - name: Build NumPy wheel + run: | + dash -c "/usr/bin/python3.9 -m pip install build pytest hypothesis pytest-xdist Cython meson" + dash -c "/usr/bin/python3.9 -m build . --wheel -Csetup-args=-Dblas=blas -Csetup-args=-Dlapack=lapack -Csetup-args=-Dcpu-dispatch=none -Csetup-args=-Dcpu-baseline=native" + - name: Install NumPy from wheel + run: | + bash -c "/usr/bin/python3.9 -m pip install dist/numpy-*cp39*.whl" + - name: Rebase NumPy compiled extensions + run: | + dash "tools/rebase_installed_dlls_cygwin.sh" 3.9 + - name: Run NumPy test suite + shell: "C:\\tools\\cygwin\\bin\\bash.exe -o igncr -eo pipefail {0}" + run: | + cd tools + /usr/bin/python3.9 -m pytest --pyargs numpy -n2 -m "not slow" + - name: Upload wheel if tests fail + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + if: failure() + with: + name: numpy-cygwin-wheel + path: dist/numpy-*cp39*.whl + - name: Check the extension modules on failure + if: failure() + run: | + dash -c "/usr/bin/python3.9 -m pip show numpy" + dash -c "/usr/bin/python3.9 -m pip show -f numpy | grep .dll" + dash -c "/bin/tr -d '\r' list_dlls_unix.sh" + dash "list_dlls_unix.sh" 3.9 + - name: Print installed package versions on failure + if: failure() + run: | + cygcheck -c diff --git 
a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml new file mode 100644 index 000000000000..58bf4a40055b --- /dev/null +++ b/.github/workflows/dependency-review.yml @@ -0,0 +1,24 @@ +# Dependency Review Action +# +# This Action will scan dependency manifest files that change as part of a Pull Request, surfacing known-vulnerable versions of the packages declared or updated in the PR. Once installed, if the workflow run is marked as required, PRs introducing known-vulnerable packages will be blocked from merging. +# +# Source repository: https://github.com/actions/dependency-review-action +# Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement +name: 'Dependency Review' +on: [pull_request] + +permissions: + contents: read + +jobs: + dependency-review: + runs-on: ubuntu-latest + steps: + - name: 'Checkout Repository' + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + persist-credentials: false + - name: 'Dependency Review' + uses: actions/dependency-review-action@3b139cfc5fae8b618d3eae3675e383bb1769c019 # v4.5.0 + with: + allow-ghsas: GHSA-cx63-2mw6-8hw5 diff --git a/.github/workflows/emscripten.yml b/.github/workflows/emscripten.yml new file mode 100644 index 000000000000..fe8d5376bd96 --- /dev/null +++ b/.github/workflows/emscripten.yml @@ -0,0 +1,85 @@ +name: Test Emscripten/Pyodide build + +on: + pull_request: + branches: + - main + - maintenance/** + # Note: this workflow gets triggered on the same schedule as the + # wheels.yml workflow to upload WASM wheels to Anaconda.org. 
+ schedule: + # ┌───────────── minute (0 - 59) + # │ ┌───────────── hour (0 - 23) + # │ │ ┌───────────── day of the month (1 - 31) + # │ │ │ ┌───────────── month (1 - 12 or JAN-DEC) + # │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT) + # │ │ │ │ │ + - cron: "42 2 * * SUN,WED" + workflow_dispatch: + inputs: + push_wheels: + # Can be 'true' or 'false'. Default is 'false'. + # Warning: this will overwrite existing wheels. + description: > + Push wheels to Anaconda.org if the build succeeds + required: false + default: 'false' + +env: + FORCE_COLOR: 3 + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + + +jobs: + build-wasm-emscripten: + permissions: + contents: read # to fetch code (actions/checkout) + name: Build NumPy distribution for Pyodide + runs-on: ubuntu-22.04 + # To enable this workflow on a fork, comment out: + if: github.repository == 'numpy/numpy' + steps: + - name: Checkout NumPy + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + + - uses: pypa/cibuildwheel@42728e866bbc80d544a70825bd9990b9a26f1a50 # 2.23.1 + env: + CIBW_PLATFORM: pyodide + + - name: Upload wheel artifact(s) + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: cp312-pyodide_wasm32 + path: ./wheelhouse/*.whl + if-no-files-found: error + + # Push to https://anaconda.org/scientific-python-nightly-wheels/numpy + # WARNING: this job will overwrite any existing WASM wheels. 
+ upload-wheels: + name: Upload NumPy WASM wheels to Anaconda.org + runs-on: ubuntu-22.04 + permissions: {} + needs: [build-wasm-emscripten] + if: >- + (github.repository == 'numpy/numpy') && + (github.event_name == 'workflow_dispatch' && github.event.inputs.push_wheels == 'true') || + (github.event_name == 'schedule') + steps: + - name: Download wheel artifact(s) + uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1 + with: + path: wheelhouse/ + merge-multiple: true + + - name: Push to Anaconda PyPI index + uses: scientific-python/upload-nightly-action@82396a2ed4269ba06c6b2988bb4fd568ef3c3d6b # v0.6.1 + with: + artifacts_path: wheelhouse/ + anaconda_nightly_upload_token: ${{ secrets.NUMPY_NIGHTLY_UPLOAD_TOKEN }} diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml new file mode 100644 index 000000000000..7d2edc869893 --- /dev/null +++ b/.github/workflows/labeler.yml @@ -0,0 +1,19 @@ +name: "Pull Request Labeler" +on: + pull_request_target: + types: [opened] + +permissions: {} + +jobs: + pr-labeler: + runs-on: ubuntu-latest + permissions: + pull-requests: write # to add labels + steps: + - name: Label the PR + uses: gerrymanoim/pr-prefix-labeler@c8062327f6de59a9ae1c19f7f07cacd0b976b6fa # v3 + continue-on-error: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + if: github.repository == 'numpy/numpy' diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml new file mode 100644 index 000000000000..b5bd098b7edd --- /dev/null +++ b/.github/workflows/linux.yml @@ -0,0 +1,350 @@ +name: Linux tests + +# This file is meant for testing across supported Python versions, build types +# and interpreters (PyPy, python-dbg, a pre-release Python in summer time), +# build-via-sdist, run benchmarks, measure code coverage, and other build +# options. 
+ +on: + push: + branches: + # coverage comparison in the "full" step needs to run on main after merges + - main + pull_request: + branches: + - main + - maintenance/** + +defaults: + run: + shell: bash + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read # to fetch code (actions/checkout) + +jobs: + lint: + # To enable this job and subsequent jobs on a fork, comment out: + if: github.repository == 'numpy/numpy' && github.event_name != 'push' + runs-on: ubuntu-latest + continue-on-error: true + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-depth: 0 + persist-credentials: false + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: '3.11' + - name: Install linter requirements + run: + python -m pip install -r requirements/linter_requirements.txt + - name: Run linter on PR + env: + BASE_REF: ${{ github.base_ref }} + run: + python tools/linter.py + + smoke_test: + # To enable this job on a fork, comment out: + if: github.repository == 'numpy/numpy' + runs-on: ubuntu-latest + env: + MESON_ARGS: "-Dallow-noblas=true -Dcpu-baseline=none -Dcpu-dispatch=none" + strategy: + matrix: + version: ["3.11", "3.12", "3.13", "3.13t"] + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + - uses: astral-sh/setup-uv@f94ec6bedd8674c4426838e6b50417d36b6ab231 + with: + python-version: ${{ matrix.version }} + enable-cache: false + - run: + uv pip install --python=${{ matrix.version }} pip + # TODO: remove cython nightly install when cython does a release + - name: Install nightly Cython + if: matrix.version == '3.13t' + run: | + pip install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple cython + - uses: ./.github/meson_actions + + 
pypy: + needs: [smoke_test] + runs-on: ubuntu-latest + if: github.event_name != 'push' + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: 'pypy3.11-v7.3.19' + - name: Setup using scipy-openblas + run: | + python -m pip install -r requirements/ci_requirements.txt + spin config-openblas --with-scipy-openblas=32 + - uses: ./.github/meson_actions + + debug: + needs: [smoke_test] + runs-on: ubuntu-24.04 + if: github.event_name != 'push' + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + - name: Install debug Python + run: | + sudo apt-get update + sudo apt-get install python3-dbg ninja-build + - name: Build NumPy and install into venv + run: | + python3-dbg -m venv venv + source venv/bin/activate + pip install -U pip + pip install . 
-v -Csetup-args=-Dbuildtype=debug -Csetup-args=-Dallow-noblas=true + - name: Install test dependencies + run: | + source venv/bin/activate + pip install -r requirements/test_requirements.txt + - name: Run test suite + run: | + source venv/bin/activate + cd tools + pytest --timeout=600 --durations=10 --pyargs numpy -m "not slow" + + full: + # Install as editable, then run the full test suite with code coverage + needs: [smoke_test] + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: '3.11' + - name: Install build and test dependencies from PyPI + run: | + pip install -r requirements/build_requirements.txt + pip install -r requirements/test_requirements.txt + - name: Install gfortran and setup OpenBLAS (MacPython build) + run: | + set -xe + sudo apt update + sudo apt install gfortran libgfortran5 + python -m pip install -r requirements/ci32_requirements.txt + mkdir -p ./.openblas + python -c"import scipy_openblas32 as ob32; print(ob32.get_pkg_config())" > ./.openblas/scipy-openblas.pc + + - name: Install as editable + env: + PKG_CONFIG_PATH: ${{ github.workspace }}/.openblas + run: | + pip install -e . 
--no-build-isolation + - name: Run full test suite + run: | + pytest numpy --durations=10 --timeout=600 --cov-report=html:build/coverage + # TODO: gcov + env: + PYTHONOPTIMIZE: 2 + + + aarch64_test: + needs: [smoke_test] + if: github.repository == 'numpy/numpy' + runs-on: ubuntu-22.04-arm + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: '3.11' + + - name: Install Python dependencies + run: | + python -m pip install -r requirements/build_requirements.txt + python -m pip install -r requirements/test_requirements.txt + python -m pip install -r requirements/ci32_requirements.txt + mkdir -p ./.openblas + python -c"import scipy_openblas32 as ob32; print(ob32.get_pkg_config())" > ./.openblas/scipy-openblas.pc + + - name: Build + env: + PKG_CONFIG_PATH: ${{ github.workspace }}/.openblas + run: | + spin build + + - name: Test + run: | + spin test -j2 -m full -- --timeout=600 --durations=10 + + benchmark: + needs: [smoke_test] + runs-on: ubuntu-latest + if: github.event_name != 'push' + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: '3.11' + - name: Install build and benchmarking dependencies + run: | + sudo apt-get update + sudo apt-get install libopenblas-dev ninja-build + pip install asv virtualenv packaging -r requirements/build_requirements.txt + - name: Install NumPy + run: | + spin build -- -Dcpu-dispatch=none + # Ensure to keep the below steps as single-line bash commands (it's a + # workaround for asv#1333, and it may have side-effects on multi-line commands) + - name: Appease asv's need for machine info + shell: 'script -q 
-e -c "bash --noprofile --norc -eo pipefail {0}"' + run: | + asv machine --yes --config benchmarks/asv.conf.json + - name: Run benchmarks + shell: 'script -q -e -c "bash --noprofile --norc -eo pipefail {0}"' + run: | + spin bench --quick + # These are run on CircleCI + # - name: Check docstests + # shell: 'script -q -e -c "bash --noprofile --norc -eo pipefail {0}"' + # run: | + # pip install scipy-doctest==1.6.0 hypothesis==6.104.1 matplotlib scipy pytz pandas + # spin check-docs -v + # spin check-tutorials -v + + sdist: + needs: [smoke_test] + runs-on: ubuntu-latest + if: github.event_name != 'push' + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: '3.11' + - name: Install gfortran and setup OpenBLAS (sdist build) + run: | + set -xe + python -m pip install -r requirements/ci_requirements.txt + mkdir -p ./.openblas + python -c"import scipy_openblas64 as ob64; print(ob64.get_pkg_config())" > ./.openblas/scipy-openblas.pc + - name: Build a wheel via an sdist + env: + PKG_CONFIG_PATH: ${{ github.workspace }}/.openblas + run: | + pip install build + python -m build + pip install dist/numpy*.whl + - name: Install test dependencies + run: | + pip install -r requirements/test_requirements.txt + - name: Run test suite + run: | + cd tools + pytest --pyargs numpy -m "not slow" + + array_api_tests: + needs: [smoke_test] + runs-on: ubuntu-latest + if: github.event_name != 'push' + steps: + - name: Checkout NumPy + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + - name: Checkout array-api-tests + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + repository: data-apis/array-api-tests + ref: 
'827edd804bcace9d64176b8115138d29ae3e8dec' # Latest commit as of 2024-07-30 + submodules: 'true' + path: 'array-api-tests' + persist-credentials: false + - name: Set up Python + uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: '3.11' + - name: Install build and test dependencies from PyPI + run: | + python -m pip install -r requirements/build_requirements.txt + python -m pip install -r requirements/test_requirements.txt + python -m pip install -r array-api-tests/requirements.txt + - name: Build and install NumPy + run: | + python -m pip install . -v -Csetup-args=-Dallow-noblas=true -Csetup-args=-Dcpu-baseline=none -Csetup-args=-Dcpu-dispatch=none + - name: Run the test suite + env: + ARRAY_API_TESTS_MODULE: numpy + PYTHONWARNINGS: 'ignore::UserWarning::,ignore::DeprecationWarning::,ignore::RuntimeWarning::' + run: | + cd ${GITHUB_WORKSPACE}/array-api-tests + pytest array_api_tests -v -c pytest.ini --ci --max-examples=100 --derandomize --disable-deadline --xfails-file ${GITHUB_WORKSPACE}/tools/ci/array-api-xfails.txt + + custom_checks: + needs: [smoke_test] + runs-on: ubuntu-latest + if: github.event_name != 'push' + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: '3.11' + - name: Install build and test dependencies from PyPI + run: | + pip install -r requirements/build_requirements.txt + pip install -r requirements/test_requirements.txt + pip install vulture + - name: Build and install NumPy + run: | + # Install using the fastest way to build (no BLAS, no SIMD) + spin build -j2 -- -Dallow-noblas=true -Dcpu-baseline=none -Dcpu-dispatch=none + - name: Check build-internal dependencies + run: | + ninja -C build -t missingdeps + - name: Check installed test and stub files + run: | + python 
tools/check_installed_files.py $(find ./build-install -path '*/site-packages/numpy') + - name: Check for unreachable code paths in Python modules + run: | + # Need the explicit `bash -c` here because `grep` returns exit code 1 for no matches + bash -c "! vulture . --min-confidence 100 --exclude doc/,numpy/distutils/,vendored-meson/ | grep 'unreachable'" + - name: Check usage of install_tag + run: | + rm -rf build-install + ./vendored-meson/meson/meson.py install -C build --destdir ../build-install --tags=runtime,python-runtime,devel + python tools/check_installed_files.py $(find ./build-install -path '*/site-packages/numpy') --no-tests diff --git a/.github/workflows/linux_blas.yml b/.github/workflows/linux_blas.yml new file mode 100644 index 000000000000..a2c5e56eaa9b --- /dev/null +++ b/.github/workflows/linux_blas.yml @@ -0,0 +1,410 @@ +name: BLAS tests (Linux) + +# This file is meant for testing different BLAS/LAPACK flavors and build +# options on Linux. All other yml files for Linux will only test without BLAS +# (mostly because that's easier and faster to build) or with the same 64-bit +# OpenBLAS build that is used in the wheel jobs. +# +# Jobs and their purpose: +# +# - openblas32_stable_nightly: +# Uses the 32-bit OpenBLAS builds, both the latest stable release +# and a nightly build. +# - openblas_no_pkgconfig_fedora: +# Test OpenBLAS on Fedora. Fedora doesn't ship .pc files for OpenBLAS, +# hence this exercises the "system dependency" detection method. +# - flexiblas_fedora: +# Tests FlexiBLAS (the default on Fedora for its own packages), via +# pkg-config. FlexiBLAS allows runtime switching of BLAS/LAPACK +# libraries, which is a useful capability (not tested in this job). +# - openblas_cmake: +# Tests whether OpenBLAS LP64 is detected correctly when only CMake +# and not pkg-config is installed. +# - netlib-debian: +# Installs libblas/liblapack, which in Debian contains libcblas within +# libblas. 
+# - netlib-split: +# Installs vanilla Netlib blas/lapack with separate libcblas, which is +# the last option tried in auto-detection. +# - mkl: +# Tests MKL installed from PyPI (because easiest/fastest, if broken) in +# 3 ways: both LP64 and ILP64 via pkg-config, and then using the +# Single Dynamic Library (SDL, or `libmkl_rt`). +# - blis: +# Simple test for LP64 via pkg-config +# - atlas: +# Simple test for LP64 via pkg-config + +on: + pull_request: + branches: + - main + - maintenance/** + +defaults: + run: + shell: bash + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read # to fetch code (actions/checkout) + +jobs: + openblas32_stable_nightly: + # To enable this workflow on a fork, comment out: + if: github.repository == 'numpy/numpy' + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + USE_NIGHTLY_OPENBLAS: [false, true] + env: + USE_NIGHTLY_OPENBLAS: ${{ matrix.USE_NIGHTLY_OPENBLAS }} + name: "Test Linux (${{ matrix.USE_NIGHTLY_OPENBLAS && 'nightly' || 'stable' }} OpenBLAS)" + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: '3.11' + + - name: Install dependencies + run: | + pip install -r requirements/build_requirements.txt + # Install OpenBLAS + if [[ $USE_NIGHTLY_OPENBLAS == "true" ]]; then + python -m pip install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple scipy-openblas32 + else + python -m pip install -r requirements/ci32_requirements.txt + fi + mkdir -p ./.openblas + python -c"import scipy_openblas32 as ob32; print(ob32.get_pkg_config())" > ./.openblas/scipy-openblas.pc + echo "PKG_CONFIG_PATH=${{ github.workspace }}/.openblas" >> $GITHUB_ENV + ld_library_path=$(python -c"import scipy_openblas32 as 
ob32; print(ob32.get_lib_dir())") + echo "LD_LIBRARY_PATH=$ld_library_path" >> $GITHUB_ENV + + - name: Build + shell: 'script -q -e -c "bash --noprofile --norc -eo pipefail {0}"' + env: + TERM: xterm-256color + run: + spin build -- --werror -Dallow-noblas=false + + - name: Check build-internal dependencies + run: + ninja -C build -t missingdeps + + - name: Check installed test and stub files + run: + python tools/check_installed_files.py $(find ./build-install -path '*/site-packages/numpy') + - name: Ensure scipy-openblas + run: | + set -ex + spin python tools/check_openblas_version.py 0.3.26 + + - name: Test + shell: 'script -q -e -c "bash --noprofile --norc -eo pipefail {0}"' + env: + TERM: xterm-256color + run: | + pip install pytest pytest-xdist hypothesis typing_extensions pytest-timeout + spin test -j auto -- --timeout=600 --durations=10 + + + openblas_no_pkgconfig_fedora: + if: github.repository == 'numpy/numpy' + runs-on: ubuntu-latest + container: fedora:39 + name: "OpenBLAS (Fedora, no pkg-config, LP64/ILP64)" + steps: + - name: Install system dependencies + run: | + dnf install git gcc-gfortran g++ python3-devel openblas-devel -y + + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + + - name: Install dependencies + run: | + pip install -r requirements/build_requirements.txt + pip install pytest hypothesis typing_extensions pytest-timeout + + - name: Build (LP64) + run: spin build -- -Dblas=openblas -Dlapack=openblas -Ddisable-optimization=true -Dallow-noblas=false + + - name: Test + run: spin test -- numpy/linalg --timeout=600 --durations=10 + + - name: Build (ILP64) + run: | + rm -rf build + spin build -- -Duse-ilp64=true -Ddisable-optimization=true -Dallow-noblas=false + + - name: Test + run: spin test -- numpy/linalg --timeout=600 --durations=10 + + + flexiblas_fedora: + if: github.repository == 'numpy/numpy' + runs-on: ubuntu-latest + container: 
fedora:39 + name: "FlexiBLAS (LP64, ILP64 on Fedora)" + steps: + - name: Install system dependencies + run: | + dnf install git gcc-gfortran g++ python3-devel flexiblas-devel -y + + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + + - name: Install dependencies + run: | + pip install -r requirements/build_requirements.txt + pip install pytest hypothesis typing_extensions pytest-timeout + + - name: Build + run: spin build -- -Ddisable-optimization=true -Dallow-noblas=false + + - name: Test + run: spin test -- numpy/linalg --timeout=600 --durations=10 + + - name: Build (ILP64) + run: | + rm -rf build + spin build -- -Ddisable-optimization=true -Duse-ilp64=true -Dallow-noblas=false + + - name: Test (ILP64) + run: spin test -- numpy/linalg --timeout=600 --durations=10 + + + openblas_cmake: + if: github.repository == 'numpy/numpy' + runs-on: ubuntu-latest + name: "OpenBLAS with CMake" + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: '3.11' + + - name: Install dependencies + run: | + pip install -r requirements/build_requirements.txt + pip install pytest pytest-xdist hypothesis typing_extensions pytest-timeout + sudo apt-get update + sudo apt-get install libopenblas-dev cmake + sudo apt-get remove pkg-config + + - name: Build + run: spin build -- -Ddisable-optimization=true -Dallow-noblas=false + + - name: Test + run: spin test -j auto -- numpy/linalg --timeout=600 --durations=10 + + + netlib-debian: + if: github.repository == 'numpy/numpy' + runs-on: ubuntu-latest + name: "Debian libblas/liblapack" + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + 
persist-credentials: false + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: '3.11' + + - name: Install dependencies + run: | + pip install -r requirements/build_requirements.txt + sudo apt-get update + sudo apt-get install liblapack-dev pkg-config + + - name: Build + run: | + spin build -- -Ddisable-optimization=true -Dallow-noblas=false + + - name: Test + run: | + pip install pytest pytest-xdist hypothesis typing_extensions pytest-timeout + spin test -j auto -- numpy/linalg --timeout=600 --durations=10 + + + netlib-split: + if: github.repository == 'numpy/numpy' + runs-on: ubuntu-latest + container: opensuse/tumbleweed + name: "OpenSUSE Netlib BLAS/LAPACK" + steps: + - name: Install system dependencies + run: | + # No blas.pc on OpenSUSE as of Nov 2023, so no need to install pkg-config. + # If it is needed in the future, use install name `pkgconf-pkg-config` + zypper install -y git gcc-c++ python3-pip python3-devel blas cblas lapack + + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + + - name: Install PyPI dependencies + run: | + pip install --break-system-packages -r requirements/build_requirements.txt + + - name: Build + run: | + spin build -- -Dblas=blas -Dlapack=lapack -Ddisable-optimization=true -Dallow-noblas=false + + - name: Test + run: | + pip install --break-system-packages pytest pytest-xdist hypothesis typing_extensions pytest-timeout + spin test -j auto -- numpy/linalg --timeout=600 --durations=10 + + + mkl: + if: github.repository == 'numpy/numpy' + runs-on: ubuntu-latest + name: "MKL (LP64, ILP64, SDL)" + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: '3.11' + + - 
name: Install dependencies + run: | + pip install -r requirements/build_requirements.txt + pip install pytest pytest-xdist hypothesis typing_extensions pytest-timeout + pip install mkl mkl-devel + + - name: Repair MKL pkg-config files and symlinks + run: | + # MKL 2023.2 works when installed from conda-forge (except for `-iomp` + # and `-tbb` pkg-config files), Spack, or with the standalone Intel + # installer. The standalone installer is the worst option, since it's + # large and clumsy to install and requires running a setvars.sh script + # before things work. The PyPI MKL packages are broken and need the + # fixes in this step. For details, see + # https://github.com/conda-forge/intel_repack-feedstock/issues/34 + cd $Python3_ROOT_DIR/lib/pkgconfig + sed -i 's/\/intel64//g' mkl*.pc + # add the expected .so -> .so.2 symlinks to fix linking + cd .. + for i in $( ls libmkl*.so.2 ); do ln -s $i ${i%.*}; done + + - name: Build with defaults (LP64) + run: | + pkg-config --libs mkl-dynamic-lp64-seq # check link flags + spin build -- -Ddisable-optimization=true -Dallow-noblas=false + + - name: Test + run: spin test -- numpy/linalg --timeout=600 --durations=10 + + - name: Build with ILP64 + run: | + git clean -xdf > /dev/null + pkg-config --libs mkl-dynamic-ilp64-seq + spin build -- -Duse-ilp64=true -Ddisable-optimization=true -Dallow-noblas=false + + - name: Test + run: spin test -- numpy/linalg --timeout=600 --durations=10 + + - name: Build without pkg-config (default options, SDL) + run: | + git clean -xdf > /dev/null + pushd $Python3_ROOT_DIR/lib/pkgconfig + rm mkl*.pc + popd + export MKLROOT=$Python3_ROOT_DIR + spin build -- -Ddisable-optimization=true -Dallow-noblas=false + + - name: Test + run: spin test -- numpy/linalg --timeout=600 --durations=10 + + blis: + if: github.repository == 'numpy/numpy' + runs-on: ubuntu-latest + name: "BLIS" + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + 
fetch-tags: true + persist-credentials: false + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: '3.11' + + - name: Install dependencies + run: | + pip install -r requirements/build_requirements.txt + pip install pytest pytest-xdist hypothesis typing_extensions pytest-timeout + sudo apt-get update + sudo apt-get install libblis-dev libopenblas-dev pkg-config + + - name: Add BLIS pkg-config file + run: | + # Needed because blis.pc missing in Debian: + # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=989076 + # The alternative here would be to use another distro or Miniforge + sudo cp tools/ci/_blis_debian.pc /usr/lib/x86_64-linux-gnu/pkgconfig/blis.pc + # Check if the patch works: + pkg-config --libs blis + pkg-config --cflags blis + + - name: Build + run: spin build -- -Dblas=blis -Ddisable-optimization=true -Dallow-noblas=false + + - name: Test + run: spin test -- numpy/linalg --timeout=600 --durations=10 + + atlas: + if: github.repository == 'numpy/numpy' + runs-on: ubuntu-latest + name: "ATLAS" + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: '3.11' + + - name: Install dependencies + run: | + pip install -r requirements/build_requirements.txt + pip install pytest pytest-xdist hypothesis typing_extensions pytest-timeout + sudo apt-get update + sudo apt-get install libatlas-base-dev pkg-config + + - name: Build + run: spin build -- -Dblas=blas-atlas -Dlapack=lapack-atlas -Ddisable-optimization=true -Dallow-noblas=false + + - name: Test + run: spin test -- numpy/linalg + diff --git a/.github/workflows/linux_musl.yml b/.github/workflows/linux_musl.yml new file mode 100644 index 000000000000..547c031bc84b --- /dev/null +++ b/.github/workflows/linux_musl.yml @@ -0,0 +1,69 @@ +name: Test 
musllinux_x86_64 + +on: + pull_request: + branches: + - main + - maintenance/** + + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + + +permissions: + contents: read # to fetch code (actions/checkout) + + +jobs: + musllinux_x86_64: + runs-on: ubuntu-latest + # To enable this workflow on a fork, comment out: + if: github.repository == 'numpy/numpy' + container: + # Use container used for building musllinux wheels + # it has git installed, all the pythons, etc + image: quay.io/pypa/musllinux_1_2_x86_64 + + steps: + - name: setup + run: | + apk update --quiet + + # using git commands to clone because versioneer doesn't work when + # actions/checkout is used for the clone step in a container + + git config --global --add safe.directory $PWD + + if [ $GITHUB_EVENT_NAME != pull_request ]; then + git clone --recursive --branch=$GITHUB_REF_NAME https://github.com/${GITHUB_REPOSITORY}.git $GITHUB_WORKSPACE + git reset --hard $GITHUB_SHA + else + git clone --recursive https://github.com/${GITHUB_REPOSITORY}.git $GITHUB_WORKSPACE + git fetch origin $GITHUB_REF:my_ref_name + git checkout $GITHUB_BASE_REF + git -c user.email="you@example.com" merge --no-commit my_ref_name + fi + git submodule update --init + + ln -s /usr/local/bin/python3.11 /usr/local/bin/python + + - name: test-musllinux_x86_64 + env: + PKG_CONFIG_PATH: ${{ github.workspace }}/.openblas + run: | + python -m venv test_env + source test_env/bin/activate + + pip install -r requirements/ci_requirements.txt + pip install -r requirements/build_requirements.txt -r requirements/test_requirements.txt + + # use meson to build and test + spin build --with-scipy-openblas=64 + spin test -j auto -- --timeout=600 --durations=10 + + - name: Meson Log + shell: bash + run: | + cat build/meson-logs/meson-log.txt diff --git a/.github/workflows/linux_qemu.yml b/.github/workflows/linux_qemu.yml new file mode 100644 index 000000000000..6324de0ac85c --- /dev/null +++ 
b/.github/workflows/linux_qemu.yml @@ -0,0 +1,280 @@ +# Meson's Python module doesn't support crosscompiling, +# and python dependencies may be another potential hurdle. +# There might also be a need to run runtime tests during configure time. +# +# The recommended practice is to rely on Docker to provide the x86_64 crosscompile toolchain, +# enabling native execution via binfmt. +# +# In simpler terms, everything except the crosscompile toolchain will be emulated. + +name: Linux Qemu tests + +on: + pull_request: + branches: + - main + - maintenance/** + workflow_dispatch: + +defaults: + run: + shell: bash + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + linux_qemu: + # Only workflow_dispatch is enabled on forks. + # To enable this job and subsequent jobs on a fork for other events, comment out: + if: github.repository == 'numpy/numpy' || github.event_name == 'workflow_dispatch' + runs-on: ubuntu-22.04 + continue-on-error: true + strategy: + fail-fast: false + matrix: + BUILD_PROP: + - [ + "armhf", + "arm-linux-gnueabihf", + "arm32v7/ubuntu:22.04", + "-Dallow-noblas=true", + # test_unary_spurious_fpexception is currently skipped + # FIXME(@seiko2plus): Requires confirmation for the following issue: + # The presence of an FP invalid exception caused by sqrt. Unsure if this is a qemu bug or not. 
+ "(test_kind or test_multiarray or test_simd or test_umath or test_ufunc) and not test_unary_spurious_fpexception", + "arm" + ] + - [ + "ppc64le", + "powerpc64le-linux-gnu", + "ppc64le/ubuntu:22.04", + "-Dallow-noblas=true", + "test_kind or test_multiarray or test_simd or test_umath or test_ufunc", + "ppc64le" + ] + - [ + "ppc64le - baseline(Power9)", + "powerpc64le-linux-gnu", + "ppc64le/ubuntu:22.04", + "-Dallow-noblas=true -Dcpu-baseline=vsx3", + "test_kind or test_multiarray or test_simd or test_umath or test_ufunc", + "ppc64le" + ] + - [ + "s390x", + "s390x-linux-gnu", + "s390x/ubuntu:22.04", + "-Dallow-noblas=true", + # Skipping TestRationalFunctions.test_gcd_overflow test + # because of a possible qemu bug that appears to be related to int64 overflow in absolute operation. + # TODO(@seiko2plus): Confirm the bug and provide a minimal reproducer, then report it to upstream. + "(test_kind or test_multiarray or test_simd or test_umath or test_ufunc) and not test_gcd_overflow", + "s390x" + ] + - [ + "s390x - baseline(Z13)", + "s390x-linux-gnu", + "s390x/ubuntu:22.04", + "-Dallow-noblas=true -Dcpu-baseline=vx", + "(test_kind or test_multiarray or test_simd or test_umath or test_ufunc) and not test_gcd_overflow", + "s390x" + ] + - [ + "riscv64", + "riscv64-linux-gnu", + "riscv64/ubuntu:22.04", + "-Dallow-noblas=true", + "test_kind or test_multiarray or test_simd or test_umath or test_ufunc", + "riscv64" + ] + env: + TOOLCHAIN_NAME: ${{ matrix.BUILD_PROP[1] }} + DOCKER_CONTAINER: ${{ matrix.BUILD_PROP[2] }} + MESON_OPTIONS: ${{ matrix.BUILD_PROP[3] }} + RUNTIME_TEST_FILTER: ${{ matrix.BUILD_PROP[4] }} + ARCH: ${{ matrix.BUILD_PROP[5] }} + TERM: xterm-256color + + name: "${{ matrix.BUILD_PROP[0] }}" + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + + - name: Initialize binfmt_misc for qemu-user-static + run: | + # see 
https://hub.docker.com/r/tonistiigi/binfmt for available versions + docker run --rm --privileged tonistiigi/binfmt:qemu-v9.2.2-52 --install all + + - name: Install GCC cross-compilers + run: | + sudo apt update + sudo apt install -y ninja-build gcc-${TOOLCHAIN_NAME} g++-${TOOLCHAIN_NAME} gfortran-${TOOLCHAIN_NAME} + + - name: Cache docker container + uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4.2.2 + id: container-cache + with: + path: ~/docker_${{ matrix.BUILD_PROP[1] }} + key: container-${{ runner.os }}-${{ matrix.BUILD_PROP[1] }}-${{ matrix.BUILD_PROP[2] }}-${{ hashFiles('requirements/build_requirements.txt') }} + + - name: Creates new container + if: steps.container-cache.outputs.cache-hit != 'true' + run: | + docker run --platform=linux/${ARCH} --name the_container --interactive \ + -v /:/host -v $(pwd):/numpy ${DOCKER_CONTAINER} /bin/bash -c " + apt update && + apt install -y cmake git python3 python-is-python3 python3-dev python3-pip && + mkdir -p /lib64 && ln -s /host/lib64/ld-* /lib64/ && + ln -s /host/lib/x86_64-linux-gnu /lib/x86_64-linux-gnu && + rm -rf /usr/${TOOLCHAIN_NAME} && ln -s /host/usr/${TOOLCHAIN_NAME} /usr/${TOOLCHAIN_NAME} && + rm -rf /usr/lib/gcc/${TOOLCHAIN_NAME} && ln -s /host/usr/lib/gcc-cross/${TOOLCHAIN_NAME} /usr/lib/gcc/${TOOLCHAIN_NAME} && + rm -f /usr/bin/gcc && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-gcc /usr/bin/gcc && + rm -f /usr/bin/g++ && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-g++ /usr/bin/g++ && + rm -f /usr/bin/gfortran && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-gfortran /usr/bin/gfortran && + rm -f /usr/bin/ar && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-ar /usr/bin/ar && + rm -f /usr/bin/as && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-as /usr/bin/as && + rm -f /usr/bin/ld && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-ld /usr/bin/ld && + rm -f /usr/bin/ld.bfd && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-ld.bfd /usr/bin/ld.bfd && + rm -f /usr/bin/ninja && ln -s /host/usr/bin/ninja /usr/bin/ninja && + git config --global 
--add safe.directory /numpy && + # No need to build ninja from source, the host ninja is used for the build + grep -v ninja /numpy/requirements/build_requirements.txt > /tmp/build_requirements.txt && + python -m pip install -r /tmp/build_requirements.txt && + python -m pip install pytest pytest-xdist hypothesis typing_extensions pytest-timeout && + rm -f /usr/local/bin/ninja && mkdir -p /usr/local/bin && ln -s /host/usr/bin/ninja /usr/local/bin/ninja + " + docker commit the_container the_container + mkdir -p "~/docker_${TOOLCHAIN_NAME}" + docker save -o "~/docker_${TOOLCHAIN_NAME}/the_container.tar" the_container + + - name: Load container from cache + if: steps.container-cache.outputs.cache-hit == 'true' + run: docker load -i "~/docker_${TOOLCHAIN_NAME}/the_container.tar" + + - name: Meson Build + run: | + docker run --rm --platform=linux/${ARCH} -e "TERM=xterm-256color" \ + -v $(pwd):/numpy -v /:/host the_container \ + /bin/script -e -q -c "/bin/bash --noprofile --norc -eo pipefail -c ' + cd /numpy && spin build --clean -- ${MESON_OPTIONS} + '" + + - name: Meson Log + if: always() + run: 'cat build/meson-logs/meson-log.txt' + + - name: Run Tests + run: | + docker run --rm --platform=linux/${ARCH} -e "TERM=xterm-256color" \ + -v $(pwd):/numpy -v /:/host the_container \ + /bin/script -e -q -c "/bin/bash --noprofile --norc -eo pipefail -c ' + export F90=/usr/bin/gfortran + cd /numpy && spin test -- --timeout=600 --durations=10 -k \"${RUNTIME_TEST_FILTER}\" + '" + + + linux_loongarch64_qemu: + # Only workflow_dispatch is enabled on forks. 
+ # To enable this job and subsequent jobs on a fork for other events, comment out: + if: github.repository == 'numpy/numpy' || github.event_name == 'workflow_dispatch' + runs-on: ubuntu-24.04 + continue-on-error: true + strategy: + fail-fast: false + matrix: + BUILD_PROP: + - [ + "loongarch64", + "loongarch64-linux-gnu", + "cnclarechen/numpy-loong64-debian:v1", + "-Dallow-noblas=true", + "test_kind or test_multiarray or test_simd or test_umath or test_ufunc", + "loong64" + ] + env: + TOOLCHAIN_NAME: ${{ matrix.BUILD_PROP[1] }} + DOCKER_CONTAINER: ${{ matrix.BUILD_PROP[2] }} + MESON_OPTIONS: ${{ matrix.BUILD_PROP[3] }} + RUNTIME_TEST_FILTER: ${{ matrix.BUILD_PROP[4] }} + ARCH: ${{ matrix.BUILD_PROP[5] }} + TERM: xterm-256color + + name: "${{ matrix.BUILD_PROP[0] }}" + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + + - name: Initialize binfmt_misc for qemu-user-static + run: | + docker run --rm --privileged loongcr.lcpu.dev/multiarch/archlinux --reset -p yes + + - name: Install GCC cross-compilers + run: | + sudo apt update + sudo apt install -y ninja-build gcc-14-${TOOLCHAIN_NAME} g++-14-${TOOLCHAIN_NAME} gfortran-14-${TOOLCHAIN_NAME} + + - name: Cache docker container + uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4.2.2 + id: container-cache + with: + path: ~/docker_${{ matrix.BUILD_PROP[1] }} + key: container-${{ runner.os }}-${{ matrix.BUILD_PROP[1] }}-${{ matrix.BUILD_PROP[2] }}-${{ hashFiles('requirements/build_requirements.txt') }} + + - name: Creates new container + if: steps.container-cache.outputs.cache-hit != 'true' + run: | + docker run --platform=linux/${ARCH} --name the_container --interactive \ + -v /:/host -v $(pwd):/numpy ${DOCKER_CONTAINER} /bin/bash -c " + mkdir -p /lib64 && ln -s /host/lib64/ld-* /lib64/ && + ln -s /host/lib/x86_64-linux-gnu /lib/x86_64-linux-gnu && + ln -s /host/usr/${TOOLCHAIN_NAME} /usr/${TOOLCHAIN_NAME} && + ln -s 
/host/usr/lib/gcc-cross/${TOOLCHAIN_NAME} /usr/lib/gcc/${TOOLCHAIN_NAME} && + rm -f /usr/bin/gcc && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-gcc-14 /usr/bin/gcc && + rm -f /usr/bin/g++ && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-g++-14 /usr/bin/g++ && + rm -f /usr/bin/gfortran && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-gfortran-14 /usr/bin/gfortran && + rm -f /usr/bin/ar && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-ar /usr/bin/ar && + rm -f /usr/bin/as && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-as /usr/bin/as && + rm -f /usr/bin/ld && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-ld /usr/bin/ld && + rm -f /usr/bin/ld.bfd && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-ld.bfd /usr/bin/ld.bfd && + rm -f /usr/bin/ninja && ln -s /host/usr/bin/ninja /usr/bin/ninja && + git config --global --add safe.directory /numpy && + python -m pip install --break-system-packages -r /numpy/requirements/build_requirements.txt && + python -m pip install --break-system-packages pytest pytest-xdist hypothesis typing_extensions + " + docker commit the_container the_container + mkdir -p "~/docker_${TOOLCHAIN_NAME}" + docker save -o "~/docker_${TOOLCHAIN_NAME}/the_container.tar" the_container + + - name: Load container from cache + if: steps.container-cache.outputs.cache-hit == 'true' + run: docker load -i "~/docker_${TOOLCHAIN_NAME}/the_container.tar" + + - name: Meson Build + run: | + docker run --rm --platform=linux/${ARCH} -e "TERM=xterm-256color" \ + -v $(pwd):/numpy -v /:/host the_container \ + /bin/script -e -q -c "/bin/bash --noprofile --norc -eo pipefail -c ' + cd /numpy/ && spin build --clean -- ${MESON_OPTIONS} + '" + + - name: Meson Log + if: always() + run: 'cat build/meson-logs/meson-log.txt' + + - name: Run Tests + run: | + docker run --rm --platform=linux/${ARCH} -e "TERM=xterm-256color" \ + -v $(pwd):/numpy -v /:/host the_container \ + /bin/script -e -q -c "/bin/bash --noprofile --norc -eo pipefail -c ' + cd /numpy && spin test -- -k \"${RUNTIME_TEST_FILTER}\" + '" diff --git 
a/.github/workflows/linux_simd.yml b/.github/workflows/linux_simd.yml new file mode 100644 index 000000000000..265261603a6f --- /dev/null +++ b/.github/workflows/linux_simd.yml @@ -0,0 +1,289 @@ +name: Linux SIMD tests + +# This file is meant for testing different SIMD-related build options and +# optimization levels. See `meson_options.txt` for the available build options. +# +# Jobs and their purposes: +# +# - baseline_only: +# Focuses on completing as quickly as possible and acts as a filter for other, more resource-intensive jobs. +# Utilizes only the default baseline targets (e.g., SSE3 on X86_64) without enabling any runtime dispatched features. +# +# - old_gcc: +# Tests the oldest supported GCC version with default CPU/baseline/dispatch settings. +# +# - without_optimizations: +# Completely disables all SIMD optimizations and other compiler optimizations such as loop unrolling. +# +# - native: +# Tests against the host CPU features set as the baseline without enabling any runtime dispatched features. +# Intended to assess the entire NumPy codebase against host flags, even for code sections lacking handwritten SIMD intrinsics. +# +# - without_avx512/avx2/fma3: +# Uses runtime SIMD dispatching but disables AVX2, FMA3, and AVX512. +# Intended to evaluate 128-bit SIMD extensions without FMA support. +# +# - without_avx512: +# Uses runtime SIMD dispatching but disables AVX512. +# Intended to evaluate 128-bit/256-bit SIMD extensions. +# +# - intel_sde: +# Executes only the SIMD tests for various AVX512 SIMD extensions under the Intel Software Development Emulator (SDE). 
+# +on: + pull_request: + branches: + - main + - maintenance/** + +defaults: + run: + shell: 'script -q -e -c "bash --noprofile --norc -eo pipefail {0}"' + +env: + TERM: xterm-256color + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read # to fetch code (actions/checkout) + +jobs: + baseline_only: + # To enable this workflow on a fork, comment out: + if: github.repository == 'numpy/numpy' + runs-on: ubuntu-latest + env: + MESON_ARGS: "-Dallow-noblas=true -Dcpu-dispatch=none" + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: '3.11' + - uses: ./.github/meson_actions + name: Build/Test + + old_gcc: + if: github.event_name != 'push' + needs: [baseline_only] + runs-on: ubuntu-latest + env: + MESON_ARGS: "-Dallow-noblas=true" + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: '3.11' + + - name: Install GCC9/10 + run: | + echo "deb http://archive.ubuntu.com/ubuntu focal main universe" | sudo tee /etc/apt/sources.list.d/focal.list + sudo apt update + sudo apt install -y g++-9 g++-10 + + - name: Enable gcc-9 + run: | + sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-9 1 + sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-9 1 + + - uses: ./.github/meson_actions + name: Build/Test against gcc-9 + + - name: Enable gcc-10 + run: | + sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 2 + sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-10 2 + + - uses: ./.github/meson_actions + name: 
Build/Test against gcc-10 + + arm64_simd: + if: github.repository == 'numpy/numpy' + needs: [baseline_only] + runs-on: ubuntu-22.04-arm + strategy: + fail-fast: false + matrix: + config: + - name: "baseline only" + args: "-Dallow-noblas=true -Dcpu-dispatch=none" + - name: "with ASIMD" + args: "-Dallow-noblas=true -Dcpu-baseline=asimd" + - name: "native" + args: "-Dallow-noblas=true -Dcpu-baseline=native -Dcpu-dispatch=none" + name: "ARM64 SIMD - ${{ matrix.config.name }}" + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: '3.11' + - name: Install dependencies + run: | + python -m pip install -r requirements/build_requirements.txt + python -m pip install pytest pytest-xdist hypothesis typing_extensions pytest-timeout + - name: Build + run: | + spin build -- ${{ matrix.config.args }} + - name: Test + run: | + spin test -- --timeout=600 --durations=10 + + specialize: + needs: [baseline_only] + runs-on: ubuntu-latest + if: github.event_name != 'push' + continue-on-error: true + strategy: + fail-fast: false + matrix: + BUILD_PROP: + - [ + "without optimizations", + "-Dallow-noblas=true -Ddisable-optimization=true", + "3.12" + ] + - [ + "native", + "-Dallow-noblas=true -Dcpu-baseline=native -Dcpu-dispatch=none", + "3.11" + ] + - [ + "without avx512", + "-Dallow-noblas=true -Dcpu-dispatch=SSSE3,SSE41,POPCNT,SSE42,AVX,F16C,AVX2,FMA3", + "3.11" + ] + - [ + "without avx512/avx2/fma3", + "-Dallow-noblas=true -Dcpu-dispatch=SSSE3,SSE41,POPCNT,SSE42,AVX,F16C", + "3.11" + ] + + env: + MESON_ARGS: ${{ matrix.BUILD_PROP[1] }} + + name: "${{ matrix.BUILD_PROP[0] }}" + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + - uses: 
actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: "${{ matrix.BUILD_PROP[2] }}" + - uses: ./.github/meson_actions + name: Build/Test + + intel_sde_avx512: + needs: [baseline_only] + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: '3.11' + + - name: Install Intel SDE + run: | + curl -o /tmp/sde.tar.xz https://downloadmirror.intel.com/788820/sde-external-9.27.0-2023-09-13-lin.tar.xz + mkdir /tmp/sde && tar -xvf /tmp/sde.tar.xz -C /tmp/sde/ + sudo mv /tmp/sde/* /opt/sde && sudo ln -s /opt/sde/sde64 /usr/bin/sde + + - name: Install dependencies + run: | + python -m pip install -r requirements/build_requirements.txt + python -m pip install pytest pytest-xdist hypothesis typing_extensions + + - name: Build + run: CC=gcc-13 CXX=g++-13 spin build -- -Dallow-noblas=true -Dcpu-baseline=avx512_skx -Dtest-simd='BASELINE,AVX512_KNL,AVX512_KNM,AVX512_SKX,AVX512_CLX,AVX512_CNL,AVX512_ICL,AVX512_SPR' + + - name: Meson Log + if: always() + run: cat build/meson-logs/meson-log.txt + + - name: SIMD tests (SKX) + run: | + export NUMPY_SITE=$(realpath build-install/usr/lib/python*/site-packages/) + export PYTHONPATH="$PYTHONPATH:$NUMPY_SITE" + cd build-install && + sde -skx -- python -c "import numpy; numpy.show_config()" && + sde -skx -- python -m pytest $NUMPY_SITE/numpy/_core/tests/test_simd* + + - name: linalg/ufunc/umath tests (TGL) + run: | + export NUMPY_SITE=$(realpath build-install/usr/lib/python*/site-packages/) + export PYTHONPATH="$PYTHONPATH:$NUMPY_SITE" + cd build-install && + sde -tgl -- python -c "import numpy; numpy.show_config()" && + sde -tgl -- python -m pytest $NUMPY_SITE/numpy/_core/tests/test_umath* \ + $NUMPY_SITE/numpy/_core/tests/test_ufunc.py \ + 
$NUMPY_SITE/numpy/_core/tests/test_multiarray.py \ + $NUMPY_SITE/numpy/linalg/tests/test_* + + + intel_sde_spr: + needs: [baseline_only] + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: '3.11' + + - name: Install Intel SDE + run: | + curl -o /tmp/sde.tar.xz https://downloadmirror.intel.com/788820/sde-external-9.27.0-2023-09-13-lin.tar.xz + mkdir /tmp/sde && tar -xvf /tmp/sde.tar.xz -C /tmp/sde/ + sudo mv /tmp/sde/* /opt/sde && sudo ln -s /opt/sde/sde64 /usr/bin/sde + + - name: Install dependencies + run: | + python -m pip install -r requirements/build_requirements.txt + python -m pip install pytest pytest-xdist hypothesis typing_extensions + + - name: Build + run: CC=gcc-13 CXX=g++-13 spin build -- -Dallow-noblas=true -Dcpu-baseline=avx512_spr + + - name: Meson Log + if: always() + run: cat build/meson-logs/meson-log.txt + + - name: SIMD tests (SPR) + run: | + export NUMPY_SITE=$(realpath build-install/usr/lib/python*/site-packages/) + export PYTHONPATH="$PYTHONPATH:$NUMPY_SITE" + cd build-install && + sde -spr -- python -c "import numpy; numpy.show_config()" && + sde -spr -- python -m pytest $NUMPY_SITE/numpy/_core/tests/test_simd* + + - name: linalg/ufunc/umath tests on Intel SPR + run: | + export NUMPY_SITE=$(realpath build-install/usr/lib/python*/site-packages/) + export PYTHONPATH="$PYTHONPATH:$NUMPY_SITE" + cd build-install && + sde -spr -- python -c "import numpy; numpy.show_config()" && + sde -spr -- python -m pytest $NUMPY_SITE/numpy/_core/tests/test_umath* \ + $NUMPY_SITE/numpy/_core/tests/test_ufunc.py \ + $NUMPY_SITE/numpy/_core/tests/test_multiarray.py \ + $NUMPY_SITE/numpy/linalg/tests/test_* diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml new file mode 100644 index 
000000000000..3a401379c03e --- /dev/null +++ b/.github/workflows/macos.yml @@ -0,0 +1,164 @@ +name: macOS tests + +on: + pull_request: + branches: + - main + - maintenance/** + + +permissions: + contents: read # to fetch code (actions/checkout) + +env: + CCACHE_DIR: "${{ github.workspace }}/.ccache" + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + x86_conda: + name: macOS x86-64 conda + # To enable this workflow on a fork, comment out: + if: github.repository == 'numpy/numpy' + runs-on: macos-13 + strategy: + fail-fast: false + matrix: + python-version: ["3.12"] + + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + + - name: Prepare cache dirs and timestamps + id: prep-ccache + shell: bash -l {0} + run: | + mkdir -p "${CCACHE_DIR}" + echo "dir=$CCACHE_DIR" >> $GITHUB_OUTPUT + NOW=$(date -u +"%F-%T") + echo "timestamp=${NOW}" >> $GITHUB_OUTPUT + echo "today=$(/bin/date -u '+%Y%m%d')" >> $GITHUB_OUTPUT + + - name: Setup compiler cache + uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4.2.2 + id: cache-ccache + with: + path: ${{ steps.prep-ccache.outputs.dir }} + key: ${{ github.workflow }}-${{ matrix.python-version }}-ccache-macos-${{ steps.prep-ccache.outputs.timestamp }} + restore-keys: | + ${{ github.workflow }}-${{ matrix.python-version }}-ccache-macos- + + - name: Setup Miniforge + uses: conda-incubator/setup-miniconda@505e6394dae86d6a5c7fbb6e3fb8938e3e863830 # v3.1.1 + with: + python-version: ${{ matrix.python-version }} + channels: conda-forge + channel-priority: true + activate-environment: numpy-dev + use-only-tar-bz2: false + miniforge-variant: Miniforge3 + miniforge-version: latest + use-mamba: true + + # Updates if `environment.yml` or the date changes. The latter is needed to + # ensure we re-solve once a day (since we don't lock versions). 
Could be + # replaced by a conda-lock based approach in the future. + - name: Cache conda environment + uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4.2.2 + env: + # Increase this value to reset cache if environment.yml has not changed + CACHE_NUMBER: 1 + with: + path: ${{ env.CONDA }}/envs/numpy-dev + key: + ${{ runner.os }}--${{ steps.prep-ccache.outputs.today }}-conda-${{ env.CACHE_NUMBER }}-${{ hashFiles('environment.yml') }} + id: envcache + + - name: Update Conda Environment + run: mamba env update -n numpy-dev -f environment.yml + if: steps.envcache.outputs.cache-hit != 'true' + + - name: Build and Install NumPy + shell: bash -l {0} + run: | + conda activate numpy-dev + CC="ccache $CC" spin build -j2 -- -Dallow-noblas=false + + - name: Run test suite (full) + shell: bash -l {0} + run: | + conda activate numpy-dev + export OMP_NUM_THREADS=2 + spin test -j2 -m full + + - name: Ccache statistics + shell: bash -l {0} + run: | + conda activate numpy-dev + ccache -s + + + accelerate: + name: Accelerate - ${{ matrix.build_runner[1] }} - ${{ matrix.version }} + # To enable this workflow on a fork, comment out: + if: github.repository == 'numpy/numpy' + runs-on: ${{ matrix.build_runner[0] }} + strategy: + fail-fast: false + matrix: + build_runner: + - [ macos-13, "macos_x86_64" ] + - [ macos-14, "macos_arm64" ] + version: ["3.11", "3.13t"] + + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + + - uses: astral-sh/setup-uv@f94ec6bedd8674c4426838e6b50417d36b6ab231 + with: + python-version: ${{ matrix.version }} + enable-cache: false + + - run: + uv pip install --python=${{ matrix.version }} pip + + - uses: maxim-lobanov/setup-xcode@60606e260d2fc5762a71e64e74b2174e8ea3c8bd # v1.6.0 + if: ${{ matrix.build_runner[0] == 'macos-13' }} + with: + xcode-version: '14.3' + + # TODO: remove cython nightly install when cython does a release + - 
name: Install nightly Cython + if: matrix.version == '3.13t' + run: | + pip install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple cython + + - name: Install dependencies + run: | + pip install -r requirements/build_requirements.txt + pip install -r requirements/setuptools_requirement.txt + pip install pytest pytest-xdist pytest-timeout hypothesis + + - name: Build against Accelerate (LP64) + run: spin build -- -Ddisable-optimization=true -Dallow-noblas=false + + - name: Test (linalg only) + run: spin test -j2 -- numpy/linalg --timeout=600 --durations=10 + + - name: Build NumPy against Accelerate (ILP64) + run: | + rm -r build build-install + spin build -- -Duse-ilp64=true -Ddisable-optimization=true -Dallow-noblas=false + + - name: Test (fast tests) + run: spin test -j2 -- --timeout=600 --durations=10 diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml new file mode 100644 index 000000000000..446fb899e308 --- /dev/null +++ b/.github/workflows/mypy.yml @@ -0,0 +1,74 @@ +name: Run MyPy + +# Mypy is too slow to run as part of regular CI. The purpose of the jobs in +# this file is to cover running Mypy across: +# +# - OSes: Linux, Windows and macOS +# - Python versions: lowest/highest supported versions, and an intermediate one +# +# The build matrix aims for sparse coverage across those two dimensions. +# Use of BLAS/LAPACK and SIMD is disabled on purpose, because those things +# don't matter for static typing and this speeds up the builds. +# +# This is a separate job file so it's easy to trigger by hand. 
+ +on: + pull_request: + branches: + - main + - maintenance/** + paths-ignore: + - 'benchmarks/' + - '.circleci/' + - 'docs/' + - 'meson_cpu/' + - 'tools/' + workflow_dispatch: + +defaults: + run: + shell: bash + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read # to fetch code (actions/checkout) + +jobs: + mypy: + # To enable this workflow on a fork, comment out: + if: github.repository == 'numpy/numpy' + name: "MyPy" + runs-on: ${{ matrix.os_python[0] }} + strategy: + fail-fast: false + matrix: + os_python: + - [ubuntu-latest, '3.12'] + - [windows-latest, '3.11'] + - [macos-latest, '3.11'] + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: ${{ matrix.os_python[1] }} + - name: Install dependencies + run: | + pip install -r requirements/build_requirements.txt + # orjson makes mypy faster but the default requirements.txt + # can't install it because orjson doesn't support 32 bit Linux + pip install orjson + pip install -r requirements/test_requirements.txt + - name: Build + run: | + spin build -j2 -- -Dallow-noblas=true -Ddisable-optimization=true --vsenv + - name: Run Mypy + run: | + spin mypy diff --git a/.github/workflows/mypy_primer.yml b/.github/workflows/mypy_primer.yml new file mode 100644 index 000000000000..7e6d7dba9cb4 --- /dev/null +++ b/.github/workflows/mypy_primer.yml @@ -0,0 +1,99 @@ +name: Run mypy_primer + +on: + # Only run on PR, since we diff against main + pull_request: + paths: + - "**/*.pyi" + - ".github/workflows/mypy_primer.yml" + - ".github/workflows/mypy_primer_comment.yml" + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + 
+jobs: + mypy_primer: + name: Run + runs-on: ubuntu-latest + strategy: + matrix: + shard-index: [0] # e.g. change this to [0, 1, 2] and --num-shards below to 3 + fail-fast: false + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + path: numpy_to_test + fetch-depth: 0 + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: "3.12" + - name: Install dependencies + run: pip install git+https://github.com/hauntsaninja/mypy_primer.git + - name: Run mypy_primer + shell: bash + run: | + cd numpy_to_test + MYPY_VERSION=$(grep mypy== requirements/test_requirements.txt | sed -n 's/mypy==\([^;]*\).*/\1/p') + + echo "new commit" + git checkout $GITHUB_SHA + git rev-list --format=%s --max-count=1 HEAD + + MERGE_BASE=$(git merge-base $GITHUB_SHA origin/$GITHUB_BASE_REF) + git worktree add ../numpy_base $MERGE_BASE + cd ../numpy_base + + echo "base commit" + git rev-list --format=%s --max-count=1 HEAD + + echo '' + cd .. + # fail action if exit code isn't zero or one + # TODO: note that we don't build numpy, so if a project attempts to use the + # numpy mypy plugin, we may see some issues involving version skew. + ( + mypy_primer \ + --new v${MYPY_VERSION} --old v${MYPY_VERSION} \ + --known-dependency-selector numpy \ + --old-prepend-path numpy_base --new-prepend-path numpy_to_test \ + --num-shards 1 --shard-index ${{ matrix.shard-index }} \ + --debug \ + --output concise \ + | tee diff_${{ matrix.shard-index }}.txt + ) || [ $? 
-eq 1 ] + - if: ${{ matrix.shard-index == 0 }} + name: Save PR number + run: | + echo ${{ github.event.pull_request.number }} | tee pr_number.txt + - name: Upload mypy_primer diff + PR number + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + if: ${{ matrix.shard-index == 0 }} + with: + name: mypy_primer_diffs-${{ matrix.shard-index }} + path: | + diff_${{ matrix.shard-index }}.txt + pr_number.txt + - name: Upload mypy_primer diff + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + if: ${{ matrix.shard-index != 0 }} + with: + name: mypy_primer_diffs-${{ matrix.shard-index }} + path: diff_${{ matrix.shard-index }}.txt + + join_artifacts: + name: Join artifacts + runs-on: ubuntu-latest + needs: [mypy_primer] + permissions: + contents: read + steps: + - name: Merge artifacts + uses: actions/upload-artifact/merge@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: mypy_primer_diffs + pattern: mypy_primer_diffs-* + delete-merged: true diff --git a/.github/workflows/mypy_primer_comment.yml b/.github/workflows/mypy_primer_comment.yml new file mode 100644 index 000000000000..be0dda7f7dec --- /dev/null +++ b/.github/workflows/mypy_primer_comment.yml @@ -0,0 +1,103 @@ +name: Comment with mypy_primer diff + +on: + workflow_run: + workflows: + - Run mypy_primer + types: + - completed + +permissions: + contents: read + pull-requests: write + +jobs: + comment: + name: Comment PR from mypy_primer + runs-on: ubuntu-latest + if: ${{ github.event.workflow_run.conclusion == 'success' }} + steps: + - name: Download diffs + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 + with: + script: | + const fs = require('fs'); + const artifacts = await github.rest.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: ${{ github.event.workflow_run.id }}, + }); + const [matchArtifact] = artifacts.data.artifacts.filter((artifact) => + 
artifact.name == "mypy_primer_diffs"); + + const download = await github.rest.actions.downloadArtifact({ + owner: context.repo.owner, + repo: context.repo.repo, + artifact_id: matchArtifact.id, + archive_format: "zip", + }); + fs.writeFileSync("diff.zip", Buffer.from(download.data)); + + - run: unzip diff.zip + + - name: Get PR number + id: get-pr-number + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 + with: + script: | + const fs = require('fs'); + return parseInt(fs.readFileSync("pr_number.txt", { encoding: "utf8" })) + + - name: Hide old comments + uses: kanga333/comment-hider@c12bb20b48aeb8fc098e35967de8d4f8018fffdf # v0.4.0 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + issue_number: ${{ steps.get-pr-number.outputs.result }} + + - run: cat diff_*.txt | tee fulldiff.txt + + - name: Post comment + id: post-comment + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const MAX_CHARACTERS = 50000 + const MAX_CHARACTERS_PER_PROJECT = MAX_CHARACTERS / 3 + + const fs = require('fs') + let data = fs.readFileSync('fulldiff.txt', { encoding: 'utf8' }) + + function truncateIfNeeded(original, maxLength) { + if (original.length <= maxLength) { + return original + } + let truncated = original.substring(0, maxLength) + // further, remove last line that might be truncated + truncated = truncated.substring(0, truncated.lastIndexOf('\n')) + let lines_truncated = original.split('\n').length - truncated.split('\n').length + return `${truncated}\n\n... 
(truncated ${lines_truncated} lines) ...` + } + + const projects = data.split('\n\n') + // don't let one project dominate + data = projects.map(project => truncateIfNeeded(project, MAX_CHARACTERS_PER_PROJECT)).join('\n\n') + // posting comment fails if too long, so truncate + data = truncateIfNeeded(data, MAX_CHARACTERS) + + console.log("Diff from mypy_primer:") + console.log(data) + + let body + if (data.trim()) { + body = 'Diff from [mypy_primer](https://github.com/hauntsaninja/mypy_primer), ' + body += 'showing the effect of this PR on type check results on a corpus of open source code:\n```diff\n' + body += data + '```' + const prNumber = parseInt(fs.readFileSync("pr_number.txt", { encoding: "utf8" })) + await github.rest.issues.createComment({ + issue_number: prNumber, + owner: context.repo.owner, + repo: context.repo.repo, + body + }) + } diff --git a/.github/workflows/scorecards.yml b/.github/workflows/scorecards.yml new file mode 100644 index 000000000000..0a11922b0877 --- /dev/null +++ b/.github/workflows/scorecards.yml @@ -0,0 +1,55 @@ +name: Scorecards supply-chain security +on: + # For Branch-Protection check. Only the default branch is supported. See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection + branch_protection_rule: + # To guarantee Maintained check is occasionally updated. See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained + schedule: + - cron: "19 23 * * 5" + push: + branches: ["main"] + +# Declare default permissions as read only. +permissions: {} + +jobs: + analysis: + name: Scorecards analysis + runs-on: ubuntu-latest + permissions: + # Needed to upload the results to code-scanning dashboard. + security-events: write + # Needed to publish results and get a badge (see publish_results below). 
+ id-token: write + + steps: + - name: "Checkout code" + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v3.1.0 + with: + persist-credentials: false + + - name: "Run analysis" + uses: ossf/scorecard-action@f49aabe0b5af0936a0987cfb85d86b75731b0186 # v2.4.1 + with: + results_file: results.sarif + results_format: sarif + # Publish results to OpenSSF REST API for easy access by consumers. + # Allows the repository to include the Scorecard badge. + # See https://github.com/ossf/scorecard-action#publishing-results. + publish_results: true + + # Upload the results as artifacts (optional). Commenting out will disable + # uploads of run results in SARIF format to the repository Actions tab. + - name: "Upload artifact" + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: SARIF file + path: results.sarif + retention-days: 5 + + # Upload the results to GitHub's code scanning dashboard. + - name: "Upload to code-scanning" + uses: github/codeql-action/upload-sarif@5f8171a638ada777af81d42b55959a643bb29017 # v2.1.27 + with: + sarif_file: results.sarif diff --git a/.github/workflows/variant-wheels.yml b/.github/workflows/variant-wheels.yml new file mode 100644 index 000000000000..fe90b656d6e1 --- /dev/null +++ b/.github/workflows/variant-wheels.yml @@ -0,0 +1,52 @@ +# Workflow to build and test wheels. +# To work on the wheel building infrastructure on a fork, comment out: +# +# if: github.repository == 'numpy/numpy' +# +# in the get_commit_message job. Be sure to include [wheel build] in your commit +# message to trigger the build. All files related to wheel building are located +# at tools/wheels/ +# Alternatively, you can add labels to the pull request in order to trigger wheel +# builds. 
+# The labels that trigger builds are: +# 36 - Build(for changes to the building process, +# 14 - Release(ensure wheels build before release) +name: Wheel builder + +on: + push: + +permissions: + contents: read # to fetch code (actions/checkout) + +jobs: + build_wheels: + name: Build wheel ${{ matrix.python }}-${{ matrix.blas }}-x86_64-${{ matrix.x86_64[0] }} + runs-on: ${{ matrix.buildplat[0] }} + strategy: + # Ensure that a wheel builder finishes even if another fails + fail-fast: false + matrix: + blas: [openblas, mkl] + # Github Actions doesn't support pairing matrix values together, let's improvise + # https://github.com/github/feedback/discussions/7835#discussioncomment-1769026 + buildplat: + - [ubuntu-22.04, manylinux_x86_64, ""] + python: ["3.12"] + x86_64: + - [v1, -march=x86-64] + - [v2, -march=x86-64-v2] + - [v3, -march=x86-64-v3] + - [v4, -march=x86-64-v4] + + steps: + - name: Checkout numpy + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: true + persist-credentials: false + + - uses: prefix-dev/setup-pixi@v0.8.3 + + - name: Build wheels + run: pixi run -e ${{ matrix.blas }} build -Csetup-args=-Duse-ilp64=true -Cvariant=blas::variant::${{ matrix.blas }} -Cvariant=x86_64::baseline::${{ matrix.x86_64[0] }} -Ccompile-args=-v -Csetup-args=-Dc_args=${{ matrix.x86_64[1] }} -Csetup-args=-Dcpp_args=${{ matrix.x86_64[1] }} diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index fe90b656d6e1..bcd888cd8047 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -14,31 +14,100 @@ name: Wheel builder on: + schedule: + # ┌───────────── minute (0 - 59) + # │ ┌───────────── hour (0 - 23) + # │ │ ┌───────────── day of the month (1 - 31) + # │ │ │ ┌───────────── month (1 - 12 or JAN-DEC) + # │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT) + # │ │ │ │ │ + - cron: "42 2 * * SUN,WED" + pull_request: + branches: + - main + - maintenance/** push: + tags: + - v* + 
workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true permissions: contents: read # to fetch code (actions/checkout) jobs: + get_commit_message: + name: Get commit message + runs-on: ubuntu-latest + # Only workflow_dispatch is enabled on forks. + # To enable this job and subsequent jobs on a fork for other events, comment out: + if: github.repository == 'numpy/numpy' || github.event_name == 'workflow_dispatch' + outputs: + message: ${{ steps.commit_message.outputs.message }} + steps: + - name: Checkout numpy + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + # Gets the correct commit message for pull request + with: + ref: ${{ github.event.pull_request.head.sha }} + persist-credentials: false + - name: Get commit message + id: commit_message + env: + HEAD: ${{ github.ref }} + run: | + set -xe + COMMIT_MSG=$(git log --no-merges -1 --oneline) + echo "message=$COMMIT_MSG" >> $GITHUB_OUTPUT + echo github.ref "$HEAD" + build_wheels: - name: Build wheel ${{ matrix.python }}-${{ matrix.blas }}-x86_64-${{ matrix.x86_64[0] }} + name: Build wheel ${{ matrix.python }}-${{ matrix.buildplat[1] }}-${{ matrix.buildplat[2] }} + needs: get_commit_message + if: >- + contains(needs.get_commit_message.outputs.message, '[wheel build]') || + github.event_name == 'schedule' || + github.event_name == 'workflow_dispatch' || + (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && ( ! 
endsWith(github.ref, 'dev0'))) runs-on: ${{ matrix.buildplat[0] }} strategy: # Ensure that a wheel builder finishes even if another fails fail-fast: false matrix: - blas: [openblas, mkl] # Github Actions doesn't support pairing matrix values together, let's improvise # https://github.com/github/feedback/discussions/7835#discussioncomment-1769026 buildplat: - [ubuntu-22.04, manylinux_x86_64, ""] - python: ["3.12"] - x86_64: - - [v1, -march=x86-64] - - [v2, -march=x86-64-v2] - - [v3, -march=x86-64-v3] - - [v4, -march=x86-64-v4] + - [ubuntu-22.04, musllinux_x86_64, ""] + - [ubuntu-22.04-arm, manylinux_aarch64, ""] + - [ubuntu-22.04-arm, musllinux_aarch64, ""] + - [macos-13, macosx_x86_64, openblas] + # targeting macos >= 14. Could probably build on macos-14, but it would be a cross-compile + - [macos-13, macosx_x86_64, accelerate] + - [macos-14, macosx_arm64, accelerate] # always use accelerate + - [windows-2019, win_amd64, ""] + - [windows-2019, win32, ""] + python: ["cp311", "cp312", "cp313", "cp313t", "pp311"] + exclude: + # Don't build PyPy 32-bit windows + - buildplat: [windows-2019, win32, ""] + python: "pp311" + # No PyPy on musllinux images + - buildplat: [ ubuntu-22.04, musllinux_x86_64, "" ] + python: "pp311" + - buildplat: [ ubuntu-22.04-arm, musllinux_aarch64, "" ] + python: "pp311" + - buildplat: [ macos-13, macosx_x86_64, openblas ] + python: "cp313t" + + env: + IS_32_BIT: ${{ matrix.buildplat[1] == 'win32' }} + IS_PUSH: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') }} + IS_SCHEDULE_DISPATCH: ${{ github.event_name == 'schedule' || github.event_name == 'workflow_dispatch' }} steps: - name: Checkout numpy uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 @@ -46,7 +115,178 @@ jobs: submodules: true persist-credentials: false - - uses: prefix-dev/setup-pixi@v0.8.3 + - name: Setup MSVC (32-bit) + if: ${{ matrix.buildplat[1] == 'win32' }} + uses:
bus1/cabuild/action/msdevshell@e22aba57d6e74891d059d66501b6b5aed8123c4d # v1 + with: + architecture: 'x86' + + - name: pkg-config-for-win + run: | + choco install -y --no-progress --stoponfirstfailure --checksum 6004DF17818F5A6DBF19CB335CC92702 pkgconfiglite + $CIBW = "${{ github.workspace }}/.openblas" + # pkgconfig needs a complete path, and not just "./openblas since the + # build is run in a tmp dir (?) + # It seems somewhere in the env passing, `\` is not + # passed through, so convert it to '/' + $CIBW = $CIBW.replace("\","/") + echo "CIBW_ENVIRONMENT_WINDOWS=PKG_CONFIG_PATH=$CIBW" >> $env:GITHUB_ENV + if: runner.os == 'windows' + + # Used to push the built wheels + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: "3.x" + + - name: Setup macOS + if: matrix.buildplat[0] == 'macos-13' || matrix.buildplat[0] == 'macos-14' + run: | + # Needed due to https://github.com/actions/runner-images/issues/3371 + # Supported versions: https://github.com/actions/runner-images/blob/main/images/macos/macos-14-arm64-Readme.md + echo "FC=gfortran-13" >> "$GITHUB_ENV" + echo "F77=gfortran-13" >> "$GITHUB_ENV" + echo "F90=gfortran-13" >> "$GITHUB_ENV" + if [[ ${{ matrix.buildplat[2] }} == 'accelerate' ]]; then + # macosx_arm64 and macosx_x86_64 with accelerate + # only target Sonoma onwards + CIBW="MACOSX_DEPLOYMENT_TARGET=14.0 INSTALL_OPENBLAS=false RUNNER_OS=macOS" + echo "CIBW_ENVIRONMENT_MACOS=$CIBW" >> "$GITHUB_ENV" + + # the macos-13 image that's used for building the x86_64 wheel can't test + # a wheel with deployment target >= 14 without further work + echo "CIBW_TEST_SKIP=*-macosx_x86_64" >> "$GITHUB_ENV" + else + # macosx_x86_64 with OpenBLAS + # if INSTALL_OPENBLAS isn't specified then scipy-openblas is automatically installed + CIBW="RUNNER_OS=macOS" + PKG_CONFIG_PATH="$PWD/.openblas" + DYLD="$DYLD_LIBRARY_PATH:/$PWD/.openblas/lib" + echo "CIBW_ENVIRONMENT_MACOS=$CIBW PKG_CONFIG_PATH=$PKG_CONFIG_PATH 
DYLD_LIBRARY_PATH=$DYLD" >> "$GITHUB_ENV" + fi + + - name: Set up free-threaded build + if: matrix.python == 'cp313t' + shell: bash -el {0} + run: | + echo "CIBW_BUILD_FRONTEND=pip; args: --no-build-isolation" >> "$GITHUB_ENV" - name: Build wheels - run: pixi run -e ${{ matrix.blas }} build -Csetup-args=-Duse-ilp64=true -Cvariant=blas::variant::${{ matrix.blas }} -Cvariant=x86_64::baseline::${{ matrix.x86_64[0] }} -Ccompile-args=-v -Csetup-args=-Dc_args=${{ matrix.x86_64[1] }} -Csetup-args=-Dcpp_args=${{ matrix.x86_64[1] }} + uses: pypa/cibuildwheel@42728e866bbc80d544a70825bd9990b9a26f1a50 # v2.23.1 + env: + CIBW_BUILD: ${{ matrix.python }}-${{ matrix.buildplat[1] }} + + - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: ${{ matrix.python }}-${{ matrix.buildplat[1] }}-${{ matrix.buildplat[2] }} + path: ./wheelhouse/*.whl + + - uses: mamba-org/setup-micromamba@0dea6379afdaffa5d528b3d1dabc45da37f443fc + with: + # for installation of anaconda-client, required for upload to + # anaconda.org + # Note that this step is *after* specific pythons have been used to + # build and test the wheel + # for installation of anaconda-client, for upload to anaconda.org + # environment will be activated after creation, and in future bash steps + init-shell: bash + environment-name: upload-env + create-args: >- + anaconda-client + + - name: Upload wheels + if: success() && github.repository == 'numpy/numpy' + shell: bash -el {0} + # see https://github.com/marketplace/actions/setup-miniconda for why + # `-el {0}` is required. + env: + NUMPY_STAGING_UPLOAD_TOKEN: ${{ secrets.NUMPY_STAGING_UPLOAD_TOKEN }} + NUMPY_NIGHTLY_UPLOAD_TOKEN: ${{ secrets.NUMPY_NIGHTLY_UPLOAD_TOKEN }} + run: | + source tools/wheels/upload_wheels.sh + set_upload_vars + # trigger an upload to + # https://anaconda.org/scientific-python-nightly-wheels/numpy + # for cron jobs or "Run workflow" (restricted to main branch). 
+ # Tags will upload to + # https://anaconda.org/multibuild-wheels-staging/numpy + # The tokens were originally generated at anaconda.org + upload_wheels + + build_sdist: + name: Build sdist + needs: get_commit_message + if: >- + contains(needs.get_commit_message.outputs.message, '[wheel build]') || + github.event_name == 'schedule' || + github.event_name == 'workflow_dispatch' || + (github.event_name == 'pull_request' && + (contains(github.event.pull_request.labels.*.name, '36 - Build') || + contains(github.event.pull_request.labels.*.name, '14 - Release'))) || + (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && ( ! endsWith(github.ref, 'dev0'))) + runs-on: ubuntu-latest + env: + IS_PUSH: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') }} + # commented out so the sdist doesn't upload to nightly + # IS_SCHEDULE_DISPATCH: ${{ github.event_name == 'schedule' || github.event_name == 'workflow_dispatch' }} + steps: + - name: Checkout numpy + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: true + persist-credentials: false + # Used to push the built wheels + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + # Build sdist on lowest supported Python + python-version: "3.11" + - name: Build sdist + run: | + python -m pip install -U pip build + python -m build --sdist -Csetup-args=-Dallow-noblas=true + - name: Test the sdist + run: | + # TODO: Don't run test suite, and instead build wheels from sdist + # Depends on pypa/cibuildwheel#1020 + python -m pip install dist/*.gz -Csetup-args=-Dallow-noblas=true + pip install -r requirements/test_requirements.txt + cd .. 
# Can't import numpy within numpy src directory + python -c "import numpy, sys; print(numpy.__version__); sys.exit(numpy.test() is False)" + + - name: Check README rendering for PyPI + run: | + python -mpip install twine + twine check dist/* + + - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: sdist + path: ./dist/* + + - uses: conda-incubator/setup-miniconda@505e6394dae86d6a5c7fbb6e3fb8938e3e863830 # v3.1.1 + with: + # for installation of anaconda-client, required for upload to + # anaconda.org + # default (and activated) environment name is test + # Note that this step is *after* specific pythons have been used to + # build and test + auto-update-conda: true + python-version: "3.11" + + - name: Upload sdist + if: success() && github.repository == 'numpy/numpy' + shell: bash -el {0} + env: + NUMPY_STAGING_UPLOAD_TOKEN: ${{ secrets.NUMPY_STAGING_UPLOAD_TOKEN }} + # commented out so the sdist doesn't upload to nightly + # NUMPY_NIGHTLY_UPLOAD_TOKEN: ${{ secrets.NUMPY_NIGHTLY_UPLOAD_TOKEN }} + run: | + conda install -y anaconda-client + source tools/wheels/upload_wheels.sh + set_upload_vars + # trigger an upload to + # https://anaconda.org/scientific-python-nightly-wheels/numpy + # for cron jobs or "Run workflow" (restricted to main branch). 
+ # Tags will upload to + # https://anaconda.org/multibuild-wheels-staging/numpy + # The tokens were originally generated at anaconda.org + upload_wheels diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml new file mode 100644 index 000000000000..5d9f5f4db7c9 --- /dev/null +++ b/.github/workflows/windows.yml @@ -0,0 +1,133 @@ +name: Windows tests + +on: + pull_request: + branches: + - main + - maintenance/** + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read # to fetch code (actions/checkout) + +jobs: + python64bit_openblas: + name: x86-64, LP64 OpenBLAS + runs-on: windows-2019 + # To enable this job on a fork, comment out: + if: github.repository == 'numpy/numpy' + strategy: + fail-fast: false + matrix: + compiler-pyversion: + - ["MSVC", "3.11"] + - ["Clang-cl", "3.13t"] + + steps: + - name: Checkout + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + + - name: Setup Python + uses: astral-sh/setup-uv@f94ec6bedd8674c4426838e6b50417d36b6ab231 + with: + python-version: ${{ matrix.compiler-pyversion[1] }} + enable-cache: false + + - run: + uv pip install --python=${{ matrix.version }} pip + + # TODO: remove cython nightly install when cython does a release + - name: Install nightly Cython + if: matrix.compiler-pyversion[1] == '3.13t' + run: | + pip install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple cython + + - name: Install build dependencies from PyPI + run: | + pip install -r requirements/build_requirements.txt + + - name: Install pkg-config + run: | + choco install -y --stoponfirstfailure --checksum 6004DF17818F5A6DBF19CB335CC92702 pkgconfiglite + echo "PKG_CONFIG_PATH=${{ github.workspace }}/.openblas" >> $env:GITHUB_ENV + + + - name: Install Clang-cl + if: matrix.compiler-pyversion[0] == 'Clang-cl' + run: | + # llvm 
is preinstalled, but leave + # this here in case we need to pin the + # version at some point. + #choco install llvm -y + + - name: Install NumPy (MSVC) + if: matrix.compiler-pyversion[0] == 'MSVC' + run: | + pip install -r requirements/ci_requirements.txt + spin build --with-scipy-openblas=32 -j2 -- --vsenv + + - name: Install NumPy (Clang-cl) + if: matrix.compiler-pyversion[0] == 'Clang-cl' + run: | + "[binaries]","c = 'clang-cl'","cpp = 'clang-cl'","ar = 'llvm-lib'","c_ld = 'lld-link'","cpp_ld = 'lld-link'" | Out-File $PWD/clang-cl-build.ini -Encoding ascii + pip install -r requirements/ci_requirements.txt + spin build --with-scipy-openblas=32 -j2 -- --vsenv --native-file=$PWD/clang-cl-build.ini + + - name: Meson Log + shell: bash + if: ${{ failure() }} + run: | + cat build/meson-logs/meson-log.txt + + - name: Install test dependencies + run: | + python -m pip install -r requirements/test_requirements.txt + python -m pip install threadpoolctl + + - name: Run test suite + run: | + spin test -- --timeout=600 --durations=10 + + msvc_32bit_python_no_openblas: + name: MSVC, 32-bit Python, no BLAS + runs-on: windows-2019 + # To enable this job on a fork, comment out: + if: github.repository == 'numpy/numpy' + steps: + - name: Checkout + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + + - name: Setup Python (32-bit) + uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: '3.11' + architecture: 'x86' + + - name: Setup MSVC (32-bit) + uses: bus1/cabuild/action/msdevshell@e22aba57d6e74891d059d66501b6b5aed8123c4d # v1 + with: + architecture: 'x86' + + - name: Build and install + run: | + python -m pip install . 
-v -Ccompile-args="-j2" -Csetup-args="-Dallow-noblas=true" + + - name: Install test dependencies + run: | + python -m pip install -r requirements/test_requirements.txt + + - name: Run test suite (fast) + run: | + cd tools + python -m pytest --pyargs numpy -m "not slow" -n2 --timeout=600 --durations=10 diff --git a/.github/workflows/windows_arm64.yml b/.github/workflows/windows_arm64.yml new file mode 100644 index 000000000000..42d96aa1989d --- /dev/null +++ b/.github/workflows/windows_arm64.yml @@ -0,0 +1,208 @@ +name: Windows Arm64 + +on: + workflow_dispatch: + +env: + python_version: 3.12 + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read # to fetch code (actions/checkout) + +jobs: + windows_arm: + runs-on: windows-2019 + + # To enable this job on a fork, comment out: + if: github.repository == 'numpy/numpy' + steps: + - name: Checkout + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: recursive + fetch-tags: true + persist-credentials: false + + - name: Setup Python + uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + with: + python-version: ${{env.python_version}} + architecture: x64 + + - name: Install build dependencies from PyPI + run: | + python -m pip install -r requirements/build_requirements.txt + + - name: Prepare python + shell: powershell + run: | + $ErrorActionPreference = "Stop" + + #Detecting python location and version + $PythonDir = (Split-Path -Parent (get-command python).Path) + $PythonVersionParts = ( -split (python -V)) + $PythonVersion = $PythonVersionParts[1] + + #Downloading the package for appropriate python version from nuget + $PythonARM64NugetLink = "https://www.nuget.org/api/v2/package/pythonarm64/$PythonVersion" + $PythonARM64NugetZip = "nuget_python.zip" + $PythonARM64NugetDir = "temp_nuget" + Invoke-WebRequest $PythonARM64NugetLink -OutFile $PythonARM64NugetZip + 
+ #Changing the libs folder to enable python libraries to be linked for arm64 + Expand-Archive $PythonARM64NugetZip $PythonARM64NugetDir + Copy-Item $PythonARM64NugetDir\tools\libs\* $PythonDir\libs + Remove-Item -Force -Recurse $PythonARM64NugetDir + Remove-Item -Force $PythonARM64NugetZip + + if ((Test-Path -LiteralPath variable:\LASTEXITCODE)) { exit $LASTEXITCODE } + + - name: Prepare Licence + shell: powershell + run: | + $ErrorActionPreference = "Stop" + + $CurrentDir = (get-location).Path + $LicenseFile = "$CurrentDir\LICENSE.txt" + Set-Content $LicenseFile ([Environment]::NewLine) + Add-Content $LicenseFile "----" + Add-Content $LicenseFile ([Environment]::NewLine) + Add-Content $LicenseFile (Get-Content "$CurrentDir\LICENSES_bundled.txt") + Add-Content $LicenseFile (Get-Content "$CurrentDir\tools\wheels\LICENSE_win32.txt") + + if ((Test-Path -LiteralPath variable:\LASTEXITCODE)) { exit $LASTEXITCODE } + + - name: Wheel build + shell: powershell + run: | + $ErrorActionPreference = "Stop" + + #Creating cross compile script for messon subsystem + $CurrentDir = (get-location) + $CrossScript = "$CurrentDir\arm64_w64.txt" + $CrossScriptContent = + { + [host_machine] + system = 'windows' + subsystem = 'windows' + kernel = 'nt' + cpu_family = 'aarch64' + cpu = 'aarch64' + endian = 'little' + + [binaries] + c='cl.exe' + cpp = 'cl.exe' + + [properties] + sizeof_short = 2 + sizeof_int = 4 + sizeof_long = 4 + sizeof_long_long = 8 + sizeof_float = 4 + sizeof_double = 8 + sizeof_long_double = 8 + sizeof_size_t = 8 + sizeof_wchar_t = 2 + sizeof_off_t = 4 + sizeof_Py_intptr_t = 8 + sizeof_PY_LONG_LONG = 8 + longdouble_format = 'IEEE_DOUBLE_LE' + } + Set-Content $CrossScript $CrossScriptContent.ToString() + + #Setting up cross compilers from MSVC + $Products = 'Community', 'Professional', 'Enterprise', 'BuildTools' | % { "Microsoft.VisualStudio.Product.$_" } + $VsInstallPath = (vswhere -products $Products -latest -format json | ConvertFrom-Json).installationPath + $VSVars 
= (Get-ChildItem -Path $VsInstallPath -Recurse -Filter "vcvarsamd64_arm64.bat").FullName + $ScriptingObj = New-Object -ComObject Scripting.FileSystemObject + $VSVarsShort = $ScriptingObj.GetFile($VSVars).ShortPath + cmd /c "$VSVarsShort && set" | + ForEach-Object { + if ($_ -match "=") { + $Var = $_.split("=") + set-item -force -path "ENV:\$($Var[0])" -value "$($Var[1])" + } + } + + #Building the wheel + pip wheel . --config-settings=setup-args="--cross-file=$CrossScript" + + if ((Test-Path -LiteralPath variable:\LASTEXITCODE)) { exit $LASTEXITCODE } + + - name: Fix wheel + shell: powershell + run: | + $ErrorActionPreference = "Stop" + + #Finding whl file + $CurrentDir = (get-location) + $WhlName = ((Get-ChildItem -Filter "*.whl").FullName) + $ZipWhlName = "$CurrentDir\ZipWhlName.zip" + $UnzippedWhl = "$CurrentDir\unzipedWhl" + + #Expanding whl file + Rename-Item -Path $WhlName $ZipWhlName + if (Test-Path $UnzippedWhl) { + Remove-Item -Force -Recurse $UnzippedWhl + } + Expand-Archive -Force -Path $ZipWhlName $UnzippedWhl + + #Renaming all files to show that their arch is arm64 + Get-ChildItem -Recurse -Path $UnzippedWhl *win_amd64* | Rename-Item -NewName { $_.Name -replace 'win_amd64', 'win_arm64' } + $DIST_DIR = (Get-ChildItem -Recurse -Path $UnzippedWhl *dist-info).FullName + + #Changing amd64 references from metafiles + (GET-Content $DIST_DIR/RECORD) -replace 'win_amd64', 'win_arm64' | Set-Content $DIST_DIR/RECORD + (GET-Content $DIST_DIR/WHEEL) -replace 'win_amd64', 'win_arm64' | Set-Content $DIST_DIR/WHEEL + + #Packing whl file + Compress-Archive -Path $UnzippedWhl\* -DestinationPath $ZipWhlName -Force + $WhlName = $WhlName.Replace("win_amd64", "win_arm64") + Rename-Item -Path $ZipWhlName $WhlName + + if ((Test-Path -LiteralPath variable:\LASTEXITCODE)) { exit $LASTEXITCODE } + + - name: Upload Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: ${{ env.python_version }}-win_arm64 + path: ./*.whl + + - 
name: Setup Mamba + uses: mamba-org/setup-micromamba@0dea6379afdaffa5d528b3d1dabc45da37f443fc + with: + # for installation of anaconda-client, required for upload to + # anaconda.org + # Note that this step is *after* specific pythons have been used to + # build and test the wheel + # for installation of anaconda-client, for upload to anaconda.org + # environment will be activated after creation, and in future bash steps + init-shell: bash + environment-name: upload-env + create-args: >- + anaconda-client + + # - name: Upload wheels + # if: success() + # shell: bash -el {0} + # # see https://github.com/marketplace/actions/setup-miniconda for why + # # `-el {0}` is required. + # env: + # NUMPY_STAGING_UPLOAD_TOKEN: ${{ secrets.NUMPY_STAGING_UPLOAD_TOKEN }} + # NUMPY_NIGHTLY_UPLOAD_TOKEN: ${{ secrets.NUMPY_NIGHTLY_UPLOAD_TOKEN }} + # run: | + # source tools/wheels/upload_wheels.sh + # set_upload_vars + # # trigger an upload to + # # https://anaconda.org/scientific-python-nightly-wheels/numpy + # # for cron jobs or "Run workflow" (restricted to main branch). 
+ # # Tags will upload to + # # https://anaconda.org/multibuild-wheels-staging/numpy + # # The tokens were originally generated at anaconda.org + # upload_wheels + diff --git a/.gitignore b/.gitignore index 1ae65c7b0bd2..df7f084e3645 100644 --- a/.gitignore +++ b/.gitignore @@ -150,7 +150,3 @@ tools/swig/test/Vector_wrap.cxx tools/swig/test/Array.py .openblas numpy/_distributor_init_local.py - -# pixi environments -.pixi -*.egg-info diff --git a/pyproject.toml b/pyproject.toml index 47d0ad20a05a..646d7bebdcb9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -145,11 +145,12 @@ build-frontend = "build" skip = "*_i686 *_ppc64le *_s390x *_universal2" before-build = "bash {project}/tools/wheels/cibw_before_build.sh {project}" # The build will use openblas64 everywhere, except on arm64 macOS >=14.0 (uses Accelerate) -config-settings = "setup-args=-Duse-ilp64=true setup-args=-Dallow-noblas=false build-dir=build variant-name=foo::bar::baz variant-name=a::b::c" +config-settings = "setup-args=-Duse-ilp64=true setup-args=-Dallow-noblas=false build-dir=build" +before-test = "pip install -r {project}/requirements/test_requirements.txt" +test-command = "bash {project}/tools/wheels/cibw_test_command.sh {project}" enable = ["cpython-freethreading", "pypy", "cpython-prerelease"] [tool.cibuildwheel.linux] -repair-wheel-command = "" manylinux-x86_64-image = "manylinux_2_28" manylinux-aarch64-image = "manylinux_2_28" musllinux-x86_64-image = "musllinux_1_2" diff --git a/tools/wheels/cibw_before_build.sh b/tools/wheels/cibw_before_build.sh index c49a54e9d3b3..e2f464d32a2a 100644 --- a/tools/wheels/cibw_before_build.sh +++ b/tools/wheels/cibw_before_build.sh @@ -61,5 +61,3 @@ if [[ $FREE_THREADED_BUILD == "True" ]]; then python -m pip install meson-python ninja python -m pip install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple cython fi - -dnf install -y blis-devel From 58b97173db4dfea5db38a32b8e131564433546f2 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Wed, 26 Mar 2025 20:21:11 +0100 Subject: [PATCH 35/44] Disable dispatching to higher arches on non-highest target --- meson_cpu/x86/meson.build | 3 +++ 1 file changed, 3 insertions(+) diff --git a/meson_cpu/x86/meson.build b/meson_cpu/x86/meson.build index 02aa0f1f6f9f..10f9e5d16397 100644 --- a/meson_cpu/x86/meson.build +++ b/meson_cpu/x86/meson.build @@ -4,10 +4,13 @@ mod_features = import('features') if x86_64_variant != '' if x86_64_variant == 'v1' CPU_CONF_BASELINE = 'min' + CPU_CONF_DISPATCH = '' elif x86_64_variant == 'v2' CPU_CONF_BASELINE = 'SSE42' + CPU_CONF_DISPATCH = '' elif x86_64_variant == 'v3' CPU_CONF_BASELINE = 'AVX2' + CPU_CONF_DISPATCH = '' elif x86_64_variant == 'v4' CPU_CONF_BASELINE = 'AVX512_SKX' else From 3e1f2f4d2753d3d126d70fcb3b881aa225d4cffa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Mon, 14 Apr 2025 08:13:15 +0200 Subject: [PATCH 36/44] Update for new x86_64 plugin --- .github/workflows/variant-wheels.yml | 2 +- meson.build | 2 +- pyproject.toml | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/variant-wheels.yml b/.github/workflows/variant-wheels.yml index fe90b656d6e1..88ac10a20bb0 100644 --- a/.github/workflows/variant-wheels.yml +++ b/.github/workflows/variant-wheels.yml @@ -49,4 +49,4 @@ jobs: - uses: prefix-dev/setup-pixi@v0.8.3 - name: Build wheels - run: pixi run -e ${{ matrix.blas }} build -Csetup-args=-Duse-ilp64=true -Cvariant=blas::variant::${{ matrix.blas }} -Cvariant=x86_64::baseline::${{ matrix.x86_64[0] }} -Ccompile-args=-v -Csetup-args=-Dc_args=${{ matrix.x86_64[1] }} -Csetup-args=-Dcpp_args=${{ matrix.x86_64[1] }} + run: pixi run -e ${{ matrix.blas }} build -Csetup-args=-Duse-ilp64=true -Cvariant=blas::variant::${{ matrix.blas }} -Cvariant=x86_64::level::${{ matrix.x86_64[0] }} -Ccompile-args=-v -Csetup-args=-Dc_args=${{ matrix.x86_64[1] }} -Csetup-args=-Dcpp_args=${{ matrix.x86_64[1] }} diff --git 
a/meson.build b/meson.build index b2f3289eafa8..80f90376e8a0 100644 --- a/meson.build +++ b/meson.build @@ -98,7 +98,7 @@ foreach variant_meta : get_option('variant') endif if split_meta[0].strip() == 'blas' and split_meta[1].strip() == 'variant' blas_variant = split_meta[2].strip() - elif split_meta[0].strip() == 'x86_64' and split_meta[1].strip() == 'baseline' + elif split_meta[0].strip() == 'x86_64' and split_meta[1].strip() == 'level' if host_machine.cpu_family() != 'x86_64' error('Variant valid only on x86_64: ' + variant_meta) endif diff --git a/pyproject.toml b/pyproject.toml index 646d7bebdcb9..5ad6e202666d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,6 +3,7 @@ build-backend = "mesonpy" requires = [ "meson-python @ https://github.com/mgorny/meson-python/archive/wheel-variants.tar.gz", "Cython>=3.0.6", # keep in sync with version check in meson.build + "variant_x86_64 @ https://github.com/wheelnext/variant_x86_64/archive/main.tar.gz ; 'x86_64' in variants" ] [project] From 1e9dbc0f5bb55027b0feac819eca11b29351e994 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Mon, 14 Apr 2025 10:15:37 +0200 Subject: [PATCH 37/44] Use our build fork --- pixi.toml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/pixi.toml b/pixi.toml index ac11f17b0399..cad75728d4db 100644 --- a/pixi.toml +++ b/pixi.toml @@ -4,14 +4,18 @@ name = "numpy-wheel-build" platforms = ["linux-64"] [tasks] -build = "python -m build -w" +build = "python -m build --installer=uv -w" [dependencies] c-compiler = "*" cxx-compiler = "*" fortran-compiler = "*" -python-build = "*" pkg-config = "*" +python = "*" +uv = "*" + +[pypi-dependencies] +build = { git = "https://github.com/mgorny/build", branch = "variant-deps" } [feature.mkl.dependencies] mkl-devel = "*" From 8c40e2fcdc600ac996160c5a0e0574f542038077 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Mon, 14 Apr 2025 10:21:44 +0200 Subject: [PATCH 38/44] Add artifact 
upload --- .github/workflows/variant-wheels.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/workflows/variant-wheels.yml b/.github/workflows/variant-wheels.yml index 88ac10a20bb0..d8d6375d6275 100644 --- a/.github/workflows/variant-wheels.yml +++ b/.github/workflows/variant-wheels.yml @@ -50,3 +50,10 @@ jobs: - name: Build wheels run: pixi run -e ${{ matrix.blas }} build -Csetup-args=-Duse-ilp64=true -Cvariant=blas::variant::${{ matrix.blas }} -Cvariant=x86_64::level::${{ matrix.x86_64[0] }} -Ccompile-args=-v -Csetup-args=-Dc_args=${{ matrix.x86_64[1] }} -Csetup-args=-Dcpp_args=${{ matrix.x86_64[1] }} + + - uses: actions/upload-artifact@v4 + with: + name: wheel-${{ matrix.blas }}-x86-64-${{ matrix.x86_64[0] }} + path: dist/*.whl + if-no-files-found: error + compression-level: 0 From 8d8cd5c1b2b47c051c018fc024c59d5f957580f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Mon, 14 Apr 2025 10:22:32 +0200 Subject: [PATCH 39/44] Remove redundant cflags (should be done via build-setup now) --- .github/workflows/variant-wheels.yml | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/.github/workflows/variant-wheels.yml b/.github/workflows/variant-wheels.yml index d8d6375d6275..2f91cc354373 100644 --- a/.github/workflows/variant-wheels.yml +++ b/.github/workflows/variant-wheels.yml @@ -21,7 +21,7 @@ permissions: jobs: build_wheels: - name: Build wheel ${{ matrix.python }}-${{ matrix.blas }}-x86_64-${{ matrix.x86_64[0] }} + name: Build wheel ${{ matrix.python }}-${{ matrix.blas }}-x86_64-${{ matrix.x86_64 }} runs-on: ${{ matrix.buildplat[0] }} strategy: # Ensure that a wheel builder finishes even if another fails @@ -33,11 +33,7 @@ jobs: buildplat: - [ubuntu-22.04, manylinux_x86_64, ""] python: ["3.12"] - x86_64: - - [v1, -march=x86-64] - - [v2, -march=x86-64-v2] - - [v3, -march=x86-64-v3] - - [v4, -march=x86-64-v4] + x86_64: [v1, v2, v3, v4] steps: - name: Checkout numpy @@ -49,11 +45,11 @@ jobs: - 
uses: prefix-dev/setup-pixi@v0.8.3 - name: Build wheels - run: pixi run -e ${{ matrix.blas }} build -Csetup-args=-Duse-ilp64=true -Cvariant=blas::variant::${{ matrix.blas }} -Cvariant=x86_64::level::${{ matrix.x86_64[0] }} -Ccompile-args=-v -Csetup-args=-Dc_args=${{ matrix.x86_64[1] }} -Csetup-args=-Dcpp_args=${{ matrix.x86_64[1] }} + run: pixi run -e ${{ matrix.blas }} build -Csetup-args=-Duse-ilp64=true -Cvariant=blas::variant::${{ matrix.blas }} -Cvariant=x86_64::level::${{ matrix.x86_64 }} -Ccompile-args=-v - uses: actions/upload-artifact@v4 with: - name: wheel-${{ matrix.blas }}-x86-64-${{ matrix.x86_64[0] }} + name: wheel-${{ matrix.blas }}-x86-64-${{ matrix.x86_64 }} path: dist/*.whl if-no-files-found: error compression-level: 0 From f4e80073bef9f57032436540133dd9e7b623b63f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Mon, 14 Apr 2025 19:17:44 +0200 Subject: [PATCH 40/44] Use pip & numpy-demo branch --- pixi.toml | 4 ++-- pyproject.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pixi.toml b/pixi.toml index cad75728d4db..9121ed504422 100644 --- a/pixi.toml +++ b/pixi.toml @@ -4,15 +4,15 @@ name = "numpy-wheel-build" platforms = ["linux-64"] [tasks] -build = "python -m build --installer=uv -w" +build = "python -m build -w" [dependencies] c-compiler = "*" cxx-compiler = "*" fortran-compiler = "*" pkg-config = "*" +pip = "*" python = "*" -uv = "*" [pypi-dependencies] build = { git = "https://github.com/mgorny/build", branch = "variant-deps" } diff --git a/pyproject.toml b/pyproject.toml index 5ad6e202666d..a0d83ec01a14 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ build-backend = "mesonpy" requires = [ "meson-python @ https://github.com/mgorny/meson-python/archive/wheel-variants.tar.gz", "Cython>=3.0.6", # keep in sync with version check in meson.build - "variant_x86_64 @ https://github.com/wheelnext/variant_x86_64/archive/main.tar.gz ; 'x86_64' in variants" + "variant_x86_64 @ 
https://github.com/mgorny/variant_x86_64/archive/numpy-demo.tar.gz ; 'x86_64' in variants" ] [project] From d1f89efffd6bf5a9439fb60a9a92d860ef809b29 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Mon, 14 Apr 2025 19:33:12 +0200 Subject: [PATCH 41/44] trigger build From d32be73e8ac198a5d84b332137fdd39fe7d9d07a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Mon, 14 Apr 2025 19:41:36 +0200 Subject: [PATCH 42/44] Unset CFLAGS/CXXFLAGS leaking from env --- pixi.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pixi.toml b/pixi.toml index 9121ed504422..1f4516d0b7be 100644 --- a/pixi.toml +++ b/pixi.toml @@ -4,7 +4,7 @@ name = "numpy-wheel-build" platforms = ["linux-64"] [tasks] -build = "python -m build -w" +build = "unset CFLAGS CXXFLAGS; python -m build -w" [dependencies] c-compiler = "*" From 003e33a9e1ba2a97d9c2883535277ecab0eeea50 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Tue, 15 Apr 2025 16:37:40 +0200 Subject: [PATCH 43/44] Revert "Unset CFLAGS/CXXFLAGS leaking from env" This reverts commit d32be73e8ac198a5d84b332137fdd39fe7d9d07a. That shouldn't be necessary, I was getting confused by meson output. 
--- pixi.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pixi.toml b/pixi.toml index 1f4516d0b7be..9121ed504422 100644 --- a/pixi.toml +++ b/pixi.toml @@ -4,7 +4,7 @@ name = "numpy-wheel-build" platforms = ["linux-64"] [tasks] -build = "unset CFLAGS CXXFLAGS; python -m build -w" +build = "python -m build -w" [dependencies] c-compiler = "*" From bd125ad607dc92b5395461e282ab0d8c1fd3bd13 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Thu, 17 Apr 2025 14:28:16 +0200 Subject: [PATCH 44/44] Switch to main branch of x86_64 variant plugin --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index a0d83ec01a14..5ad6e202666d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ build-backend = "mesonpy" requires = [ "meson-python @ https://github.com/mgorny/meson-python/archive/wheel-variants.tar.gz", "Cython>=3.0.6", # keep in sync with version check in meson.build - "variant_x86_64 @ https://github.com/mgorny/variant_x86_64/archive/numpy-demo.tar.gz ; 'x86_64' in variants" + "variant_x86_64 @ https://github.com/wheelnext/variant_x86_64/archive/main.tar.gz ; 'x86_64' in variants" ] [project]