Skip to content

Commit a433d30

Browse files
authored
Merge pull request #695 from bccp/fixes
This PR fixes the build for recent numpy and scipy versions, fixes compatibility with the new classylss, fixes a variety of bugs found by running a linter, and fixes the GitHub Action so that it successfully runs the tests.
2 parents e6342d1 + f68b2ee commit a433d30

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

80 files changed

+996
-825
lines changed

.github/workflows/main.yaml

Lines changed: 46 additions & 64 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,8 @@
11
# main test workflow; ported from .travis.yaml
22

3-
43
name: main
54

65
on:
7-
schedule:
8-
- cron: '0 0 * * 5' # Runs on every Friday.
96
push:
107
branches: [ '*', $default-branch ]
118
tags: ['[0-9]*'] # anything looks like a version.
@@ -17,79 +14,64 @@ jobs:
1714
runs-on: ${{ matrix.os }}
1815
env:
1916
OMP_NUM_THREADS: 1
20-
2117
defaults:
2218
run:
2319
shell: bash -l {0} # for conda.
24-
2520
strategy:
26-
fail-fast: false
2721
matrix:
28-
os: [ ubuntu-latest, macos-latest ]
29-
numpy-version: [ '1.20' ]
30-
python-version: [ 3.8 ]
31-
include:
32-
- {deploy: true, os: ubuntu-latest, numpy-version: '1.20', python-version: 3.8 }
22+
os: [ubuntu-latest]
23+
python-version: [ '3.8', '3.10', '3.13' ]
3324
steps:
34-
3525
- name: Checkout source code
36-
uses: actions/checkout@v2
37-
38-
- name: Cache conda
39-
uses: actions/cache@v1
40-
env:
41-
# Increase this value to reset cache.
42-
CACHE_NUMBER: 0
43-
with:
44-
path: ~/conda_pkgs_dir
45-
key:
46-
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}
47-
48-
- name: Setup Conda Environment
49-
uses: conda-incubator/[email protected]
50-
with:
51-
activate-environment: test
52-
channels: bccp
53-
show-channel-urls: true
54-
use-only-tar-bz2: false
55-
auto-update-conda: true
56-
python-version: ${{ matrix.python-version }}
57-
26+
uses: actions/checkout@v4
5827
- name: Setup test env
5928
run: |
60-
conda install -q -y \
61-
python=${{ matrix.python-version }} \
62-
numpy=${{ matrix.numpy-version }} \
63-
nose cython mpi4py \
64-
--file requirements.txt \
65-
--file requirements-extras.txt
66-
67-
# submitting coverage to coveralls
68-
pip install coveralls
69-
# install nbodykit itself
70-
pip install .[extras]
71-
conda list --explicit
72-
73-
- name: Install Compilers
29+
sudo apt update
30+
sudo apt install -y build-essential libopenmpi-dev openmpi-bin python3-numpy python3-scipy python3-mpi4py python3-sympy python3-astropy cython3 python3-dask python3-pandas python3-h5py libgsl-dev
31+
python -m pip install --upgrade pip
32+
pip install build pytest pytest-mpi
33+
pip install -r requirements.txt
34+
pip install -r requirements-extras.txt
35+
- name: Build
7436
run: |
75-
if [ "$RUNNER_OS" == "macOS" ]; then
76-
conda install -q -y clang_osx-64 clangxx_osx-64 gfortran_osx-64
77-
else
78-
conda install -q -y gcc_linux-64 gxx_linux-64 gfortran_linux-64
79-
fi
80-
37+
python -m build
38+
- name: Install
39+
run: |
40+
python -m pip install --user dist/nbodykit*whl
41+
- name: Build in place for tests
42+
run: |
43+
python setup.py build_ext --inplace
8144
- name: Python Unit tests
8245
run: |
83-
python run-tests.py -v --mpirun='mpirun -n 4' --with-coverage
84-
85-
- name: Build Python sdist
86-
if: startsWith(github.ref, 'refs/tags') && matrix.deploy
46+
cd nbodykit
47+
python -m pytest --with-mpi
48+
- name: MPI Unit tests
8749
run: |
88-
bash check_tag.sh "${GITHUB_REF##*/}" bigfile/version.py
89-
python setup.py sdist
50+
cd nbodykit
51+
mpirun -n 4 --oversubscribe python -m pytest --with-mpi
52+
- name: Store the distribution packages
53+
if: matrix.os == 'ubuntu-latest' && matrix.python-version == '3.10' #Only do this once!
54+
uses: actions/upload-artifact@v4
55+
with:
56+
name: python-package-distributions
57+
path: dist/nbodykit*.tar.gz
9058

91-
- name: Publish distribution to PyPI
92-
uses: pypa/gh-action-pypi-publish@master
93-
if: startsWith(github.ref, 'refs/tags') && matrix.deploy
59+
publish-to-pypi:
60+
name: Publish to PyPI
61+
if: startsWith(github.ref, 'refs/tags/') # only publish to PyPI on tag pushes
62+
needs:
63+
- build
64+
runs-on: ubuntu-latest
65+
environment:
66+
name: pypi
67+
url: https://pypi.org/p/nbodykit
68+
permissions:
69+
id-token: write # IMPORTANT: mandatory for trusted publishing
70+
steps:
71+
- name: Download the source dist
72+
uses: actions/download-artifact@v4
9473
with:
95-
password: ${{ secrets.PYPI_SECRET }}
74+
name: python-package-distributions
75+
path: dist/
76+
- name: Publish distribution to PyPI
77+
uses: pypa/gh-action-pypi-publish@release/v1

docs/environment.yml

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,12 +11,11 @@ channels:
1111
- astropy
1212

1313
dependencies:
14-
- python=3.6
14+
- python
1515
- numpy
1616
- scipy
1717
- astropy
1818
- mpi4py
19-
- runtests
2019
- pmesh
2120
- kdcount
2221
- mpsort

nbodykit/__init__.py

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -2,15 +2,19 @@
22

33
from mpi4py import MPI
44

5+
from contextlib import contextmanager
6+
import logging
7+
58
import dask
9+
import dask.cache
610

711
import warnings
812

913

1014
try:
1115
# prevents too many threads exception when using MPI and dask
1216
# by disabling threading in dask.
13-
dask.config.set(scheduler='synchronous')
17+
dask.config.set(scheduler='synchronous')
1418
except:
1519
# deprecated since 0.18.1
1620
dask.set_options(get=dask.get)
@@ -20,9 +24,6 @@
2024
_global_options['dask_chunk_size'] = 100000
2125
_global_options['paint_chunk_size'] = 1024 * 1024 * 4
2226

23-
from contextlib import contextmanager
24-
import logging
25-
2627
def _unpickle(name):
2728
return getattr(MPI, name)
2829

@@ -97,7 +98,7 @@ def use_mpi(comm=None):
9798
This function shall only be used before any nbodykit object is created.
9899
99100
"""
100-
dask.config.set(scheduler='synchronous')
101+
dask.config.set(scheduler='synchronous')
101102
if comm is None:
102103
comm = MPI.COMM_WORLD
103104
CurrentMPIComm._stack[-1] = comm
@@ -139,7 +140,7 @@ def enter(cls, comm):
139140
with CurrentMPIComm.enter(comm):
140141
cat = UniformCatalog(...)
141142
142-
is identical to
143+
is identical to
143144
144145
.. code:: python
145146
@@ -189,7 +190,6 @@ def set(cls, comm):
189190
cls._stack[-1] = comm
190191
cls._stack[-1].barrier()
191192

192-
import dask.cache
193193
class GlobalCache(dask.cache.Cache):
194194
"""
195195
A Cache object.
@@ -270,7 +270,6 @@ def setup_logging(log_level="info"):
270270
#
271271
# [ 000000.43 ] 0: 06-28 14:49 measurestats INFO Nproc = [2, 1, 1]
272272
# [ 000000.43 ] 0: 06-28 14:49 measurestats INFO Rmax = 120
273-
import logging
274273

275274
levels = {
276275
"info" : logging.INFO,
@@ -279,7 +278,7 @@ def setup_logging(log_level="info"):
279278
}
280279

281280
import time
282-
logger = logging.getLogger();
281+
logger = logging.getLogger()
283282
t0 = time.time()
284283

285284
rank = MPI.COMM_WORLD.rank

nbodykit/algorithms/convpower/tests/test_catalog.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,14 @@
1-
from runtests.mpi import MPITest
21
from nbodykit.lab import *
32
from nbodykit import setup_logging
43

54
from numpy.testing import assert_allclose
65
import pytest
6+
from mpi4py import MPI
77

88
setup_logging()
99

10-
@MPITest([1])
10+
@pytest.mark.parametrize("comm", [MPI.COMM_WORLD,])
11+
@pytest.mark.mpi
1112
def test_missing_columns(comm):
1213

1314
# create FKP catalog
@@ -17,7 +18,8 @@ def test_missing_columns(comm):
1718
with pytest.raises(ValueError):
1819
cat = FKPCatalog(source1, source2, BoxSize=512.0, BoxPad=0.02)
1920

20-
@MPITest([1])
21+
@pytest.mark.parametrize("comm", [MPI.COMM_WORLD,])
22+
@pytest.mark.mpi
2123
def test_boxsize(comm):
2224

2325
# data and randoms

nbodykit/algorithms/convpower/tests/test_catalogmesh.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
from runtests.mpi import MPITest
21
from nbodykit.lab import *
32
from nbodykit import setup_logging
43

@@ -7,7 +6,12 @@
76

87
setup_logging()
98

10-
@MPITest([1, 4])
9+
from mpi4py import MPI
10+
11+
setup_logging()
12+
13+
@pytest.mark.parametrize("comm", [MPI.COMM_WORLD,])
14+
@pytest.mark.mpi
1115
def test_paint(comm):
1216

1317
NBAR1 = 3e-5; WEIGHT1 = 1.05

nbodykit/algorithms/fftpower.py

Lines changed: 17 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
from nbodykit.meshtools import SlabIterator
88
from nbodykit.base.catalog import CatalogSourceBase
99
from nbodykit.base.mesh import MeshSource
10+
from mpi4py import MPI
1011

1112
class FFTBase(object):
1213
"""
@@ -143,7 +144,7 @@ def _compute_3d_power(self, first, second):
143144

144145

145146
class FFTPower(FFTBase):
146-
"""
147+
r"""
147148
Algorithm to compute the 1d or 2d power spectrum and/or multipoles
148149
in a periodic box, using a Fast Fourier Transform (FFT).
149150
@@ -227,10 +228,10 @@ def __init__(self, first, mode, Nmesh=None, BoxSize=None, second=None,
227228
self.attrs.update(self.power.attrs)
228229

229230
def run(self):
230-
"""
231+
r"""
231232
Compute the power spectrum in a periodic box, using FFTs.
232233
233-
Returns
234+
Returns
234235
-------
235236
power : :class:`~nbodykit.binned_statistic.BinnedStatistic`
236237
a BinnedStatistic object that holds the measured :math:`P(k)` or
@@ -735,24 +736,27 @@ def _find_unique_edges(x, x0, xmax, comm):
735736
736737
Returns edges and the true centers
737738
"""
738-
def find_unique_local(x, x0):
739-
fx2 = 0
740-
for xi, x0i in zip(x, x0):
741-
fx2 = fx2 + xi ** 2
739+
fx2 = 0
740+
for xi in x:
741+
fx2 = fx2 + xi ** 2
742742

743+
def find_unique_local(fx2, binning):
744+
"""Find unique values in a floating point array by making integer bins"""
743745
fx2 = numpy.ravel(fx2)
744-
ix2 = numpy.int64(fx2 / (x0.min() * 0.5) ** 2 + 0.5)
746+
ix2 = numpy.int64(fx2 / binning + 0.5)
745747
ix2, ind = numpy.unique(ix2, return_index=True)
746748
fx2 = fx2[ind]
747-
return fx2 ** 0.5
749+
return fx2
748750

749-
fx = find_unique_local(x, x0)
751+
binning = (x0.min() * 0.05)**2
752+
fx = find_unique_local(fx2, binning)**0.5
750753

751754
fx = fx[fx < xmax]
752755
fx = numpy.concatenate(comm.allgather(fx), axis=0)
753-
# may have duplicates after allgather
754-
fx = numpy.unique(fx)
755-
fx.sort()
756+
# May have duplicates after allgather: need to re-bin.
757+
# We want to be picky about duplicates, so use a small bin size
758+
minx0 = comm.allreduce(x0.min(), op=MPI.MIN)
759+
fx = find_unique_local(fx, minx0 * 1e-5)
756760

757761
# now make some reasonable bins.
758762
width = numpy.diff(fx)

nbodykit/algorithms/fof.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -230,8 +230,8 @@ def _assign_labels(minid, comm, thresh):
230230
# assign origind for recovery of ordering, since
231231
# we need to work in sorted fofid
232232
data['fofid'] = minid
233-
data['origind'] = numpy.arange(len(data), dtype='u4')
234-
data['origind'] += numpy.sum(comm.allgather(len(data))[:comm.rank], dtype='intp') \
233+
data['origind'] = numpy.arange(len(data), dtype='u8')
234+
data['origind'] += numpy.sum(comm.allgather(int(len(data)))[:comm.rank], dtype='u8')
235235

236236
data = DistributedArray(data, comm)
237237

nbodykit/algorithms/kdtree.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44

55
from nbodykit.utils import split_size_3d
66
from pmesh.domain import GridND
7-
from scipy.spatial.ckdtree import cKDTree as KDTree
7+
from scipy.spatial import cKDTree as KDTree
88

99
class KDDensity(object):
1010
"""

0 commit comments

Comments
 (0)