Skip to content

Commit

Permalink
MNT: remove superfluous code and add more tests, add fsspec to dev requirements
Browse files Browse the repository at this point in the history
  • Loading branch information
kmuehlbauer committed Feb 27, 2024
1 parent de57922 commit cb3ab83
Show file tree
Hide file tree
Showing 6 changed files with 91 additions and 53 deletions.
1 change: 1 addition & 0 deletions ci/unittests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ dependencies:
- cmweather
- coverage
- dask
- fsspec
- h5netcdf
- h5py
- lat_lon_parser
Expand Down
4 changes: 2 additions & 2 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
cmweather
dask
h5netcdf
h5py
h5netcdf >= 1.0.0
h5py >= 3.0.0
lat_lon_parser
netCDF4
numpy
Expand Down
1 change: 1 addition & 0 deletions requirements_dev.txt
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,4 @@ twine
pytest
black
isort
fsspec
108 changes: 81 additions & 27 deletions tests/io/test_io.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,11 @@

"""Tests for `io` module."""

import io
import tempfile

import datatree
import fsspec
import h5py
import numpy as np
import pytest
Expand Down Expand Up @@ -98,6 +100,16 @@ def test_open_cfradial1_dataset(cfradial1_file):
assert ds.sweep_number == 8


@pytest.mark.parametrize("sweep", ["sweep_0", 0, [0, 1], ["sweep_0", "sweep_1"]])
def test_open_odim_datatree_sweep(odim_file, sweep):
    """Selecting sweeps yields exactly one datatree group per requested sweep.

    ``sweep`` may be a single name/index or a list of names/indices; a scalar
    selection must produce a single sweep group.
    """
    dtree = open_odim_datatree(odim_file, sweep=sweep)
    # a scalar (str/int) selection always maps to exactly one sweep group
    expected = 1 if isinstance(sweep, (str, int)) else len(sweep)
    # groups[0] is the tree root; the remainder are the sweep groups
    assert len(dtree.groups[1:]) == expected


def test_open_odim_datatree(odim_file):
dtree = open_odim_datatree(odim_file)

Expand Down Expand Up @@ -173,34 +185,50 @@ def test_open_odim_datatree(odim_file):
assert ds.sweep_number.values == int(grp[7:])


def test_open_odim_dataset(odim_file):
@pytest.mark.parametrize("first_dim", ["auto", "time"])
@pytest.mark.parametrize("fix_second_angle", [False, True])
def test_open_odim_dataset(odim_file, first_dim, fix_second_angle):
# open first sweep group
ds = xr.open_dataset(odim_file, group="sweep_0", engine="odim")
assert dict(ds.sizes) == {"azimuth": 360, "range": 1200}
ds = xr.open_dataset(
odim_file,
group="sweep_0",
engine="odim",
first_dim=first_dim,
fix_second_angle=fix_second_angle,
)
dim0 = "time" if first_dim == "time" else "azimuth"
assert dict(ds.sizes) == {dim0: 360, "range": 1200}
assert set(ds.data_vars) & (
sweep_dataset_vars | non_standard_sweep_dataset_vars
) == {"WRADH", "VRADH", "PHIDP", "DBZH", "RHOHV", "KDP", "TH", "ZDR"}
assert ds.sweep_number == 0

# open last sweep group
ds = xr.open_dataset(odim_file, group="sweep_11", engine="odim")
assert dict(ds.sizes) == {"azimuth": 360, "range": 280}
assert set(ds.data_vars) & (
sweep_dataset_vars | non_standard_sweep_dataset_vars
) == {"VRADH", "KDP", "WRADH", "TH", "RHOHV", "PHIDP", "ZDR", "DBZH"}
assert ds.sweep_number == 11

# open last sweep group, auto
ds = xr.open_dataset(
odim_file,
group="sweep_11",
engine="odim",
backend_kwargs=dict(first_dim="time"),
first_dim=first_dim,
fix_second_angle=fix_second_angle,
)
assert dict(ds.sizes) == {"time": 360, "range": 280}
assert dict(ds.sizes) == {dim0: 360, "range": 280}
assert set(ds.data_vars) & (
sweep_dataset_vars | non_standard_sweep_dataset_vars
) == {"VRADH", "KDP", "WRADH", "TH", "RHOHV", "PHIDP", "ZDR", "DBZH"}
assert ds.sweep_number == 11


def test_open_odim_dataset_stream(odim_file):
    """The ``odim`` engine should accept an in-memory binary stream."""
    with open(odim_file, mode="rb") as fobj:
        buffer = io.BytesIO(fobj.read())
        xr.open_dataset(buffer, group="sweep_0", engine="odim")


def test_open_odim_dataset_fsspec(odim_file):
    """The ``odim`` engine should accept an fsspec file-like object."""
    with fsspec.open(odim_file, mode="rb") as fobj:
        xr.open_dataset(fobj, group="sweep_0", engine="odim")


def test_open_odim_store(odim_file):
store = xradar.io.backends.odim.OdimStore.open(odim_file, group="sweep_0")
assert store.substore[0].root.a1gate == 86
Expand All @@ -220,6 +248,16 @@ def test_open_odim_store(odim_file):
)


@pytest.mark.parametrize("sweep", ["sweep_0", 0, [0, 1], ["sweep_0", "sweep_1"]])
def test_open_gamic_datatree_sweep(gamic_file, sweep):
    """Selecting sweeps yields exactly one datatree group per requested sweep.

    ``sweep`` may be a single name/index or a list of names/indices; a scalar
    selection must produce a single sweep group.
    """
    dtree = open_gamic_datatree(gamic_file, sweep=sweep)
    # a scalar (str/int) selection always maps to exactly one sweep group
    expected = 1 if isinstance(sweep, (str, int)) else len(sweep)
    # groups[0] is the tree root; the remainder are the sweep groups
    assert len(dtree.groups[1:]) == expected


def test_open_gamic_datatree(gamic_file):
dtree = open_gamic_datatree(gamic_file)

Expand Down Expand Up @@ -300,10 +338,19 @@ def test_open_gamic_datatree(gamic_file):
assert ds.sweep_number == i


def test_open_gamic_dataset(gamic_file):
@pytest.mark.parametrize("first_dim", ["auto", "time"])
@pytest.mark.parametrize("fix_second_angle", [False, True])
def test_open_gamic_dataset(gamic_file, first_dim, fix_second_angle):
# open first sweep group
ds = xr.open_dataset(gamic_file, group="sweep_0", engine="gamic")
assert dict(ds.sizes) == {"azimuth": 361, "range": 360}
ds = xr.open_dataset(
gamic_file,
group="sweep_0",
engine="gamic",
first_dim=first_dim,
fix_second_angle=fix_second_angle,
)
dim0 = "time" if first_dim == "time" else "azimuth"
assert dict(ds.sizes) == {dim0: 361, "range": 360}
assert set(ds.data_vars) & (
sweep_dataset_vars | non_standard_sweep_dataset_vars
) == {
Expand All @@ -323,8 +370,14 @@ def test_open_gamic_dataset(gamic_file):
assert ds.sweep_number == 0

# open last sweep group
ds = xr.open_dataset(gamic_file, group="sweep_9", engine="gamic")
assert dict(ds.sizes) == {"azimuth": 360, "range": 1000}
ds = xr.open_dataset(
gamic_file,
group="sweep_9",
engine="gamic",
first_dim=first_dim,
fix_second_angle=fix_second_angle,
)
assert dict(ds.sizes) == {dim0: 360, "range": 1000}
assert set(ds.data_vars) & (
sweep_dataset_vars | non_standard_sweep_dataset_vars
) == {
Expand All @@ -343,15 +396,16 @@ def test_open_gamic_dataset(gamic_file):
}
assert ds.sweep_number == 9

# open last sweep group, auto
ds = xr.open_dataset(
gamic_file,
group="sweep_9",
engine="gamic",
backend_kwargs=dict(first_dim="time"),
)
assert dict(ds.sizes) == {"time": 360, "range": 1000}
assert ds.sweep_number == 9

def test_open_gamic_dataset_stream(gamic_file):
    """The ``gamic`` engine should accept an in-memory binary stream."""
    with open(gamic_file, mode="rb") as fobj:
        buffer = io.BytesIO(fobj.read())
        xr.open_dataset(buffer, group="sweep_9", engine="gamic")


def test_open_gamic_dataset_fsspec(gamic_file):
    """The ``gamic`` engine should accept an fsspec file-like object."""
    with fsspec.open(gamic_file, mode="rb") as fobj:
        xr.open_dataset(fobj, group="sweep_9", engine="gamic")


def test_open_gamic_store(gamic_file):
Expand Down
16 changes: 4 additions & 12 deletions xradar/io/backends/gamic.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,6 @@
import numpy as np
import xarray as xr
from datatree import DataTree
from packaging.version import Version
from xarray.backends.common import (
AbstractDataStore,
BackendEntrypoint,
Expand Down Expand Up @@ -278,18 +277,11 @@ def open(
raise ValueError("invalid format for h5netcdf backend")

kwargs = {"invalid_netcdf": invalid_netcdf}

if phony_dims is not None:
if Version(h5netcdf.__version__) >= Version("0.8.0"):
kwargs["phony_dims"] = phony_dims
else:
raise ValueError(
"h5netcdf backend keyword argument 'phony_dims' needs "
"h5netcdf >= 0.8.0."
)
if Version(h5netcdf.__version__) >= Version("0.10.0") and Version(
h5netcdf.core.h5py.__version__
) >= Version("3.0.0"):
kwargs["decode_vlen_strings"] = decode_vlen_strings
kwargs["phony_dims"] = phony_dims

kwargs["decode_vlen_strings"] = decode_vlen_strings

if lock is None:
if util.has_import("dask"):
Expand Down
14 changes: 2 additions & 12 deletions xradar/io/backends/odim.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,6 @@
import numpy as np
import xarray as xr
from datatree import DataTree
from packaging.version import Version
from xarray.backends.common import (
AbstractDataStore,
BackendArray,
Expand Down Expand Up @@ -674,17 +673,8 @@ def open(

kwargs = {"invalid_netcdf": invalid_netcdf}
if phony_dims is not None:
if Version(h5netcdf.__version__) >= Version("0.8.0"):
kwargs["phony_dims"] = phony_dims
else:
raise ValueError(
"h5netcdf backend keyword argument 'phony_dims' needs "
"h5netcdf >= 0.8.0."
)
if Version(h5netcdf.__version__) >= Version("0.10.0") and Version(
h5netcdf.core.h5py.__version__
) >= Version("3.0.0"):
kwargs["decode_vlen_strings"] = decode_vlen_strings
kwargs["phony_dims"] = phony_dims
kwargs["decode_vlen_strings"] = decode_vlen_strings

if lock is None:
if util.has_import("dask"):
Expand Down

0 comments on commit cb3ab83

Please sign in to comment.