Skip to content

Commit 4ca0860

Browse files
committed
Merge pull request #376 from shoyer/fallback-time-units
BUG: Fix failing to determine time units
2 parents bd911aa + dee4488 commit 4ca0860

File tree

3 files changed

+21
-18
lines changed

3 files changed

+21
-18
lines changed

doc/whats-new.rst

+4-3
Original file line numberDiff line numberDiff line change
@@ -72,7 +72,6 @@ Enhancements
7272
- :py:func:`~xray.open_dataset` and :py:meth:`~xray.Dataset.to_netcdf` now
7373
accept an ``engine`` argument to explicitly select which underlying library
7474
(netcdf4 or scipy) is used for reading/writing a netCDF file.
75-
- New documentation section on :ref:`combining multiple files`.
7675

7776
.. _same as pandas: http://pandas.pydata.org/pandas-docs/stable/timeseries.html#up-and-downsampling
7877

@@ -81,13 +80,15 @@ Bug fixes
8180

8281
- Fixed a bug where data netCDF variables read from disk with
8382
``engine='scipy'`` could still be associated with the file on disk, even
84-
after closing the file (`issue`:341:). This manifested itself in warnings
83+
after closing the file (:issue:`341`). This manifested itself in warnings
8584
about mmapped arrays and segmentation faults (if the data was accessed).
8685
- Silenced spurious warnings about all-NaN slices when using nan-aware
87-
aggregation methods (`issue`:344:).
86+
aggregation methods (:issue:`344`).
8887
- Dataset aggregations with ``keep_attrs=True`` now preserve attributes on
8988
data variables, not just the dataset itself.
9089
- Tests for xray now pass when run on Windows.
90+
- Fixed a regression in v0.4 where saving to netCDF could fail with the error
91+
``ValueError: could not automatically determine time units``.
9192

9293
v0.4 (2 March, 2015)
9394
--------------------

xray/conventions.py

+2-4
Original file line numberDiff line numberDiff line change
@@ -157,7 +157,7 @@ def _infer_time_units_from_diff(unique_timedeltas):
157157
diffs = unique_timedeltas / unit_delta
158158
if np.all(diffs == diffs.astype(int)):
159159
return time_unit
160-
raise ValueError('could not automatically determine time units')
160+
return 'seconds'
161161

162162

163163
def infer_datetime_units(dates):
@@ -226,9 +226,7 @@ def encode_cf_datetime(dates, units=None, calendar=None):
226226
"""Given an array of datetime objects, returns the tuple `(num, units,
227227
calendar)` suitable for a CF complient time variable.
228228
229-
Unlike encode_cf_datetime, this function does not (yet) speedup encoding
230-
of datetime64 arrays. However, unlike `date2num`, it can handle datetime64
231-
arrays.
229+
Unlike `date2num`, this function can handle datetime64 arrays.
232230
233231
See also
234232
--------

xray/test/test_conventions.py

+15-11
Original file line numberDiff line numberDiff line change
@@ -107,24 +107,25 @@ class TestDatetime(TestCase):
107107
def test_cf_datetime(self):
108108
import netCDF4 as nc4
109109
for num_dates, units in [
110-
(np.arange(100), 'days since 2000-01-01'),
111-
(np.arange(100).reshape(10, 10), 'days since 2000-01-01'),
112-
(12300 + np.arange(50), 'hours since 1680-01-01 00:00:00'),
110+
(np.arange(10), 'days since 2000-01-01'),
111+
(np.arange(10).reshape(2, 5), 'days since 2000-01-01'),
112+
(12300 + np.arange(5), 'hours since 1680-01-01 00:00:00'),
113113
# here we add a couple minor formatting errors to test
114114
# the robustness of the parsing algorithm.
115-
(12300 + np.arange(50), 'hour since 1680-01-01 00:00:00'),
116-
(12300 + np.arange(50), u'Hour since 1680-01-01 00:00:00'),
117-
(12300 + np.arange(50), ' Hour since 1680-01-01 00:00:00 '),
115+
(12300 + np.arange(5), 'hour since 1680-01-01 00:00:00'),
116+
(12300 + np.arange(5), u'Hour since 1680-01-01 00:00:00'),
117+
(12300 + np.arange(5), ' Hour since 1680-01-01 00:00:00 '),
118118
(10, 'days since 2000-01-01'),
119119
([10], 'daYs since 2000-01-01'),
120120
([[10]], 'days since 2000-01-01'),
121121
([10, 10], 'days since 2000-01-01'),
122122
(0, 'days since 1000-01-01'),
123123
([0], 'days since 1000-01-01'),
124124
([[0]], 'days since 1000-01-01'),
125-
(np.arange(20), 'days since 1000-01-01'),
126-
(np.arange(0, 100000, 10000), 'days since 1900-01-01'),
125+
(np.arange(2), 'days since 1000-01-01'),
126+
(np.arange(0, 100000, 20000), 'days since 1900-01-01'),
127127
(17093352.0, 'hours since 1-1-1 00:00:0.0'),
128+
([0.5, 1.5], 'hours since 1900-01-01T00:00:00'),
128129
]:
129130
for calendar in ['standard', 'gregorian', 'proleptic_gregorian']:
130131
expected = _ensure_naive_tz(nc4.num2date(num_dates, units, calendar))
@@ -152,15 +153,15 @@ def test_cf_datetime(self):
152153
# units/encoding cannot be preserved in this case:
153154
# (Pdb) pd.to_datetime('1-1-1 00:00:0.0')
154155
# Timestamp('2001-01-01 00:00:00')
155-
self.assertArrayEqual(num_dates, np.around(encoded))
156+
self.assertArrayEqual(num_dates, np.around(encoded, 1))
156157
if (hasattr(num_dates, 'ndim') and num_dates.ndim == 1
157158
and '1000' not in units):
158159
# verify that wrapping with a pandas.Index works
159160
# note that it *does not* currently work to even put
160161
# non-datetime64 compatible dates into a pandas.Index :(
161162
encoded, _, _ = conventions.encode_cf_datetime(
162163
pd.Index(actual), units, calendar)
163-
self.assertArrayEqual(num_dates, np.around(encoded))
164+
self.assertArrayEqual(num_dates, np.around(encoded, 1))
164165

165166
def test_decoded_cf_datetime_array(self):
166167
actual = conventions.DecodedCFDatetimeArray(
@@ -344,7 +345,10 @@ def test_infer_datetime_units(self):
344345
'1900-01-02 00:00:01'],
345346
'seconds since 1900-01-01 00:00:00'),
346347
(pd.to_datetime(['1900-01-01', '1900-01-02', 'NaT']),
347-
'days since 1900-01-01 00:00:00')]:
348+
'days since 1900-01-01 00:00:00'),
349+
(pd.to_datetime(['1900-01-01',
350+
'1900-01-02T00:00:00.005']),
351+
'seconds since 1900-01-01 00:00:00')]:
348352
self.assertEqual(expected, conventions.infer_datetime_units(dates))
349353

350354
def test_infer_timedelta_units(self):

0 commit comments

Comments
 (0)