 from xarray.core import indexing
 from xarray.core.common import contains_cftime_datetimes, is_np_datetime_like
 from xarray.core.formatting import first_n_items, format_timestamp, last_item
+from xarray.core.pdcompat import nanosecond_precision_timestamp
 from xarray.core.pycompat import is_duck_dask_array
 from xarray.core.variable import Variable
 
@@ -224,7 +225,9 @@ def _decode_datetime_with_pandas(
     delta, ref_date = _unpack_netcdf_time_units(units)
     delta = _netcdf_to_numpy_timeunit(delta)
     try:
-        ref_date = pd.Timestamp(ref_date)
+        # TODO: the strict enforcement of nanosecond precision Timestamps can be
+        # relaxed when addressing GitHub issue #7493.
+        ref_date = nanosecond_precision_timestamp(ref_date)
     except ValueError:
         # ValueError is raised by pd.Timestamp for non-ISO timestamp
         # strings, in which case we fall back to using cftime
@@ -391,7 +394,9 @@ def infer_datetime_units(dates) -> str:
         dates = to_datetime_unboxed(dates)
         dates = dates[pd.notnull(dates)]
         reference_date = dates[0] if len(dates) > 0 else "1970-01-01"
-        reference_date = pd.Timestamp(reference_date)
+        # TODO: the strict enforcement of nanosecond precision Timestamps can be
+        # relaxed when addressing GitHub issue #7493.
+        reference_date = nanosecond_precision_timestamp(reference_date)
     else:
         reference_date = dates[0] if len(dates) > 0 else "1970-01-01"
         reference_date = format_cftime_datetime(reference_date)
@@ -432,14 +437,16 @@ def cftime_to_nptime(times, raise_on_invalid: bool = True) -> np.ndarray:
     If raise_on_invalid is True (default), invalid dates trigger a ValueError.
     Otherwise, the invalid element is replaced by np.NaT."""
     times = np.asarray(times)
+    # TODO: the strict enforcement of nanosecond precision datetime values can
+    # be relaxed when addressing GitHub issue #7493.
     new = np.empty(times.shape, dtype="M8[ns]")
     for i, t in np.ndenumerate(times):
         try:
             # Use pandas.Timestamp in place of datetime.datetime, because
             # NumPy casts it safely to np.datetime64[ns] for dates outside
             # 1678 to 2262 (this is not currently the case for
             # datetime.datetime).
-            dt = pd.Timestamp(
+            dt = nanosecond_precision_timestamp(
                 t.year, t.month, t.day, t.hour, t.minute, t.second, t.microsecond
             )
         except ValueError as e:
@@ -498,6 +505,10 @@ def convert_time_or_go_back(date, date_type):
 
     This is meant to convert end-of-month dates into a new calendar.
     """
+    # TODO: the strict enforcement of nanosecond precision Timestamps can be
+    # relaxed when addressing GitHub issue #7493.
+    if date_type == pd.Timestamp:
+        date_type = nanosecond_precision_timestamp
     try:
         return date_type(
             date.year,
@@ -641,7 +652,10 @@ def encode_cf_datetime(
 
         delta_units = _netcdf_to_numpy_timeunit(delta)
         time_delta = np.timedelta64(1, delta_units).astype("timedelta64[ns]")
-        ref_date = pd.Timestamp(_ref_date)
+
+        # TODO: the strict enforcement of nanosecond precision Timestamps can be
+        # relaxed when addressing GitHub issue #7493.
+        ref_date = nanosecond_precision_timestamp(_ref_date)
 
         # If the ref_date Timestamp is timezone-aware, convert to UTC and
         # make it timezone-naive (GH 2649).
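
For reference, a minimal sketch of what the nanosecond_precision_timestamp helper imported from xarray.core.pdcompat could look like. This is an assumption inferred from how it is called in the hunks above, not the actual implementation: it forwards its arguments to pd.Timestamp and, on pandas >= 2.0 (where Timestamps may carry second, millisecond, or microsecond resolution), coerces the result back to nanosecond resolution via Timestamp.as_unit("ns").

from packaging.version import Version

import pandas as pd


def nanosecond_precision_timestamp(*args, **kwargs) -> pd.Timestamp:
    """Return a pd.Timestamp pinned to nanosecond resolution.

    Hypothetical sketch: pandas >= 2.0 allows non-nanosecond Timestamps,
    so coerce explicitly; older pandas always uses nanosecond precision.
    """
    if Version(pd.__version__) >= Version("2.0.0"):
        return pd.Timestamp(*args, **kwargs).as_unit("ns")
    return pd.Timestamp(*args, **kwargs)

Pinning the unit this way keeps the datetime64[ns] assumptions in the decode and encode paths intact (for example when the reference date is parsed from the units attribute), until the strict enforcement can be relaxed as tracked in GitHub issue #7493.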