Commit 289fd62

Merge remote-tracking branch 'upstream/main' into inactive_maintainers_no_answer
2 parents 99ccc6e + 61d9976 commit 289fd62

11 files changed: +148 additions, −46 deletions

ci/deps/actions-310-minimum_versions.yaml (1 addition, 1 deletion)

@@ -18,7 +18,7 @@ dependencies:
   - pytest-xdist>=3.4.0
   - pytest-localserver>=0.8.1
   - pytest-qt>=4.4.0
-  - boto3
+  - boto3=1.37.3
 
   # required dependencies
   - python-dateutil=2.8.2

ci/deps/actions-310.yaml (1 addition, 1 deletion)

@@ -16,7 +16,7 @@ dependencies:
   - pytest-xdist>=3.4.0
   - pytest-localserver>=0.8.1
   - pytest-qt>=4.4.0
-  - boto3
+  - boto3=1.37.3
 
   # required dependencies
   - python-dateutil

ci/deps/actions-311-downstream_compat.yaml (1 addition, 1 deletion)

@@ -17,7 +17,7 @@ dependencies:
   - pytest-xdist>=3.4.0
   - pytest-localserver>=0.8.1
   - pytest-qt>=4.4.0
-  - boto3
+  - boto3=1.37.3
 
   # required dependencies
   - python-dateutil

ci/deps/actions-311.yaml (1 addition, 1 deletion)

@@ -16,7 +16,7 @@ dependencies:
   - pytest-xdist>=3.4.0
   - pytest-localserver>=0.8.1
   - pytest-qt>=4.4.0
-  - boto3
+  - boto3=1.37.3
 
   # required dependencies
   - python-dateutil

ci/deps/actions-312.yaml (1 addition, 1 deletion)

@@ -16,7 +16,7 @@ dependencies:
   - pytest-xdist>=3.4.0
   - pytest-localserver>=0.8.1
   - pytest-qt>=4.4.0
-  - boto3
+  - boto3=1.37.3
 
   # required dependencies
   - python-dateutil

ci/deps/actions-313.yaml (1 addition, 1 deletion)

@@ -16,7 +16,7 @@ dependencies:
   - pytest-xdist>=3.4.0
   - pytest-localserver>=0.8.1
   - pytest-qt>=4.4.0
-  - boto3
+  - boto3=1.37.3
 
   # required dependencies
   - python-dateutil

doc/source/whatsnew/v3.0.0.rst (1 addition, 0 deletions)

@@ -64,6 +64,7 @@ Other enhancements
 - :meth:`Series.nlargest` uses a 'stable' sort internally and will preserve original ordering.
 - :class:`ArrowDtype` now supports ``pyarrow.JsonType`` (:issue:`60958`)
 - :class:`DataFrameGroupBy` and :class:`SeriesGroupBy` methods ``sum``, ``mean``, ``median``, ``prod``, ``min``, ``max``, ``std``, ``var`` and ``sem`` now accept ``skipna`` parameter (:issue:`15675`)
+- :class:`Holiday` has gained the constructor argument and field ``exclude_dates`` to exclude specific datetimes from a custom holiday calendar (:issue:`54382`)
 - :class:`Rolling` and :class:`Expanding` now support ``nunique`` (:issue:`26958`)
 - :class:`Rolling` and :class:`Expanding` now support aggregations ``first`` and ``last`` (:issue:`33155`)
 - :func:`read_parquet` accepts ``to_pandas_kwargs`` which are forwarded to :meth:`pyarrow.Table.to_pandas` which enables passing additional keywords to customize the conversion to pandas, such as ``maps_as_pydicts`` to read the Parquet map data type as python dictionaries (:issue:`56842`)

pandas/tests/generic/test_to_xarray.py (1 addition, 0 deletions)

@@ -93,6 +93,7 @@ def test_to_xarray_index_types(self, index_flat, request):
             isinstance(index.dtype, StringDtype)
             and index.dtype.storage == "pyarrow"
             and Version(xarray.__version__) > Version("2024.9.0")
+            and Version(xarray.__version__) < Version("2025.6.0")
         ):
             request.applymarker(
                 pytest.mark.xfail(

pandas/tests/tseries/holiday/test_holiday.py (101 additions, 0 deletions)

@@ -353,3 +353,104 @@ def test_holidays_with_timezone_specified_but_no_occurences():
     expected_results.index = expected_results.index.as_unit("ns")
 
     tm.assert_equal(test_case, expected_results)
+
+
+def test_holiday_with_exclusion():
+    # GH 54382
+    start = Timestamp("2020-05-01")
+    end = Timestamp("2025-05-31")
+    exclude = DatetimeIndex([Timestamp("2022-05-30")])  # Queen's platinum Jubilee
+
+    queens_jubilee_uk_spring_bank_holiday: Holiday = Holiday(
+        "Queen's Jubilee UK Spring Bank Holiday",
+        month=5,
+        day=31,
+        offset=DateOffset(weekday=MO(-1)),
+        exclude_dates=exclude,
+    )
+
+    result = queens_jubilee_uk_spring_bank_holiday.dates(start, end)
+    expected = DatetimeIndex(
+        [
+            Timestamp("2020-05-25"),
+            Timestamp("2021-05-31"),
+            Timestamp("2023-05-29"),
+            Timestamp("2024-05-27"),
+            Timestamp("2025-05-26"),
+        ],
+        dtype="datetime64[ns]",
+    )
+    tm.assert_index_equal(result, expected)
+
+
+def test_holiday_with_multiple_exclusions():
+    start = Timestamp("2025-01-01")
+    end = Timestamp("2065-12-31")
+    exclude = DatetimeIndex(
+        [
+            Timestamp("2025-01-01"),
+            Timestamp("2042-01-01"),
+            Timestamp("2061-01-01"),
+        ]
+    )  # Yakudoshi new year
+
+    yakudoshi_new_year: Holiday = Holiday(
+        "Yakudoshi New Year", month=1, day=1, exclude_dates=exclude
+    )
+
+    result = yakudoshi_new_year.dates(start, end)
+    expected = DatetimeIndex(
+        [
+            Timestamp("2026-01-01"),
+            Timestamp("2027-01-01"),
+            Timestamp("2028-01-01"),
+            Timestamp("2029-01-01"),
+            Timestamp("2030-01-01"),
+            Timestamp("2031-01-01"),
+            Timestamp("2032-01-01"),
+            Timestamp("2033-01-01"),
+            Timestamp("2034-01-01"),
+            Timestamp("2035-01-01"),
+            Timestamp("2036-01-01"),
+            Timestamp("2037-01-01"),
+            Timestamp("2038-01-01"),
+            Timestamp("2039-01-01"),
+            Timestamp("2040-01-01"),
+            Timestamp("2041-01-01"),
+            Timestamp("2043-01-01"),
+            Timestamp("2044-01-01"),
+            Timestamp("2045-01-01"),
+            Timestamp("2046-01-01"),
+            Timestamp("2047-01-01"),
+            Timestamp("2048-01-01"),
+            Timestamp("2049-01-01"),
+            Timestamp("2050-01-01"),
+            Timestamp("2051-01-01"),
+            Timestamp("2052-01-01"),
+            Timestamp("2053-01-01"),
+            Timestamp("2054-01-01"),
+            Timestamp("2055-01-01"),
+            Timestamp("2056-01-01"),
+            Timestamp("2057-01-01"),
+            Timestamp("2058-01-01"),
+            Timestamp("2059-01-01"),
+            Timestamp("2060-01-01"),
+            Timestamp("2062-01-01"),
+            Timestamp("2063-01-01"),
+            Timestamp("2064-01-01"),
+            Timestamp("2065-01-01"),
+        ],
+        dtype="datetime64[ns]",
+    )
+    tm.assert_index_equal(result, expected)
+
+
+def test_exclude_date_value_error():
+    msg = "exclude_dates must be None or of type DatetimeIndex."
+
+    with pytest.raises(ValueError, match=msg):
+        exclude = [
+            Timestamp("2025-06-10"),
+            Timestamp("2026-06-10"),
+        ]
+        Holiday("National Ice Tea Day", month=6, day=10, exclude_dates=exclude)

pandas/tseries/holiday.py (9 additions, 0 deletions)

@@ -169,6 +169,7 @@ def __init__(
         start_date=None,
         end_date=None,
         days_of_week: tuple | None = None,
+        exclude_dates: DatetimeIndex | None = None,
     ) -> None:
         """
         Parameters
@@ -193,6 +194,8 @@ class from pandas.tseries.offsets, default None
         days_of_week : tuple of int or dateutil.relativedelta weekday strs, default None
             Provide a tuple of days e.g (0,1,2,3,) for Monday Through Thursday
             Monday=0,..,Sunday=6
+        exclude_dates : DatetimeIndex or default None
+            Specific dates to exclude e.g. skipping a specific year's holiday
 
         Examples
         --------
@@ -257,6 +260,9 @@ class from pandas.tseries.offsets, default None
         self.observance = observance
         assert days_of_week is None or type(days_of_week) == tuple
         self.days_of_week = days_of_week
+        if not (exclude_dates is None or isinstance(exclude_dates, DatetimeIndex)):
+            raise ValueError("exclude_dates must be None or of type DatetimeIndex.")
+        self.exclude_dates = exclude_dates
 
     def __repr__(self) -> str:
         info = ""
@@ -328,6 +334,9 @@ def dates(
         holiday_dates = holiday_dates[
             (holiday_dates >= filter_start_date) & (holiday_dates <= filter_end_date)
         ]
+
+        if self.exclude_dates is not None:
+            holiday_dates = holiday_dates.difference(self.exclude_dates)
        if return_name:
            return Series(self.name, index=holiday_dates)
        return holiday_dates
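
As a quick reference for reviewers, here is a minimal usage sketch of the new exclude_dates argument, mirroring the tests above; the holiday name and dates are illustrative only.

from pandas import DatetimeIndex, Timestamp
from pandas.tseries.holiday import Holiday

# Annual rule with one year's observance skipped (illustrative dates).
new_year = Holiday(
    "New Year's Day",
    month=1,
    day=1,
    exclude_dates=DatetimeIndex([Timestamp("2022-01-01")]),
)

# 2022-01-01 is removed from the generated dates via DatetimeIndex.difference.
print(new_year.dates(Timestamp("2020-01-01"), Timestamp("2024-12-31")))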

web/pandas_web.py (30 additions, 40 deletions)

@@ -100,20 +100,15 @@ def blog_add_posts(context):
     posts = []
     # posts from the file system
     if context["blog"]["posts_path"]:
-        posts_path = os.path.join(
-            context["source_path"], *context["blog"]["posts_path"].split("/")
-        )
-        for fname in os.listdir(posts_path):
-            if fname.startswith("index."):
+        posts_path = context["source_path"] / context["blog"]["posts_path"]
+        for fname in posts_path.iterdir():
+            if fname.name.startswith("index."):
                 continue
-            link = (
-                f"/{context['blog']['posts_path']}"
-                f"/{os.path.splitext(fname)[0]}.html"
-            )
+            link = f"/{context['blog']['posts_path']}/{fname.stem}.html"
             md = markdown.Markdown(
                 extensions=context["main"]["markdown_extensions"]
             )
-            with open(os.path.join(posts_path, fname), encoding="utf-8") as f:
+            with fname.open(encoding="utf-8") as f:
                 html = md.convert(f.read())
             title = md.Meta["title"][0]
             summary = re.sub(tag_expr, "", html)
@@ -386,15 +381,15 @@ def get_callable(obj_as_str: str) -> object:
     return obj
 
 
-def get_context(config_fname: str, **kwargs):
+def get_context(config_fname: pathlib.Path, **kwargs):
     """
     Load the config yaml as the base context, and enrich it with the
     information added by the context preprocessors defined in the file.
     """
-    with open(config_fname, encoding="utf-8") as f:
+    with config_fname.open(encoding="utf-8") as f:
         context = yaml.safe_load(f)
 
-    context["source_path"] = os.path.dirname(config_fname)
+    context["source_path"] = config_fname.parent
     context.update(kwargs)
 
     preprocessors = (
@@ -409,14 +404,13 @@ def get_context(config_fname: str, **kwargs):
     return context
 
 
-def get_source_files(source_path: str) -> typing.Generator[str, None, None]:
+def get_source_files(source_path: pathlib.Path) -> typing.Generator[str, None, None]:
     """
     Generate the list of files present in the source directory.
    """
-    for root, dirs, fnames in os.walk(source_path):
-        root_rel_path = os.path.relpath(root, source_path)
-        for fname in fnames:
-            yield os.path.join(root_rel_path, fname)
+    for path in source_path.rglob("*"):
+        if path.is_file():
+            yield path.relative_to(source_path)
 
 
 def extend_base_template(content: str, base_template: str) -> str:
@@ -432,26 +426,27 @@ def extend_base_template(content: str, base_template: str) -> str:
 
 
 def main(
-    source_path: str,
-    target_path: str,
+    source_path: pathlib.Path,
+    target_path: pathlib.Path,
 ) -> int:
     """
     Copy every file in the source directory to the target directory.
 
     For ``.md`` and ``.html`` files, render them with the context
     before copying them. ``.md`` files are transformed to HTML.
     """
+
     # Sanity check: validate that versions.json is valid JSON
-    versions_path = os.path.join(source_path, "versions.json")
-    with open(versions_path, encoding="utf-8") as f:
+    versions_path = source_path / "versions.json"
+    with versions_path.open(encoding="utf-8") as f:
         try:
             json.load(f)
         except json.JSONDecodeError as e:
             raise RuntimeError(
                 f"Invalid versions.json: {e}. Ensure it is valid JSON."
             ) from e
 
-    config_fname = os.path.join(source_path, "config.yml")
+    config_fname = source_path / "config.yml"
 
     shutil.rmtree(target_path, ignore_errors=True)
     os.makedirs(target_path, exist_ok=True)
@@ -460,23 +455,22 @@ def main(
     context = get_context(config_fname, target_path=target_path)
     sys.stderr.write("Context generated\n")
 
-    templates_path = os.path.join(source_path, context["main"]["templates_path"])
+    templates_path = source_path / context["main"]["templates_path"]
     jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(templates_path))
 
     for fname in get_source_files(source_path):
-        if os.path.normpath(fname) in context["main"]["ignore"]:
+        if fname.as_posix() in context["main"]["ignore"]:
             continue
-
         sys.stderr.write(f"Processing {fname}\n")
-        dirname = os.path.dirname(fname)
-        os.makedirs(os.path.join(target_path, dirname), exist_ok=True)
+        dirname = fname.parent
+        (target_path / dirname).mkdir(parents=True, exist_ok=True)
 
-        extension = os.path.splitext(fname)[-1]
+        extension = fname.suffix
         if extension in (".html", ".md"):
-            with open(os.path.join(source_path, fname), encoding="utf-8") as f:
+            with (source_path / fname).open(encoding="utf-8") as f:
                 content = f.read()
             if extension == ".md":
-                if "pdeps/" in fname:
+                if len(fname.parts) > 1 and fname.parts[1] == "pdeps":
                     from markdown.extensions.toc import TocExtension
 
                     body = markdown.markdown(
@@ -503,17 +497,13 @@ def main(
                 # Python-Markdown doesn't let us config table attributes by hand
                 body = body.replace("<table>", '<table class="table table-bordered">')
             content = extend_base_template(body, context["main"]["base_template"])
-            context["base_url"] = "".join(["../"] * os.path.normpath(fname).count("/"))
+            context["base_url"] = "../" * (len(fname.parents) - 1)
             content = jinja_env.from_string(content).render(**context)
-            fname_html = os.path.splitext(fname)[0] + ".html"
-            with open(
-                os.path.join(target_path, fname_html), "w", encoding="utf-8"
-            ) as f:
+            fname_html = fname.with_suffix(".html").name
+            with (target_path / dirname / fname_html).open("w", encoding="utf-8") as f:
                 f.write(content)
         else:
-            shutil.copy(
-                os.path.join(source_path, fname), os.path.join(target_path, dirname)
-            )
+            shutil.copy(source_path / fname, target_path / fname)
 
 
 if __name__ == "__main__":
@@ -525,4 +515,4 @@ def main(
         "--target-path", default="build", help="directory where to write the output"
     )
     args = parser.parse_args()
-    sys.exit(main(args.source_path, args.target_path))
+    sys.exit(main(pathlib.Path(args.source_path), pathlib.Path(args.target_path)))
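
For context on the review, a small self-contained sketch of the standard-library pathlib idioms this refactor leans on (the directory name below is made up for illustration):

import pathlib

source = pathlib.Path("web")  # hypothetical source directory

# rglob("*") plus is_file() replaces os.walk; relative_to mirrors os.path.relpath.
for path in source.rglob("*"):
    if path.is_file():
        rel = path.relative_to(source)
        # suffix/stem/with_suffix replace os.path.splitext, and as_posix()
        # yields forward-slash strings for comparison against config entries.
        print(rel.as_posix(), rel.suffix, rel.with_suffix(".html").name)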
