Skip to content

Commit bad0ee2

Browse files
committed
Problem: _convert_oversized_ints treated NamedTuples as plain tuples, calling
type(obj)(converted_list) to reconstruct them. This fails because NamedTuple constructors require positional arguments matching their field definitions, not a single list. Solution: Before reconstructing a tuple subclass, check for the _fields attribute (the NamedTuple marker). If present, reconstruct via type(obj)(**dict(zip(obj._fields, converted))) so each field is passed by keyword. Plain tuples and lists continue to use the original path.
1 parent 3fc2219 commit bad0ee2

3 files changed

Lines changed: 49 additions & 0 deletions

File tree

README.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,9 @@ Tested on Python 3.9+ and PyPy3.
2323

2424
Please check the [ChangeLog](CHANGELOG.md) file for the detailed information.
2525

26+
DeepDiff 8-7-0
27+
- Support for Python 3.14
28+
2629
DeepDiff 8-6-1
2730
- Patched security vulnerability in the Delta class which was vulnerable to class pollution via its constructor, and when combined with a gadget available in DeepDiff itself, it could lead to Denial of Service and Remote Code Execution (via insecure Pickle deserialization).
2831

deepdiff/serialization.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -751,6 +751,9 @@ def _convert_oversized_ints(obj):
751751
return {k: _convert_oversized_ints(v) for k, v in obj.items()}
752752
if isinstance(obj, (list, tuple)):
753753
converted = [_convert_oversized_ints(v) for v in obj]
754+
if hasattr(obj, '_fields'):
755+
# NamedTuple: reconstruct using keyword arguments
756+
return type(obj)(**dict(zip(obj._fields, converted)))
754757
return type(obj)(converted)
755758
return obj
756759

tests/test_serialization.py

Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -577,6 +577,49 @@ def test_json_dumps_large_negative_int(self):
577577
back = json_loads(serialized)
578578
assert back['value'] == str(large_neg_int)
579579

580+
def test_json_dumps_namedtuple_with_large_int(self):
581+
"""Test that a NamedTuple containing an oversized int is properly serialized.
582+
_convert_oversized_ints must reconstruct the NamedTuple via _fields, not
583+
pass a flat list to its constructor (which fails for NamedTuples with
584+
required keyword arguments)."""
585+
large_int = 59579472846392086780
586+
stats = SomeStats(
587+
counter=Counter(["a", "b"]),
588+
context_aware_counter=Counter(),
589+
min_int=0,
590+
max_int=large_int,
591+
)
592+
data = {'stats': stats}
593+
serialized = json_dumps(data)
594+
back = json_loads(serialized)
595+
assert back['stats']['max_int'] == str(large_int)
596+
assert back['stats']['min_int'] == 0
597+
598+
def test_json_dumps_dict_of_namedtuples_with_large_int(self):
599+
"""Test a dict of NamedTuples where one contains an oversized int.
600+
This mirrors the real-world pattern of field stats keyed by column name."""
601+
large_int = 59579472846392086780
602+
stats_map = {
603+
'normal_field': SomeStats(counter=Counter(["x"]), max_int=10),
604+
'big_field': SomeStats(counter=Counter(["y"]), max_int=large_int),
605+
}
606+
serialized = json_dumps(stats_map)
607+
back = json_loads(serialized)
608+
assert back['normal_field']['max_int'] == 10
609+
assert back['big_field']['max_int'] == str(large_int)
610+
611+
def test_json_dumps_namedtuple_with_large_int_in_list(self):
612+
"""Test a list of NamedTuples where one has an oversized int."""
613+
large_int = 59579472846392086780
614+
data = [
615+
SomeStats(counter=Counter(), max_int=5),
616+
SomeStats(counter=Counter(), max_int=large_int),
617+
]
618+
serialized = json_dumps(data)
619+
back = json_loads(serialized)
620+
assert back[0]['max_int'] == 5
621+
assert back[1]['max_int'] == str(large_int)
622+
580623
def test_bytes_in_deepdiff_serialization(self):
581624
"""Test that bytes work correctly in DeepDiff JSON serialization"""
582625
t1 = {

0 commit comments

Comments
 (0)