Skip to content

PYTHON-5248 - Drop support for MongoDB 4.0 #2353

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 6 commits into from
Jun 20, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
642 changes: 234 additions & 408 deletions .evergreen/generated_configs/tasks.yml

Large diffs are not rendered by default.

17 changes: 1 addition & 16 deletions .evergreen/generated_configs/variants.yml
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,7 @@ buildvariants:
COMPRESSOR: zlib
- name: compression-zstd-rhel8
tasks:
- name: .test-standard !.server-4.0
- name: .test-standard !.server-4.2
display_name: Compression zstd RHEL8
run_on:
- rhel87-small
Expand Down Expand Up @@ -522,13 +522,6 @@ buildvariants:
PYTHON_BINARY: /opt/python/3.9/bin/python3

# Server version tests
- name: mongodb-v4.0
tasks:
- name: .server-version
display_name: "* MongoDB v4.0"
run_on:
- rhel87-small
tags: [coverage_tag]
- name: mongodb-v4.2
tasks:
- name: .server-version
Expand Down Expand Up @@ -664,11 +657,3 @@ buildvariants:
- rhel87-small
expansions:
STORAGE_ENGINE: inmemory
- name: storage-mmapv1-rhel8
tasks:
- name: .test-standard !.sharded_cluster-auth-ssl .server-4.0
display_name: Storage MMAPv1 RHEL8
run_on:
- rhel87-small
expansions:
STORAGE_ENGINE: mmapv1
12 changes: 3 additions & 9 deletions .evergreen/scripts/generate_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@
get_task_name,
get_variant_name,
get_versions_from,
get_versions_until,
handle_c_ext,
write_functions_to_file,
write_tasks_to_file,
Expand Down Expand Up @@ -196,7 +195,7 @@ def create_compression_variants():
for compressor in "snappy", "zlib", "zstd":
expansions = dict(COMPRESSOR=compressor)
if compressor == "zstd":
tasks = [".test-standard !.server-4.0"]
tasks = [".test-standard !.server-4.2"]
else:
tasks = [".test-standard"]
display_name = get_variant_name(f"Compression {compressor}", host)
Expand Down Expand Up @@ -249,16 +248,11 @@ def create_pyopenssl_variants():

def create_storage_engine_variants():
host = DEFAULT_HOST
engines = ["InMemory", "MMAPv1"]
engines = ["InMemory"]
variants = []
for engine in engines:
expansions = dict(STORAGE_ENGINE=engine.lower())
if engine == engines[0]:
tasks = [".test-standard .standalone-noauth-nossl"]
else:
# MongoDB 4.2 drops support for MMAPv1
versions = get_versions_until("4.0")
tasks = [f".test-standard !.sharded_cluster-auth-ssl .server-{v}" for v in versions]
tasks = [".test-standard .standalone-noauth-nossl"]
display_name = get_variant_name(f"Storage {engine}", host)
variant = create_variant(tasks, display_name, host=host, expansions=expansions)
variants.append(variant)
Expand Down
2 changes: 1 addition & 1 deletion .evergreen/scripts/generate_config_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
# Globals
##############

ALL_VERSIONS = ["4.0", "4.2", "4.4", "5.0", "6.0", "7.0", "8.0", "rapid", "latest"]
ALL_VERSIONS = ["4.2", "4.4", "5.0", "6.0", "7.0", "8.0", "rapid", "latest"]
CPYTHONS = ["3.9", "3.10", "3.11", "3.12", "3.13"]
PYPYS = ["pypy3.10"]
ALL_PYTHONS = CPYTHONS + PYPYS
Expand Down
4 changes: 2 additions & 2 deletions pymongo/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,8 +66,8 @@
MAX_WRITE_BATCH_SIZE = 100000

# What this version of PyMongo supports.
MIN_SUPPORTED_SERVER_VERSION = "4.0"
MIN_SUPPORTED_WIRE_VERSION = 7
MIN_SUPPORTED_SERVER_VERSION = "4.2"
MIN_SUPPORTED_WIRE_VERSION = 8
# MongoDB 8.0
MAX_SUPPORTED_WIRE_VERSION = 25

Expand Down
20 changes: 1 addition & 19 deletions test/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -508,19 +508,6 @@ def require_data_lake(self, func):
func=func,
)

def require_no_mmap(self, func):
"""Run a test only if the server is not using the MMAPv1 storage
engine. Only works for standalone and replica sets; tests are
run regardless of storage engine on sharded clusters.
"""

def is_not_mmap():
if self.is_mongos:
return True
return self.storage_engine != "mmapv1"

return self._require(is_not_mmap, "Storage engine must not be MMAPv1", func=func)

def require_version_min(self, *ver):
"""Run a test only if the server version is at least ``version``."""
other_version = Version(*ver)
Expand Down Expand Up @@ -651,7 +638,7 @@ def require_no_load_balancer(self, func):

def require_change_streams(self, func):
"""Run a test only if the server supports change streams."""
return self.require_no_mmap(self.require_no_standalone(func))
return self.require_no_standalone(func)

def is_topology_type(self, topologies):
unknown = set(topologies) - {
Expand Down Expand Up @@ -754,8 +741,6 @@ def require_sessions(self, func):
return self._require(lambda: self.sessions_enabled, "Sessions not supported", func=func)

def supports_retryable_writes(self):
if self.storage_engine == "mmapv1":
return False
if not self.sessions_enabled:
return False
return self.is_mongos or self.is_rs
Expand All @@ -769,9 +754,6 @@ def require_retryable_writes(self, func):
)

def supports_transactions(self):
if self.storage_engine == "mmapv1":
return False

if self.version.at_least(4, 1, 8):
return self.is_mongos or self.is_rs

Expand Down
20 changes: 1 addition & 19 deletions test/asynchronous/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -508,19 +508,6 @@ def require_data_lake(self, func):
func=func,
)

def require_no_mmap(self, func):
"""Run a test only if the server is not using the MMAPv1 storage
engine. Only works for standalone and replica sets; tests are
run regardless of storage engine on sharded clusters.
"""

def is_not_mmap():
if self.is_mongos:
return True
return self.storage_engine != "mmapv1"

return self._require(is_not_mmap, "Storage engine must not be MMAPv1", func=func)

def require_version_min(self, *ver):
"""Run a test only if the server version is at least ``version``."""
other_version = Version(*ver)
Expand Down Expand Up @@ -651,7 +638,7 @@ def require_no_load_balancer(self, func):

def require_change_streams(self, func):
"""Run a test only if the server supports change streams."""
return self.require_no_mmap(self.require_no_standalone(func))
return self.require_no_standalone(func)

async def is_topology_type(self, topologies):
unknown = set(topologies) - {
Expand Down Expand Up @@ -754,8 +741,6 @@ def require_sessions(self, func):
return self._require(lambda: self.sessions_enabled, "Sessions not supported", func=func)

def supports_retryable_writes(self):
if self.storage_engine == "mmapv1":
return False
if not self.sessions_enabled:
return False
return self.is_mongos or self.is_rs
Expand All @@ -769,9 +754,6 @@ def require_retryable_writes(self, func):
)

def supports_transactions(self):
if self.storage_engine == "mmapv1":
return False

if self.version.at_least(4, 1, 8):
return self.is_mongos or self.is_rs

Expand Down
4 changes: 2 additions & 2 deletions test/asynchronous/test_bulk.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,7 @@ async def _test_update_many(self, update):
async def test_update_many(self):
await self._test_update_many({"$set": {"foo": "bar"}})

@async_client_context.require_version_min(4, 1, 11)
@async_client_context.require_version_min(4, 2, 0)
async def test_update_many_pipeline(self):
await self._test_update_many([{"$set": {"foo": "bar"}}])

Expand Down Expand Up @@ -206,7 +206,7 @@ async def _test_update_one(self, update):
async def test_update_one(self):
await self._test_update_one({"$set": {"foo": "bar"}})

@async_client_context.require_version_min(4, 1, 11)
@async_client_context.require_version_min(4, 2, 0)
async def test_update_one_pipeline(self):
await self._test_update_one([{"$set": {"foo": "bar"}}])

Expand Down
95 changes: 12 additions & 83 deletions test/asynchronous/test_change_stream.py
Original file line number Diff line number Diff line change
Expand Up @@ -267,7 +267,7 @@ async def test_batch_size_is_honored(self):

# $changeStream.startAtOperationTime was added in 4.0.0; with 4.0 support dropped, these tests now require 4.2.0+.
@no_type_check
@async_client_context.require_version_min(4, 0, 0)
@async_client_context.require_version_min(4, 2, 0)
async def test_start_at_operation_time(self):
optime = await self.get_start_at_operation_time()

Expand Down Expand Up @@ -436,7 +436,7 @@ async def test_change_operations(self):
await self._test_get_invalidate_event(change_stream)

@no_type_check
@async_client_context.require_version_min(4, 1, 1)
@async_client_context.require_version_min(4, 2, 0)
async def test_start_after(self):
resume_token = await self.get_resume_token(invalidate=True)

Expand All @@ -452,7 +452,7 @@ async def test_start_after(self):
self.assertEqual(change["fullDocument"], {"_id": 2})

@no_type_check
@async_client_context.require_version_min(4, 1, 1)
@async_client_context.require_version_min(4, 2, 0)
async def test_start_after_resume_process_with_changes(self):
resume_token = await self.get_resume_token(invalidate=True)

Expand Down Expand Up @@ -563,27 +563,16 @@ async def _test_update_resume_token(self, expected_rt_getter):
)

# Prose test no. 1
@async_client_context.require_version_min(4, 0, 7)
@async_client_context.require_version_min(4, 2, 0)
async def test_update_resume_token(self):
await self._test_update_resume_token(self._get_expected_resume_token)

# Prose test no. 1
@async_client_context.require_version_max(4, 0, 7)
async def test_update_resume_token_legacy(self):
await self._test_update_resume_token(self._get_expected_resume_token_legacy)

# Prose test no. 2
@async_client_context.require_version_min(4, 1, 8)
@async_client_context.require_version_min(4, 2, 0)
async def test_raises_error_on_missing_id_418plus(self):
# Server returns an error on 4.1.8+
await self._test_raises_error_on_missing_id(OperationFailure)

# Prose test no. 2
@async_client_context.require_version_max(4, 1, 8)
async def test_raises_error_on_missing_id_418minus(self):
# PyMongo raises an error
await self._test_raises_error_on_missing_id(InvalidOperation)

# Prose test no. 3
@no_type_check
async def test_resume_on_error(self):
Expand Down Expand Up @@ -642,40 +631,12 @@ def raise_error():
cursor.close = raise_error
await self.insert_one_and_check(change_stream, {"_id": 2})

# Prose test no. 9
@no_type_check
@async_client_context.require_version_min(4, 0, 0)
@async_client_context.require_version_max(4, 0, 7)
async def test_start_at_operation_time_caching(self):
# Case 1: change stream not started with startAtOperationTime
client, listener = self.client_with_listener("aggregate")
async with await self.change_stream_with_client(client) as cs:
await self.kill_change_stream_cursor(cs)
await cs.try_next()
cmd = listener.started_events[-1].command
self.assertIsNotNone(cmd["pipeline"][0]["$changeStream"].get("startAtOperationTime"))

# Case 2: change stream started with startAtOperationTime
listener.reset()
optime = await self.get_start_at_operation_time()
async with await self.change_stream_with_client(
client, start_at_operation_time=optime
) as cs:
await self.kill_change_stream_cursor(cs)
await cs.try_next()
cmd = listener.started_events[-1].command
self.assertEqual(
cmd["pipeline"][0]["$changeStream"].get("startAtOperationTime"),
optime,
str([k.command for k in listener.started_events]),
)

# Prose test no. 10 - SKIPPED
# This test is identical to prose test no. 3.

# Prose test no. 11
@no_type_check
@async_client_context.require_version_min(4, 0, 7)
@async_client_context.require_version_min(4, 2, 0)
async def test_resumetoken_empty_batch(self):
client, listener = await self._client_with_listener("getMore")
async with await self.change_stream_with_client(client) as change_stream:
Expand All @@ -687,7 +648,7 @@ async def test_resumetoken_empty_batch(self):

# Prose test no. 11
@no_type_check
@async_client_context.require_version_min(4, 0, 7)
@async_client_context.require_version_min(4, 2, 0)
async def test_resumetoken_exhausted_batch(self):
client, listener = await self._client_with_listener("getMore")
async with await self.change_stream_with_client(client) as change_stream:
Expand All @@ -697,38 +658,6 @@ async def test_resumetoken_exhausted_batch(self):
response = listener.succeeded_events[-1].reply
self.assertEqual(resume_token, response["cursor"]["postBatchResumeToken"])

# Prose test no. 12
@no_type_check
@async_client_context.require_version_max(4, 0, 7)
async def test_resumetoken_empty_batch_legacy(self):
resume_point = await self.get_resume_token()

# Empty resume token when neither resumeAfter or startAfter specified.
async with await self.change_stream() as change_stream:
await change_stream.try_next()
self.assertIsNone(change_stream.resume_token)

# Resume token value is same as resumeAfter.
async with await self.change_stream(resume_after=resume_point) as change_stream:
await change_stream.try_next()
resume_token = change_stream.resume_token
self.assertEqual(resume_token, resume_point)

# Prose test no. 12
@no_type_check
@async_client_context.require_version_max(4, 0, 7)
async def test_resumetoken_exhausted_batch_legacy(self):
# Resume token is _id of last change.
async with await self.change_stream() as change_stream:
change = await self._populate_and_exhaust_change_stream(change_stream)
self.assertEqual(change_stream.resume_token, change["_id"])
resume_point = change["_id"]

# Resume token is _id of last change even if resumeAfter is specified.
async with await self.change_stream(resume_after=resume_point) as change_stream:
change = await self._populate_and_exhaust_change_stream(change_stream)
self.assertEqual(change_stream.resume_token, change["_id"])

# Prose test no. 13
@no_type_check
async def test_resumetoken_partially_iterated_batch(self):
Expand Down Expand Up @@ -770,13 +699,13 @@ async def test_resumetoken_uniterated_nonempty_batch_resumeafter(self):
# Prose test no. 14
@no_type_check
@async_client_context.require_no_mongos
@async_client_context.require_version_min(4, 1, 1)
@async_client_context.require_version_min(4, 2, 0)
async def test_resumetoken_uniterated_nonempty_batch_startafter(self):
await self._test_resumetoken_uniterated_nonempty_batch("start_after")

# Prose test no. 17
@no_type_check
@async_client_context.require_version_min(4, 1, 1)
@async_client_context.require_version_min(4, 2, 0)
async def test_startafter_resume_uses_startafter_after_empty_getMore(self):
# Resume should use startAfter after no changes have been returned.
resume_point = await self.get_resume_token()
Expand All @@ -796,7 +725,7 @@ async def test_startafter_resume_uses_startafter_after_empty_getMore(self):

# Prose test no. 18
@no_type_check
@async_client_context.require_version_min(4, 1, 1)
@async_client_context.require_version_min(4, 2, 0)
async def test_startafter_resume_uses_resumeafter_after_nonempty_getMore(self):
# Resume should use resumeAfter after some changes have been returned.
resume_point = await self.get_resume_token()
Expand Down Expand Up @@ -843,7 +772,7 @@ async def test_split_large_change(self):
class TestClusterAsyncChangeStream(TestAsyncChangeStreamBase, APITestsMixin):
dbs: list

@async_client_context.require_version_min(4, 0, 0, -1)
@async_client_context.require_version_min(4, 2, 0)
@async_client_context.require_change_streams
async def asyncSetUp(self) -> None:
await super().asyncSetUp()
Expand Down Expand Up @@ -903,7 +832,7 @@ async def test_full_pipeline(self):


class TestAsyncDatabaseAsyncChangeStream(TestAsyncChangeStreamBase, APITestsMixin):
@async_client_context.require_version_min(4, 0, 0, -1)
@async_client_context.require_version_min(4, 2, 0)
@async_client_context.require_change_streams
async def asyncSetUp(self) -> None:
await super().asyncSetUp()
Expand Down
Loading
Loading