diff --git a/PLAN.md b/PLAN.md
new file mode 100644
index 0000000..2957a31
--- /dev/null
+++ b/PLAN.md
@@ -0,0 +1,42 @@
+# Session Persistence Capability
+
+## Summary
+
+This PR implements the `SessionPersistence` capability for saving and loading agent conversation sessions across process restarts.
+
+## Design
+
+### Storage Protocol
+
+`SessionStore` is a `Protocol` with five methods:
+- `save(session_id, messages, *, metadata=None)` — persist a list of `ModelMessage`, with optional metadata
+- `load(session_id)` — retrieve messages or `None`
+- `load_metadata(session_id)` — retrieve stored metadata or `None`
+- `list_sessions()` — enumerate stored session IDs
+- `delete(session_id)` — remove a session
+
+### Backends
+
+- **`InMemorySessionStore`** — dict-based, for testing (data lost on process exit)
+- **`FileSessionStore`** — one JSON file per session in a directory, using `ModelMessagesTypeAdapter` for serialization/deserialization
+
+### Capability
+
+`SessionPersistence(AbstractCapability)`:
+- **`before_run`**: loads saved messages and prepends them to `ctx.messages`
+- **`after_run`**: saves `result.all_messages()` to the store (when `auto_save=True`)
+- **`session_id`**: auto-generated UUID4 if not provided
+- **`from_spec`**: supports `backend="memory"` (default) and `backend="file"` (with configurable `directory`)
+
+### Key decisions
+
+- Uses `before_run`/`after_run` hooks (not `before_model_request`) since session restore/save is a per-run concern, not per-request
+- Prepends history via `ctx.messages[:0] = existing` for clean integration with the agent's message handling
+- `InMemorySessionStore` returns copies to prevent aliasing bugs
+- `FileSessionStore` uses `ModelMessagesTypeAdapter.dump_json`/`validate_json` for full-fidelity message serialization
+
+## Files
+
+- `src/pydantic_harness/session_persistence.py` — stores, capability
+- `src/pydantic_harness/__init__.py` — re-exports
+- `tests/test_session_persistence.py` — 45 tests, 100% coverage
diff --git a/src/pydantic_harness/__init__.py b/src/pydantic_harness/__init__.py
index 9d728b6..ee8bc45
100644 --- a/src/pydantic_harness/__init__.py +++ b/src/pydantic_harness/__init__.py @@ -7,4 +7,16 @@ # Each capability module is imported and re-exported here. # Capabilities are listed alphabetically. -__all__: list[str] = [] +from pydantic_harness.session_persistence import ( + FileSessionStore, + InMemorySessionStore, + SessionPersistence, + SessionStore, +) + +__all__: list[str] = [ + 'FileSessionStore', + 'InMemorySessionStore', + 'SessionPersistence', + 'SessionStore', +] diff --git a/src/pydantic_harness/session_persistence.py b/src/pydantic_harness/session_persistence.py new file mode 100644 index 0000000..db12e14 --- /dev/null +++ b/src/pydantic_harness/session_persistence.py @@ -0,0 +1,260 @@ +"""Session persistence capability for saving and loading agent conversation history. + +Provides automatic save/restore of conversation messages across agent runs, +with pluggable storage backends (``InMemorySessionStore`` for testing, +``FileSessionStore`` for on-disk persistence via JSON files). +""" + +from __future__ import annotations + +import json as _json +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any, Protocol, runtime_checkable +from uuid import uuid4 + +from pydantic_ai.capabilities.abstract import AbstractCapability +from pydantic_ai.messages import ModelMessage, ModelMessagesTypeAdapter +from pydantic_ai.run import AgentRunResult +from pydantic_ai.tools import AgentDepsT, RunContext + + +@runtime_checkable +class SessionStore(Protocol): + """Protocol for pluggable session storage backends.""" + + def save( + self, + session_id: str, + messages: list[ModelMessage], + *, + metadata: dict[str, Any] | None = None, + ) -> None: # pragma: no cover + """Persist conversation messages (and optional metadata) for the given session.""" + ... + + def load(self, session_id: str) -> list[ModelMessage] | None: # pragma: no cover + """Load conversation messages for the given session, or None if not found.""" + ... 
+ + def load_metadata(self, session_id: str) -> dict[str, Any] | None: # pragma: no cover + """Load metadata for the given session, or None if not found.""" + ... + + def list_sessions(self) -> list[str]: # pragma: no cover + """Return all stored session IDs.""" + ... + + def delete(self, session_id: str) -> bool: # pragma: no cover + """Delete a session by ID. Returns True if it existed.""" + ... + + +class InMemorySessionStore: + """Dict-based in-memory session store, suitable for testing. + + All data lives in a plain ``dict`` and is lost when the process exits. + """ + + def __init__(self) -> None: + """Initialize an empty in-memory session store.""" + self._sessions: dict[str, list[ModelMessage]] = {} + self._metadata: dict[str, dict[str, Any]] = {} + + def save( + self, + session_id: str, + messages: list[ModelMessage], + *, + metadata: dict[str, Any] | None = None, + ) -> None: + """Persist conversation messages (and optional metadata) for the given session.""" + self._sessions[session_id] = list(messages) + if metadata is not None: + self._metadata[session_id] = dict(metadata) + else: + self._metadata.pop(session_id, None) + + def load(self, session_id: str) -> list[ModelMessage] | None: + """Load conversation messages for the given session.""" + messages = self._sessions.get(session_id) + if messages is None: + return None + return list(messages) + + def load_metadata(self, session_id: str) -> dict[str, Any] | None: + """Load metadata for the given session.""" + meta = self._metadata.get(session_id) + if meta is None: + return None + return dict(meta) + + def list_sessions(self) -> list[str]: + """Return all stored session IDs.""" + return list(self._sessions) + + def delete(self, session_id: str) -> bool: + """Delete a session by ID.""" + self._metadata.pop(session_id, None) + return self._sessions.pop(session_id, None) is not None + + +class FileSessionStore: + """JSON-file-based session store for on-disk persistence. 
+ + Each session is stored as a separate JSON file in the configured directory, + using ``ModelMessagesTypeAdapter`` for serialization. + """ + + def __init__(self, directory: str | Path) -> None: + """Initialize a file-backed session store at the given directory. + + Args: + directory: Path to the directory where session files are stored. + Created automatically if it does not exist. + """ + self._directory = Path(directory) + + def _path_for(self, session_id: str) -> Path: + return self._directory / f'{session_id}.json' + + def _meta_path_for(self, session_id: str) -> Path: + return self._directory / f'{session_id}.meta.json' + + def save( + self, + session_id: str, + messages: list[ModelMessage], + *, + metadata: dict[str, Any] | None = None, + ) -> None: + """Persist conversation messages (and optional metadata) as JSON files.""" + self._directory.mkdir(parents=True, exist_ok=True) + data = ModelMessagesTypeAdapter.dump_json(messages) + self._path_for(session_id).write_bytes(data) + + meta_path = self._meta_path_for(session_id) + if metadata is not None: + meta_path.write_text(_json.dumps(metadata), encoding='utf-8') + elif meta_path.exists(): + meta_path.unlink() + + def load(self, session_id: str) -> list[ModelMessage] | None: + """Load conversation messages from a JSON file.""" + path = self._path_for(session_id) + if not path.exists(): + return None + data = path.read_bytes() + return ModelMessagesTypeAdapter.validate_json(data) + + def load_metadata(self, session_id: str) -> dict[str, Any] | None: + """Load metadata from a JSON file.""" + meta_path = self._meta_path_for(session_id) + if not meta_path.exists(): + return None + raw = meta_path.read_text(encoding='utf-8') + result: dict[str, Any] = _json.loads(raw) + return result + + def list_sessions(self) -> list[str]: + """Return all session IDs found in the directory.""" + if not self._directory.exists(): + return [] + return sorted(p.stem for p in self._directory.glob('*.json') if not 
p.name.endswith('.meta.json')) + + def delete(self, session_id: str) -> bool: + """Delete a session file and its metadata. Returns True if it existed.""" + path = self._path_for(session_id) + existed = path.exists() + if existed: + path.unlink() + meta_path = self._meta_path_for(session_id) + if meta_path.exists(): + meta_path.unlink() + return existed + + +@dataclass +class SessionPersistence(AbstractCapability[AgentDepsT]): + """Capability for saving and restoring conversation state across agent runs. + + On run start, loads any previously saved messages for the session and + prepends them to the conversation. On run end, saves the full message + history back to the store. + + Example: + ```python + from pydantic_ai import Agent + from pydantic_harness.session_persistence import ( + SessionPersistence, + InMemorySessionStore, + ) + + store = InMemorySessionStore() + agent = Agent( + 'openai:gpt-4o', + capabilities=[SessionPersistence(store=store, session_id='my-session')], + ) + ``` + """ + + store: SessionStore = field(default_factory=InMemorySessionStore) + """The storage backend. Defaults to ``InMemorySessionStore`` (ephemeral).""" + + session_id: str = field(default_factory=lambda: str(uuid4())) + """Unique identifier for this session. Auto-generated (UUID4) if not provided.""" + + auto_save: bool = True + """Whether to automatically save messages after each run.""" + + metadata: dict[str, Any] | None = None + """Optional metadata to store alongside the session messages. + + When set, this dict is persisted on each save and can be retrieved + via ``store.load_metadata(session_id)``. + """ + + @classmethod + def get_serialization_name(cls) -> str | None: + """Return the name used for spec serialization.""" + return 'SessionPersistence' + + @classmethod + def from_spec(cls, *args: Any, **kwargs: Any) -> SessionPersistence[Any]: + """Create from spec arguments. + + Supports ``backend`` kwarg: ``"memory"`` (default) or ``"file"`` (requires ``directory``). 
+ """ + backend = kwargs.pop('backend', 'memory') + if backend == 'file': + directory = kwargs.pop('directory', '.sessions') + return cls(store=FileSessionStore(directory), **kwargs) + return cls(store=InMemorySessionStore(), **kwargs) + + async def before_run( + self, + ctx: RunContext[AgentDepsT], + ) -> None: + """Load saved messages and prepend them to the conversation.""" + existing = self.store.load(self.session_id) + if existing: + ctx.messages[:0] = existing + + async def after_run( + self, + ctx: RunContext[AgentDepsT], + *, + result: AgentRunResult[Any], + ) -> AgentRunResult[Any]: + """Save the full message history after a successful run.""" + if self.auto_save: + self.store.save(self.session_id, result.all_messages(), metadata=self.metadata) + return result + + +__all__ = [ + 'FileSessionStore', + 'InMemorySessionStore', + 'SessionPersistence', + 'SessionStore', +] diff --git a/tests/test_session_persistence.py b/tests/test_session_persistence.py new file mode 100644 index 0000000..a0123ac --- /dev/null +++ b/tests/test_session_persistence.py @@ -0,0 +1,432 @@ +"""Tests for pydantic_harness.session_persistence.""" + +from __future__ import annotations + +import dataclasses +import json +from pathlib import Path +from typing import Any + +import pytest +from pydantic_ai.messages import ( + ModelMessage, + ModelRequest, + ModelResponse, + TextPart, + UserPromptPart, +) +from pydantic_ai.run import AgentRunResult +from pydantic_ai.usage import RunUsage + +from pydantic_harness.session_persistence import ( + FileSessionStore, + InMemorySessionStore, + SessionPersistence, + SessionStore, +) + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def _user(text: str) -> ModelRequest: + return ModelRequest(parts=[UserPromptPart(content=text)]) + + +def _assistant(text: str) -> ModelResponse: + return 
ModelResponse(parts=[TextPart(content=text)]) + + +def _make_ctx( + *, + messages: list[ModelMessage] | None = None, +) -> Any: + """Build a minimal RunContext-like object for testing hooks.""" + + @dataclasses.dataclass + class _FakeModel: + model_id: str = 'test-model' + + @dataclasses.dataclass + class _FakeCtx: + usage: RunUsage + model: Any = dataclasses.field(default_factory=_FakeModel) + deps: None = None + messages: list[ModelMessage] = dataclasses.field(default_factory=list[ModelMessage]) + + ctx = _FakeCtx(usage=RunUsage()) + if messages: + ctx.messages = list(messages) + return ctx + + +def _make_result(messages: list[ModelMessage], output: str = 'done') -> AgentRunResult[str]: + """Build a minimal AgentRunResult wrapping the given messages.""" + from pydantic_ai._agent_graph import GraphAgentState + + state = GraphAgentState(message_history=list(messages)) + return AgentRunResult(output=output, _state=state) + + +# --------------------------------------------------------------------------- +# InMemorySessionStore +# --------------------------------------------------------------------------- + + +class TestInMemorySessionStore: + def test_protocol_conformance(self) -> None: + assert isinstance(InMemorySessionStore(), SessionStore) + + def test_save_and_load(self) -> None: + store = InMemorySessionStore() + messages: list[ModelMessage] = [_user('hello'), _assistant('hi')] + store.save('s1', messages) + loaded = store.load('s1') + assert loaded is not None + assert len(loaded) == 2 + + def test_load_nonexistent_returns_none(self) -> None: + store = InMemorySessionStore() + assert store.load('missing') is None + + def test_save_overwrites(self) -> None: + store = InMemorySessionStore() + store.save('s1', [_user('first')]) + store.save('s1', [_user('second')]) + loaded = store.load('s1') + assert loaded is not None + assert len(loaded) == 1 + part = loaded[0].parts[0] + assert isinstance(part, UserPromptPart) + assert part.content == 'second' + + def 
test_list_sessions_empty(self) -> None: + store = InMemorySessionStore() + assert store.list_sessions() == [] + + def test_list_sessions(self) -> None: + store = InMemorySessionStore() + store.save('a', [_user('x')]) + store.save('b', [_user('y')]) + assert set(store.list_sessions()) == {'a', 'b'} + + def test_delete_existing(self) -> None: + store = InMemorySessionStore() + store.save('s1', [_user('x')]) + assert store.delete('s1') is True + assert store.load('s1') is None + + def test_delete_nonexistent(self) -> None: + store = InMemorySessionStore() + assert store.delete('missing') is False + + def test_save_returns_copy(self) -> None: + """Mutating the saved list should not affect stored data.""" + store = InMemorySessionStore() + messages: list[ModelMessage] = [_user('hello')] + store.save('s1', messages) + messages.append(_assistant('bye')) + loaded = store.load('s1') + assert loaded is not None + assert len(loaded) == 1 + + def test_load_returns_copy(self) -> None: + """Mutating loaded list should not affect stored data.""" + store = InMemorySessionStore() + store.save('s1', [_user('hello')]) + loaded = store.load('s1') + assert loaded is not None + loaded.append(_assistant('extra')) + reloaded = store.load('s1') + assert reloaded is not None + assert len(reloaded) == 1 + + def test_save_with_metadata(self) -> None: + store = InMemorySessionStore() + store.save('s1', [_user('hi')], metadata={'agent': 'test'}) + meta = store.load_metadata('s1') + assert meta == {'agent': 'test'} + + def test_save_without_metadata_clears_existing(self) -> None: + store = InMemorySessionStore() + store.save('s1', [_user('hi')], metadata={'key': 'val'}) + store.save('s1', [_user('hi')]) + assert store.load_metadata('s1') is None + + def test_load_metadata_nonexistent(self) -> None: + store = InMemorySessionStore() + assert store.load_metadata('missing') is None + + def test_load_metadata_returns_copy(self) -> None: + store = InMemorySessionStore() + store.save('s1', 
[_user('hi')], metadata={'key': 'val'}) + meta = store.load_metadata('s1') + assert meta is not None + meta['extra'] = 'added' + assert store.load_metadata('s1') == {'key': 'val'} + + def test_delete_also_removes_metadata(self) -> None: + store = InMemorySessionStore() + store.save('s1', [_user('hi')], metadata={'key': 'val'}) + store.delete('s1') + assert store.load_metadata('s1') is None + + +# --------------------------------------------------------------------------- +# FileSessionStore +# --------------------------------------------------------------------------- + + +class TestFileSessionStore: + def test_protocol_conformance(self, tmp_path: Path) -> None: + assert isinstance(FileSessionStore(tmp_path), SessionStore) + + def test_save_and_load(self, tmp_path: Path) -> None: + store = FileSessionStore(tmp_path / 'sessions') + messages: list[ModelMessage] = [_user('hello'), _assistant('hi')] + store.save('s1', messages) + loaded = store.load('s1') + assert loaded is not None + assert len(loaded) == 2 + + def test_creates_directory(self, tmp_path: Path) -> None: + d = tmp_path / 'nested' / 'dir' + store = FileSessionStore(d) + store.save('s1', [_user('hi')]) + assert d.exists() + + def test_load_nonexistent_returns_none(self, tmp_path: Path) -> None: + store = FileSessionStore(tmp_path) + assert store.load('missing') is None + + def test_file_is_valid_json(self, tmp_path: Path) -> None: + store = FileSessionStore(tmp_path) + store.save('s1', [_user('hello')]) + raw = (tmp_path / 's1.json').read_text(encoding='utf-8') + parsed = json.loads(raw) + assert isinstance(parsed, list) + + def test_list_sessions_empty(self, tmp_path: Path) -> None: + store = FileSessionStore(tmp_path) + assert store.list_sessions() == [] + + def test_list_sessions_nonexistent_directory(self, tmp_path: Path) -> None: + store = FileSessionStore(tmp_path / 'nonexistent') + assert store.list_sessions() == [] + + def test_list_sessions(self, tmp_path: Path) -> None: + store = 
FileSessionStore(tmp_path) + store.save('alpha', [_user('x')]) + store.save('beta', [_user('y')]) + assert store.list_sessions() == ['alpha', 'beta'] + + def test_delete_existing(self, tmp_path: Path) -> None: + store = FileSessionStore(tmp_path) + store.save('s1', [_user('x')]) + assert store.delete('s1') is True + assert store.load('s1') is None + assert not (tmp_path / 's1.json').exists() + + def test_delete_nonexistent(self, tmp_path: Path) -> None: + store = FileSessionStore(tmp_path) + assert store.delete('missing') is False + + def test_save_with_metadata(self, tmp_path: Path) -> None: + store = FileSessionStore(tmp_path) + store.save('s1', [_user('hi')], metadata={'model': 'gpt-5', 'version': 2}) + meta = store.load_metadata('s1') + assert meta == {'model': 'gpt-5', 'version': 2} + # Verify the file exists + assert (tmp_path / 's1.meta.json').exists() + + def test_save_without_metadata_removes_meta_file(self, tmp_path: Path) -> None: + store = FileSessionStore(tmp_path) + store.save('s1', [_user('hi')], metadata={'key': 'val'}) + assert (tmp_path / 's1.meta.json').exists() + store.save('s1', [_user('hi')]) + assert not (tmp_path / 's1.meta.json').exists() + assert store.load_metadata('s1') is None + + def test_load_metadata_nonexistent(self, tmp_path: Path) -> None: + store = FileSessionStore(tmp_path) + assert store.load_metadata('missing') is None + + def test_delete_also_removes_metadata(self, tmp_path: Path) -> None: + store = FileSessionStore(tmp_path) + store.save('s1', [_user('hi')], metadata={'key': 'val'}) + store.delete('s1') + assert not (tmp_path / 's1.meta.json').exists() + assert store.load_metadata('s1') is None + + def test_list_sessions_excludes_meta_files(self, tmp_path: Path) -> None: + store = FileSessionStore(tmp_path) + store.save('alpha', [_user('x')], metadata={'tag': 'a'}) + store.save('beta', [_user('y')]) + sessions = store.list_sessions() + assert sessions == ['alpha', 'beta'] + + def test_roundtrip_preserves_content(self, 
tmp_path: Path) -> None: + store = FileSessionStore(tmp_path) + original: list[ModelMessage] = [_user('hello world'), _assistant('greetings')] + store.save('s1', original) + loaded = store.load('s1') + assert loaded is not None + assert len(loaded) == 2 + user_part = loaded[0].parts[0] + assert isinstance(user_part, UserPromptPart) + assert user_part.content == 'hello world' + assistant_part = loaded[1].parts[0] + assert isinstance(assistant_part, TextPart) + assert assistant_part.content == 'greetings' + + +# --------------------------------------------------------------------------- +# SessionPersistence capability +# --------------------------------------------------------------------------- + + +class TestSessionPersistence: + def test_auto_generates_session_id(self) -> None: + cap = SessionPersistence() + assert cap.session_id + cap2 = SessionPersistence() + assert cap.session_id != cap2.session_id + + def test_explicit_session_id(self) -> None: + cap = SessionPersistence(session_id='my-session') + assert cap.session_id == 'my-session' + + def test_default_store_is_in_memory(self) -> None: + cap = SessionPersistence() + assert isinstance(cap.store, InMemorySessionStore) + + @pytest.mark.anyio + async def test_before_run_no_history(self) -> None: + store = InMemorySessionStore() + cap = SessionPersistence(store=store, session_id='s1') + ctx = _make_ctx(messages=[_user('new prompt')]) + await cap.before_run(ctx) + assert len(ctx.messages) == 1 + + @pytest.mark.anyio + async def test_before_run_loads_history(self) -> None: + store = InMemorySessionStore() + store.save('s1', [_user('old'), _assistant('response')]) + cap = SessionPersistence(store=store, session_id='s1') + ctx = _make_ctx(messages=[_user('new prompt')]) + await cap.before_run(ctx) + assert len(ctx.messages) == 3 + # History is prepended + first_part = ctx.messages[0].parts[0] + assert isinstance(first_part, UserPromptPart) + assert first_part.content == 'old' + + @pytest.mark.anyio + async def 
test_after_run_saves_messages(self) -> None: + store = InMemorySessionStore() + cap = SessionPersistence(store=store, session_id='s1') + messages: list[ModelMessage] = [_user('hello'), _assistant('hi')] + result = _make_result(messages, output='hi') + ctx = _make_ctx() + returned = await cap.after_run(ctx, result=result) + assert returned is result + loaded = store.load('s1') + assert loaded is not None + assert len(loaded) == 2 + + @pytest.mark.anyio + async def test_after_run_auto_save_disabled(self) -> None: + store = InMemorySessionStore() + cap = SessionPersistence(store=store, session_id='s1', auto_save=False) + messages: list[ModelMessage] = [_user('hello'), _assistant('hi')] + result = _make_result(messages, output='hi') + ctx = _make_ctx() + await cap.after_run(ctx, result=result) + assert store.load('s1') is None + + @pytest.mark.anyio + async def test_multi_turn_accumulation(self) -> None: + """Simulate two agent runs that accumulate messages.""" + store = InMemorySessionStore() + + # First run + cap = SessionPersistence(store=store, session_id='s1') + run1_messages: list[ModelMessage] = [_user('turn 1'), _assistant('reply 1')] + result1 = _make_result(run1_messages, output='reply 1') + ctx1 = _make_ctx() + await cap.after_run(ctx1, result=result1) + + # Second run: before_run prepends first run's messages + ctx2 = _make_ctx(messages=[_user('turn 2')]) + await cap.before_run(ctx2) + assert len(ctx2.messages) == 3 # 2 from history + 1 new + + # Simulate full run result with all messages + run2_messages: list[ModelMessage] = [ + _user('turn 1'), + _assistant('reply 1'), + _user('turn 2'), + _assistant('reply 2'), + ] + result2 = _make_result(run2_messages, output='reply 2') + await cap.after_run(ctx2, result=result2) + + saved = store.load('s1') + assert saved is not None + assert len(saved) == 4 + + def test_get_serialization_name(self) -> None: + assert SessionPersistence.get_serialization_name() == 'SessionPersistence' + + def 
test_from_spec_default(self) -> None: + cap = SessionPersistence.from_spec(session_id='s1') + assert isinstance(cap.store, InMemorySessionStore) + assert cap.session_id == 's1' + + def test_from_spec_file_backend(self, tmp_path: Path) -> None: + cap = SessionPersistence.from_spec(backend='file', directory=str(tmp_path)) + assert isinstance(cap.store, FileSessionStore) + + @pytest.mark.anyio + async def test_after_run_saves_metadata(self) -> None: + store = InMemorySessionStore() + cap = SessionPersistence(store=store, session_id='s1', metadata={'run': 'first'}) + messages: list[ModelMessage] = [_user('hello'), _assistant('hi')] + result = _make_result(messages, output='hi') + ctx = _make_ctx() + await cap.after_run(ctx, result=result) + meta = store.load_metadata('s1') + assert meta == {'run': 'first'} + + @pytest.mark.anyio + async def test_after_run_no_metadata_by_default(self) -> None: + store = InMemorySessionStore() + cap = SessionPersistence(store=store, session_id='s1') + messages: list[ModelMessage] = [_user('hello'), _assistant('hi')] + result = _make_result(messages, output='hi') + ctx = _make_ctx() + await cap.after_run(ctx, result=result) + assert store.load_metadata('s1') is None + + @pytest.mark.anyio + async def test_with_file_store_roundtrip(self, tmp_path: Path) -> None: + """Full roundtrip: save via after_run, restore via before_run, using FileSessionStore.""" + store = FileSessionStore(tmp_path / 'sessions') + cap = SessionPersistence(store=store, session_id='file-session') + + # Simulate first run + run1_messages: list[ModelMessage] = [_user('hello'), _assistant('hi there')] + result = _make_result(run1_messages, output='hi there') + ctx1 = _make_ctx() + await cap.after_run(ctx1, result=result) + + # Simulate new process: create fresh store instance pointing at same dir + store2 = FileSessionStore(tmp_path / 'sessions') + cap2 = SessionPersistence(store=store2, session_id='file-session') + ctx2 = _make_ctx(messages=[_user('new prompt')]) + await 
cap2.before_run(ctx2) + assert len(ctx2.messages) == 3 + first_part = ctx2.messages[0].parts[0] + assert isinstance(first_part, UserPromptPart) + assert first_part.content == 'hello'