Skip to content
Draft
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 11 additions & 7 deletions src/agents/memory/openai_conversations_session.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
from __future__ import annotations

from typing import Any, cast

from openai import AsyncOpenAI

from agents.models._openai_shared import get_default_openai_client
Expand All @@ -12,8 +14,9 @@ async def start_openai_conversations_session(openai_client: AsyncOpenAI | None =
_maybe_openai_client = openai_client
if openai_client is None:
_maybe_openai_client = get_default_openai_client() or AsyncOpenAI()
# this never be None here
_openai_client: AsyncOpenAI = _maybe_openai_client # type: ignore [assignment]
# ensure non-None for type checkers and readers
assert _maybe_openai_client is not None
_openai_client: AsyncOpenAI = _maybe_openai_client

response = await _openai_client.conversations.create(items=[])
return response.id
Expand Down Expand Up @@ -43,27 +46,28 @@ async def _clear_session_id(self) -> None:

async def get_items(self, limit: int | None = None) -> list[TResponseInputItem]:
    """Return this session's conversation items as serializable dicts.

    Args:
        limit: When ``None``, fetch every item in chronological
            (oldest-first) order. Otherwise fetch at most ``limit`` of the
            most recent items, returned oldest-first.

    Returns:
        A list of item dicts. NOTE(review): the Conversations API is
        assumed to return items matching the ``TResponseInputItem`` shape
        at runtime — the ``cast`` below only narrows for type checkers.
    """
    session_id = await self._get_session_id()
    all_items: list[dict[str, Any]] = []
    if limit is None:
        # No cap: walk the whole conversation oldest-first.
        async for item in self._openai_client.conversations.items.list(
            conversation_id=session_id,
            order="asc",
        ):
            # model_dump() converts the SDK model into a plain,
            # JSON-serializable dict.
            all_items.append(item.model_dump(exclude_unset=True))
    else:
        # Capped: fetch newest-first so the `limit` most recent items
        # arrive before we stop iterating.
        async for item in self._openai_client.conversations.items.list(
            conversation_id=session_id,
            limit=limit,
            order="desc",
        ):
            all_items.append(item.model_dump(exclude_unset=True))
            # Defensive stop once we hold enough items; `limit` is known
            # non-None in this branch, so no extra None check is needed.
            if len(all_items) >= limit:
                break
        # Restore chronological (oldest-first) order after the
        # descending fetch.
        all_items.reverse()

    # Single narrowing point for type checkers; runtime shape is
    # presumed to match (see docstring note).
    return cast(list[TResponseInputItem], all_items)

async def add_items(self, items: list[TResponseInputItem]) -> None:
session_id = await self._get_session_id()
Expand All @@ -77,7 +81,7 @@ async def pop_item(self) -> TResponseInputItem | None:
items = await self.get_items(limit=1)
if not items:
return None
item_id: str = str(items[0]["id"]) # type: ignore [typeddict-item]
item_id: str = str(items[0]["id"]) # type: ignore[typeddict-item]
await self._openai_client.conversations.items.delete(
conversation_id=session_id, item_id=item_id
)
Expand Down