 from pydantic import BaseModel
 from typing_extensions import Unpack, override
 
-from ..types.content import ContentBlock, Messages
+from ..types.content import ContentBlock, Messages, SystemContentBlock
 from ..types.exceptions import ContextWindowOverflowException, ModelThrottledException
 from ..types.streaming import StreamEvent
 from ..types.tools import ToolChoice, ToolResult, ToolSpec, ToolUse
@@ -89,11 +89,12 @@ def get_config(self) -> OpenAIConfig:
         return cast(OpenAIModel.OpenAIConfig, self.config)
 
     @classmethod
-    def format_request_message_content(cls, content: ContentBlock) -> dict[str, Any]:
+    def format_request_message_content(cls, content: ContentBlock, **kwargs: Any) -> dict[str, Any]:
         """Format an OpenAI compatible content block.
 
         Args:
             content: Message content.
+            **kwargs: Additional keyword arguments for future extensibility.
 
         Returns:
             OpenAI compatible content block.
@@ -131,11 +132,12 @@ def format_request_message_content(cls, content: ContentBlock) -> dict[str, Any]:
         raise TypeError(f"content_type=<{next(iter(content))}> | unsupported type")
 
     @classmethod
-    def format_request_message_tool_call(cls, tool_use: ToolUse) -> dict[str, Any]:
+    def format_request_message_tool_call(cls, tool_use: ToolUse, **kwargs: Any) -> dict[str, Any]:
         """Format an OpenAI compatible tool call.
 
         Args:
             tool_use: Tool use requested by the model.
+            **kwargs: Additional keyword arguments for future extensibility.
 
         Returns:
             OpenAI compatible tool call.
@@ -150,11 +152,12 @@ def format_request_message_tool_call(cls, tool_use: ToolUse) -> dict[str, Any]:
         }
 
     @classmethod
-    def format_request_tool_message(cls, tool_result: ToolResult) -> dict[str, Any]:
+    def format_request_tool_message(cls, tool_result: ToolResult, **kwargs: Any) -> dict[str, Any]:
         """Format an OpenAI compatible tool message.
 
         Args:
             tool_result: Tool result collected from a tool execution.
+            **kwargs: Additional keyword arguments for future extensibility.
 
         Returns:
             OpenAI compatible tool message.
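
The three formatter hooks above gain `**kwargs` purely for forward compatibility: callers and subclasses can thread provider-specific options through without breaking the base signatures, which simply ignore them. A minimal caller-side sketch (the `cache_hint` keyword and the import path are illustrative assumptions, not part of this diff):

```python
# Hypothetical sketch: the extra keyword is accepted and ignored by the base
# implementation, leaving subclasses free to consume it later.
from strands.models.openai import OpenAIModel  # assumed import path

block = {"text": "hello"}  # a ContentBlock carrying only a "text" field
formatted = OpenAIModel.format_request_message_content(block, cache_hint="ephemeral")
```
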
@@ -198,18 +201,46 @@ def _format_request_tool_choice(cls, tool_choice: ToolChoice | None) -> dict[str
         return {"tool_choice": "auto"}
 
     @classmethod
-    def format_request_messages(cls, messages: Messages, system_prompt: Optional[str] = None) -> list[dict[str, Any]]:
-        """Format an OpenAI compatible messages array.
+    def _format_system_messages(
+        cls,
+        system_prompt: Optional[str] = None,
+        *,
+        system_prompt_content: Optional[list[SystemContentBlock]] = None,
+        **kwargs: Any,
+    ) -> list[dict[str, Any]]:
+        """Format system messages for OpenAI-compatible providers.
 
         Args:
-            messages: List of message objects to be processed by the model.
             system_prompt: System prompt to provide context to the model.
+            system_prompt_content: System prompt content blocks to provide context to the model.
+            **kwargs: Additional keyword arguments for future extensibility.
 
         Returns:
-            An OpenAI compatible messages array.
+            List of formatted system messages.
+        """
+        # Handle backward compatibility: if system_prompt is provided but system_prompt_content is None
+        if system_prompt and system_prompt_content is None:
+            system_prompt_content = [{"text": system_prompt}]
+
+        # TODO: Handle caching blocks https://github.com/strands-agents/sdk-python/issues/1140
+        return [
+            {"role": "system", "content": content["text"]}
+            for content in system_prompt_content or []
+            if "text" in content
+        ]
+
+    @classmethod
+    def _format_regular_messages(cls, messages: Messages, **kwargs: Any) -> list[dict[str, Any]]:
+        """Format regular messages for OpenAI-compatible providers.
+
+        Args:
+            messages: List of message objects to be processed by the model.
+            **kwargs: Additional keyword arguments for future extensibility.
+
+        Returns:
+            List of formatted messages.
         """
-        formatted_messages: list[dict[str, Any]]
-        formatted_messages = [{"role": "system", "content": system_prompt}] if system_prompt else []
+        formatted_messages = []
 
         for message in messages:
             contents = message["content"]
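
For reference, a quick sketch of how the new system-message helper behaves. The import path and the exact shape of `SystemContentBlock` are assumptions based on the surrounding file, not stated in this diff:

```python
# Illustrative sketch, not part of the diff. Assumes OpenAIModel lives in
# strands.models.openai and that SystemContentBlock is a TypedDict with an
# optional "text" key.
from strands.models.openai import OpenAIModel  # assumed import path

# Only blocks carrying a "text" key become system messages for now.
OpenAIModel._format_system_messages(
    system_prompt_content=[{"text": "You are a terse assistant."}]
)
# -> [{"role": "system", "content": "You are a terse assistant."}]

# Backward compatibility: a bare system_prompt string is wrapped into a single
# text block before formatting.
OpenAIModel._format_system_messages("You are a terse assistant.")
# -> [{"role": "system", "content": "You are a terse assistant."}]
```
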
@@ -242,14 +273,42 @@ def format_request_messages(cls, messages: Messages, system_prompt: Optional[str
             formatted_messages.append(formatted_message)
             formatted_messages.extend(formatted_tool_messages)
 
+        return formatted_messages
+
+    @classmethod
+    def format_request_messages(
+        cls,
+        messages: Messages,
+        system_prompt: Optional[str] = None,
+        *,
+        system_prompt_content: Optional[list[SystemContentBlock]] = None,
+        **kwargs: Any,
+    ) -> list[dict[str, Any]]:
+        """Format an OpenAI compatible messages array.
+
+        Args:
+            messages: List of message objects to be processed by the model.
+            system_prompt: System prompt to provide context to the model.
+            system_prompt_content: System prompt content blocks to provide context to the model.
+            **kwargs: Additional keyword arguments for future extensibility.
+
+        Returns:
+            An OpenAI compatible messages array.
+        """
+        formatted_messages = cls._format_system_messages(system_prompt, system_prompt_content=system_prompt_content)
+        formatted_messages.extend(cls._format_regular_messages(messages))
+
         return [message for message in formatted_messages if message["content"] or "tool_calls" in message]
 
     def format_request(
         self,
         messages: Messages,
-        tool_specs: Optional[list[ToolSpec]] = None,
-        system_prompt: Optional[str] = None,
+        tool_specs: list[ToolSpec] | None = None,
+        system_prompt: str | None = None,
         tool_choice: ToolChoice | None = None,
+        *,
+        system_prompt_content: list[SystemContentBlock] | None = None,
+        **kwargs: Any,
     ) -> dict[str, Any]:
         """Format an OpenAI compatible chat streaming request.
 
@@ -258,6 +317,8 @@ def format_request(
             tool_specs: List of tool specifications to make available to the model.
             system_prompt: System prompt to provide context to the model.
             tool_choice: Selection strategy for tool invocation.
+            system_prompt_content: System prompt content blocks to provide context to the model.
+            **kwargs: Additional keyword arguments for future extensibility.
 
         Returns:
             An OpenAI compatible chat streaming request.
@@ -267,7 +328,9 @@ def format_request(
                 format.
         """
         return {
-            "messages": self.format_request_messages(messages, system_prompt),
+            "messages": self.format_request_messages(
+                messages, system_prompt, system_prompt_content=system_prompt_content
+            ),
             "model": self.config["model_id"],
             "stream": True,
             "stream_options": {"include_usage": True},
@@ -286,11 +349,12 @@ def format_request(
             **cast(dict[str, Any], self.config.get("params", {})),
         }
 
-    def format_chunk(self, event: dict[str, Any]) -> StreamEvent:
+    def format_chunk(self, event: dict[str, Any], **kwargs: Any) -> StreamEvent:
         """Format an OpenAI response event into a standardized message chunk.
 
         Args:
             event: A response event from the OpenAI compatible model.
+            **kwargs: Additional keyword arguments for future extensibility.
 
         Returns:
             The formatted chunk.
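
Putting the pieces together, a hedged end-to-end sketch of the new call path. The constructor arguments, model id, and prompt text are placeholders; only the `system_prompt_content` keyword comes from this diff:

```python
# Illustrative usage sketch, not part of the diff. Constructor arguments and the
# model id are assumptions; format_request threads system_prompt_content through
# to format_request_messages as shown above.
from strands.models.openai import OpenAIModel  # assumed import path

model = OpenAIModel(model_id="gpt-4o")  # assumed configuration
request = model.format_request(
    messages=[{"role": "user", "content": [{"text": "Hi there"}]}],
    system_prompt_content=[{"text": "Answer in one sentence."}],
)
# request["messages"][0] is expected to be the formatted system message:
# {"role": "system", "content": "Answer in one sentence."}
```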