
AsyncSimpleOpenai

Async Simple OpenAI API wrapper

This is the async version of the Simple OpenAI API wrapper; it uses the aiohttp library.

It is intended for use with asyncio applications. If you are not using asyncio, you should use the Simple OpenAI API wrapper instead.

AsyncSimpleOpenai

Async Simple OpenAI API wrapper

This class implements the Async Simple OpenAI API wrapper.

To use this class, you need an OpenAI API key. You can get one from OpenAI (https://platform.openai.com).

An optional storage path can be provided. If one is given, chat messages are stored in that directory; otherwise, chat messages are not persisted.

Parameters:

    api_key (str): Your OpenAI API key. Required.
    system_message (str): The system message to add to the start of the chat. Required.
    storage_path (Path, optional): The path to the storage directory. Defaults to None.
    timezone (str, optional): The timezone to use for the chat messages. Defaults to 'UTC'.
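
As a minimal construction sketch (the API key value is a placeholder; storage_path is omitted, so chat messages are not persisted, and the timezone overrides the 'UTC' default):

from simple_openai import AsyncSimpleOpenai

# Placeholder key; substitute your own OpenAI API key
api_key = "YOUR_OPENAI_API_KEY"

# No storage_path, so chats are kept in memory only
client = AsyncSimpleOpenai(
    api_key,
    system_message="You are a helpful chatbot.",
    timezone="Europe/London",
)

The fuller example below also enables chat storage and runs two requests concurrently.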

Example

from simple_openai import AsyncSimpleOpenai
from pathlib import Path
import asyncio

# Your OpenAI API key (placeholder value)
api_key = "YOUR_OPENAI_API_KEY"

async def main():
    # Get the storage path
    storage_path = Path("/path/to/storage")

    # Create a system message
    system_message = "You are a helpful chatbot. You are very friendly and helpful. You are a good friend to have."

    # Create the client
    client = AsyncSimpleOpenai(api_key, system_message, storage_path)

    # Create tasks for the chat response and the image response
    tasks = [
        client.get_chat_response("Hello, how are you?", name="Bob", chat_id="Group 1"),
        client.get_image_url("A cat"),
    ]

    # Wait for the tasks to complete
    for task in asyncio.as_completed(tasks):
        # Get the result
        result = await task

        # Print the result
        if result.success:
            # Print the message
            print(f'Success: {result.message}')
        else:
            # Print the error
            print(f'Error: {result.message}')

if __name__ == "__main__":
    # Run the main function
    asyncio.run(main())
Source code in src/simple_openai/async_simple_openai.py
class AsyncSimpleOpenai:
    """Async Simple OpenAI API wrapper

    This class implements the Async Simple OpenAI API wrapper.

    To use this class, you need to have an OpenAI API key. You can get one from [Openai](https://platform.openai.com).

    An optional storage path can be provided.  If a storage path is provided, the chat messages will be stored in the directory specified by the storage path.  If no storage path is provided, the chat messages will not be stored.

    Args:
        api_key (str): Your OpenAI API key
        system_message (str): The system message to add to the start of the chat
        storage_path (Path, optional): The path to the storage directory. Defaults to None.
        timezone (str, optional): The timezone to use for the chat messages. Defaults to 'UTC'.

    !!!Example
        ```python
        from simple_openai import AsyncSimpleOpenai
        import asyncio

        async def main():
            # Get the storage path
            storage_path = Path("/path/to/storage")

            # Create a system message
            system_message = "You are a helpful chatbot. You are very friendly and helpful. You are a good friend to have."

            # Create the client
            client = AsyncSimpleOpenai(api_key, system_message, storage_path)

            # Create tasks for the chat response and the image response
            tasks = [
                client.get_chat_response("Hello, how are you?", name="Bob", chat_id="Group 1"),
                client.get_image_url("A cat"),
            ]

            # Wait for the tasks to complete
            for task in asyncio.as_completed(tasks):
                # Get the result
                result = await task

                # Print the result
                if result.success:
                    # Print the message
                    print(f'Success: {result.message}')
                else:
                    # Print the error
                    print(f'Error: {result.message}')

        if __name__ == "__main__":
            # Run the main function
            asyncio.run(main())
        ```
    """

    def __init__(
        self,
        api_key: str,
        system_message: str,
        storage_path: Path | None = None,
        timezone: str = "UTC",
    ) -> None:
        self._headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {api_key}",
        }

        # Create the chat manager
        self._chat = chat_manager.ChatManager(
            system_message, storage_path=storage_path, timezone=timezone
        )

        # Create the tool manager
        self._tool_manager = tool_manager.ToolManager()

    def update_system_message(self, system_message: str) -> None:
        """Update the system message

        Args:
            system_message (str): The new system message
        """
        self._chat.update_system_message(system_message)

    def add_tool(
        self, tool_definition: open_ai_models.OpenAITool, function: Callable
    ) -> None:
        """Add a tool to the tool manager

        Args:
            tool_definition (open_ai_models.OpenAITool): The tool definition
            function (Callable): The function to call
        """
        self._tool_manager.add_tool(tool_definition, function)

    async def get_chat_response(
        self,
        prompt: str,
        name: str,
        chat_id: str = constants.DEFAULT_CHAT_ID,
        add_date_time: bool = False,
    ) -> SimpleOpenaiResponse:
        """Get a chat response from OpenAI

        An optional chat ID can be provided.  If a chat ID is provided, the chat will be continued from the chat with the specified ID.  If no chat ID is provided, all messages will be mixed into a single list.

        Args:
            prompt (str): The prompt to use for the chat response
            name (str): The name of the user
            chat_id (str, optional): The ID of the chat to continue. Defaults to DEFAULT_CHAT_ID.
            add_date_time (bool, optional): Whether to add the date and time to the message. Defaults to False.

        Returns:
            SimpleOpenaiResponse: The chat response, the value of `success` should be checked before using the value of `message`

        """
        # Add the message to the chat
        messages = self._chat.add_message(
            open_ai_models.ChatMessage(role="user", content=prompt, name=name),
            chat_id=chat_id,
            add_date_time=add_date_time,
        ).messages

        # Create the request body
        request_body = open_ai_models.ChatRequest(
            messages=messages,
            tools=self._tool_manager.get_json_tool_list(),
            tool_choice="auto",
        )

        # Delete the tools from the request body if there are no tools
        if request_body.tools is None:
            del request_body.tools
            del request_body.tool_choice

        # Open a session
        async with aiohttp.ClientSession(
            headers=self._headers, base_url=constants.BASE_URL
        ) as session:
            # Send the request
            async with session.post(
                constants.CHAT_URL, json=request_body.model_dump()
            ) as response1:
                # Check the status code
                if response1.status == 200:
                    # Get the response content
                    response_text = await response1.text()

                    # Parse the response body
                    response_body = open_ai_models.ChatResponse.model_validate_json(
                        response_text
                    )

                    # Check if a function was called
                    if (
                        response_body.choices[0].finish_reason
                        == constants.OPEN_AI_TOOL_CALLS
                        and response_body.choices[0].message.tool_calls is not None
                    ):
                        # Call the function
                        new_prompt = await self._tool_manager.async_call_function(
                            response_body.choices[0].message.tool_calls[0].function.name,
                            **json.loads(response_body.choices[0].message.tool_calls[0].function.arguments),
                        )

                        # Add the response to the chat
                        self._chat.add_message(
                            open_ai_models.ChatMessage(
                                role="assistant",
                                content=response_body.choices[0]
                                .message.tool_calls[0]
                                .function.model_dump_json(),
                                name="Botto",
                            ),
                            chat_id=chat_id,
                            add_date_time=add_date_time,
                        )

                        # Add the message to the chat
                        messages = self._chat.add_message(
                            open_ai_models.ChatMessage(
                                role="function", content=new_prompt, name="Botto"
                            ),
                            chat_id=chat_id,
                            add_date_time=add_date_time,
                        ).messages

                        # Create the request body
                        request_body = open_ai_models.ChatRequest(
                            messages=messages,
                            tools=self._tool_manager.get_json_tool_list(),
                            tool_choice="none",
                        )

                        # Send the request
                        async with session.post(
                            constants.CHAT_URL, json=request_body.model_dump()
                        ) as response2:
                            # Check the status code
                            if response2.status == 200:
                                # Get the response content
                                response_text = await response2.text()

                                # Parse the response body
                                response_body = (
                                    open_ai_models.ChatResponse.model_validate_json(
                                        response_text
                                    )
                                )

                                # Create the response
                                if response_body.choices[0].message.content is not None:
                                    open_ai_response = SimpleOpenaiResponse(
                                        True, response_body.choices[0].message.content
                                    )
                                else:
                                    open_ai_response = SimpleOpenaiResponse(
                                        True, "No response"
                                    )

                                # Add the response to the chat
                                self._chat.add_message(
                                    open_ai_models.ChatMessage(
                                        role="assistant",
                                        content=open_ai_response.message,
                                        name="Botto",
                                    ),
                                    chat_id=chat_id,
                                    add_date_time=add_date_time,
                                )
                            else:
                                # Parse the error response body
                                response_body = (
                                    open_ai_models.ErrorResponse.model_validate_json(
                                        await response2.text()
                                    )
                                )

                                # Create the response
                                open_ai_response = SimpleOpenaiResponse(
                                    False, response_body.error.message
                                )
                    else:
                        # Create the response
                        if response_body.choices[0].message.content is not None:
                            open_ai_response = SimpleOpenaiResponse(
                                True, response_body.choices[0].message.content
                            )
                        else:
                            open_ai_response = SimpleOpenaiResponse(True, "No response")

                        # Add the response to the chat
                        self._chat.add_message(
                            open_ai_models.ChatMessage(
                                role="assistant",
                                content=open_ai_response.message,
                                name="Botto",
                            ),
                            chat_id=chat_id,
                            add_date_time=add_date_time,
                        )
                else:
                    # Parse the error response body
                    response_body = open_ai_models.ErrorResponse.model_validate_json(
                        await response1.text()
                    )

                    # Create the response
                    open_ai_response = SimpleOpenaiResponse(
                        False, response_body.error.message
                    )

                # Return the response
                return open_ai_response

    async def get_image_url(
        self, prompt: str, style: str = "vivid"
    ) -> SimpleOpenaiResponse:
        """Get an image response from OpenAI

        Args:
            prompt (str): The prompt to use
            style (str, optional): The style of the image. Defaults to 'vivid'.

        Returns:
            SimpleOpenaiResponse: The image response, the value of `success` should be checked before using the value of `message`
        """

        # Create the request body
        request_body = open_ai_models.ImageRequest(prompt=prompt, style=style)

        # Open a session
        async with aiohttp.ClientSession(
            headers=self._headers, base_url=constants.BASE_URL
        ) as session:
            # Send the request
            async with session.post(
                constants.IMAGE_URL, json=request_body.model_dump()
            ) as response:
                # Check the status code
                if response.status == 200:
                    # Parse the response body
                    response_body = open_ai_models.ImageResponse.model_validate_json(
                        await response.text()
                    )

                    # Create the response
                    response = SimpleOpenaiResponse(True, response_body.data[0].url)
                else:
                    # Parse the error response body
                    response_body = open_ai_models.ErrorResponse.model_validate_json(
                        await response.text()
                    )

                    # Create the response
                    response = SimpleOpenaiResponse(False, response_body.error.message)

                # Return the response
                return response

    def get_chat_history(self, chat_id: str) -> str:
        """Get the chat history

        Args:
            chat_id (str): The ID of the chat

        Returns:
            str: The chat history
        """
        # Get the chat history
        chat_history = self._chat.get_chat(chat_id)

        # Return the chat history
        return chat_history

    def get_truncated_chat_history(self, chat_id: str) -> str:
        """Get the truncated chat history, limited to the last 4,000 characters

        Args:
            chat_id (str): The ID of the chat

        Returns:
            str: The truncated chat history
        """
        # Get the chat history
        chat_history = self._chat.get_truncated_chat(chat_id)

        # Return the chat history
        return chat_history

add_tool(tool_definition, function)

Add a tool to the tool manager

Parameters:

    tool_definition (OpenAITool): The tool definition. Required.
    function (Callable): The function to call. Required.
Source code in src/simple_openai/async_simple_openai.py
def add_tool(
    self, tool_definition: open_ai_models.OpenAITool, function: Callable
) -> None:
    """Add a tool to the tool manager

    Args:
        tool_definition (open_ai_models.OpenAITool): The tool definition
        function (Callable): The function to call
    """
    self._tool_manager.add_tool(tool_definition, function)

get_chat_history(chat_id)

Get the chat history

Parameters:

    chat_id (str): The ID of the chat. Required.

Returns:

    str: The chat history.

Source code in src/simple_openai/async_simple_openai.py
def get_chat_history(self, chat_id: str) -> str:
    """Get the chat history

    Args:
        chat_id (str): The ID of the chat

    Returns:
        str: The chat history
    """
    # Get the chat history
    chat_history = self._chat.get_chat(chat_id)

    # Return the chat history
    return chat_history
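
A minimal usage sketch (placeholder API key; "Group 1" matches the chat ID used in the class example above):

from simple_openai import AsyncSimpleOpenai

# Placeholder key; substitute your own OpenAI API key
client = AsyncSimpleOpenai("YOUR_OPENAI_API_KEY", "You are a helpful chatbot.")

# get_chat_history is synchronous, so no await is needed
history = client.get_chat_history("Group 1")
print(history)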

get_chat_response(prompt, name, chat_id=constants.DEFAULT_CHAT_ID, add_date_time=False) async

Get a chat response from OpenAI

An optional chat ID can be provided. If a chat ID is provided, the conversation with that ID is continued; if no chat ID is provided, all messages are mixed into a single list.

Parameters:

    prompt (str): The prompt to use for the chat response. Required.
    name (str): The name of the user. Required.
    chat_id (str, optional): The ID of the chat to continue. Defaults to DEFAULT_CHAT_ID.
    add_date_time (bool, optional): Whether to add the date and time to the message. Defaults to False.

Returns:

    SimpleOpenaiResponse: The chat response; the value of success should be checked before using the value of message.

Source code in src/simple_openai/async_simple_openai.py
async def get_chat_response(
    self,
    prompt: str,
    name: str,
    chat_id: str = constants.DEFAULT_CHAT_ID,
    add_date_time: bool = False,
) -> SimpleOpenaiResponse:
    """Get a chat response from OpenAI

    An optional chat ID can be provided.  If a chat ID is provided, the chat will be continued from the chat with the specified ID.  If no chat ID is provided, all messages will be mixed into a single list.

    Args:
        prompt (str): The prompt to use for the chat response
        name (str): The name of the user
        chat_id (str, optional): The ID of the chat to continue. Defaults to DEFAULT_CHAT_ID.
        add_date_time (bool, optional): Whether to add the date and time to the message. Defaults to False.

    Returns:
        SimpleOpenaiResponse: The chat response, the value of `success` should be checked before using the value of `message`

    """
    # Add the message to the chat
    messages = self._chat.add_message(
        open_ai_models.ChatMessage(role="user", content=prompt, name=name),
        chat_id=chat_id,
        add_date_time=add_date_time,
    ).messages

    # Create the request body
    request_body = open_ai_models.ChatRequest(
        messages=messages,
        tools=self._tool_manager.get_json_tool_list(),
        tool_choice="auto",
    )

    # Delete the tools from the request body if there are no tools
    if request_body.tools is None:
        del request_body.tools
        del request_body.tool_choice

    # Open a session
    async with aiohttp.ClientSession(
        headers=self._headers, base_url=constants.BASE_URL
    ) as session:
        # Send the request
        async with session.post(
            constants.CHAT_URL, json=request_body.model_dump()
        ) as response1:
            # Check the status code
            if response1.status == 200:
                # Get the response content
                response_text = await response1.text()

                # Parse the response body
                response_body = open_ai_models.ChatResponse.model_validate_json(
                    response_text
                )

                # Check if a function was called
                if (
                    response_body.choices[0].finish_reason
                    == constants.OPEN_AI_TOOL_CALLS
                    and response_body.choices[0].message.tool_calls is not None
                ):
                    # Call the function
                    new_prompt = await self._tool_manager.async_call_function(
                        response_body.choices[0].message.tool_calls[0].function.name,
                        **json.loads(response_body.choices[0].message.tool_calls[0].function.arguments),
                    )

                    # Add the response to the chat
                    self._chat.add_message(
                        open_ai_models.ChatMessage(
                            role="assistant",
                            content=response_body.choices[0]
                            .message.tool_calls[0]
                            .function.model_dump_json(),
                            name="Botto",
                        ),
                        chat_id=chat_id,
                        add_date_time=add_date_time,
                    )

                    # Add the message to the chat
                    messages = self._chat.add_message(
                        open_ai_models.ChatMessage(
                            role="function", content=new_prompt, name="Botto"
                        ),
                        chat_id=chat_id,
                        add_date_time=add_date_time,
                    ).messages

                    # Create the request body
                    request_body = open_ai_models.ChatRequest(
                        messages=messages,
                        tools=self._tool_manager.get_json_tool_list(),
                        tool_choice="none",
                    )

                    # Send the request
                    async with session.post(
                        constants.CHAT_URL, json=request_body.model_dump()
                    ) as response2:
                        # Check the status code
                        if response2.status == 200:
                            # Get the response content
                            response_text = await response2.text()

                            # Parse the response body
                            response_body = (
                                open_ai_models.ChatResponse.model_validate_json(
                                    response_text
                                )
                            )

                            # Create the response
                            if response_body.choices[0].message.content is not None:
                                open_ai_response = SimpleOpenaiResponse(
                                    True, response_body.choices[0].message.content
                                )
                            else:
                                open_ai_response = SimpleOpenaiResponse(
                                    True, "No response"
                                )

                            # Add the response to the chat
                            self._chat.add_message(
                                open_ai_models.ChatMessage(
                                    role="assistant",
                                    content=open_ai_response.message,
                                    name="Botto",
                                ),
                                chat_id=chat_id,
                                add_date_time=add_date_time,
                            )
                        else:
                            # Parse the error response body
                            response_body = (
                                open_ai_models.ErrorResponse.model_validate_json(
                                    await response2.text()
                                )
                            )

                            # Create the response
                            open_ai_response = SimpleOpenaiResponse(
                                False, response_body.error.message
                            )
                else:
                    # Create the response
                    if response_body.choices[0].message.content is not None:
                        open_ai_response = SimpleOpenaiResponse(
                            True, response_body.choices[0].message.content
                        )
                    else:
                        open_ai_response = SimpleOpenaiResponse(True, "No response")

                    # Add the response to the chat
                    self._chat.add_message(
                        open_ai_models.ChatMessage(
                            role="assistant",
                            content=open_ai_response.message,
                            name="Botto",
                        ),
                        chat_id=chat_id,
                        add_date_time=add_date_time,
                    )
            else:
                # Parse the error response body
                response_body = open_ai_models.ErrorResponse.model_validate_json(
                    await response1.text()
                )

                # Create the response
                open_ai_response = SimpleOpenaiResponse(
                    False, response_body.error.message
                )

            # Return the response
            return open_ai_response
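
A short usage sketch (placeholder API key; reusing the same chat_id continues the same conversation):

import asyncio
from simple_openai import AsyncSimpleOpenai

async def main():
    # Placeholder key; substitute your own OpenAI API key
    client = AsyncSimpleOpenai("YOUR_OPENAI_API_KEY", "You are a helpful chatbot.")

    # First message in the chat "Group 1"
    first = await client.get_chat_response("Hello, how are you?", name="Bob", chat_id="Group 1")

    # Same chat_id, so this continues the conversation above
    second = await client.get_chat_response("What did I just ask you?", name="Bob", chat_id="Group 1")

    for result in (first, second):
        if result.success:
            print(f"Success: {result.message}")
        else:
            print(f"Error: {result.message}")

asyncio.run(main())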

get_image_url(prompt, style='vivid') async

Get an image response from OpenAI

Parameters:

    prompt (str): The prompt to use. Required.
    style (str, optional): The style of the image. Defaults to 'vivid'.

Returns:

    SimpleOpenaiResponse: The image response; the value of success should be checked before using the value of message.

Source code in src/simple_openai/async_simple_openai.py
async def get_image_url(
    self, prompt: str, style: str = "vivid"
) -> SimpleOpenaiResponse:
    """Get an image response from OpenAI

    Args:
        prompt (str): The prompt to use
        style (str, optional): The style of the image. Defaults to 'vivid'.

    Returns:
        SimpleOpenaiResponse: The image response, the value of `success` should be checked before using the value of `message`
    """

    # Create the request body
    request_body = open_ai_models.ImageRequest(prompt=prompt, style=style)

    # Open a session
    async with aiohttp.ClientSession(
        headers=self._headers, base_url=constants.BASE_URL
    ) as session:
        # Send the request
        async with session.post(
            constants.IMAGE_URL, json=request_body.model_dump()
        ) as response:
            # Check the status code
            if response.status == 200:
                # Parse the response body
                response_body = open_ai_models.ImageResponse.model_validate_json(
                    await response.text()
                )

                # Create the response
                response = SimpleOpenaiResponse(True, response_body.data[0].url)
            else:
                # Parse the error response body
                response_body = open_ai_models.ErrorResponse.model_validate_json(
                    await response.text()
                )

                # Create the response
                response = SimpleOpenaiResponse(False, response_body.error.message)

            # Return the response
            return response
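
A short usage sketch (placeholder API key; the default 'vivid' style is used, and on success the message field holds the image URL):

import asyncio
from simple_openai import AsyncSimpleOpenai

async def main():
    # Placeholder key; substitute your own OpenAI API key
    client = AsyncSimpleOpenai("YOUR_OPENAI_API_KEY", "You are a helpful chatbot.")

    # Request an image and check the result before using the URL
    result = await client.get_image_url("A cat sitting on a windowsill")

    if result.success:
        print(f"Image URL: {result.message}")
    else:
        print(f"Error: {result.message}")

asyncio.run(main())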

get_truncated_chat_history(chat_id)

Get the truncated chat history, limited to the last 4,000 characters

Parameters:

    chat_id (str): The ID of the chat. Required.

Returns:

    str: The truncated chat history.

Source code in src/simple_openai/async_simple_openai.py
def get_truncated_chat_history(self, chat_id: str) -> str:
    """Get the truncated chat history, limited to the last 4,000 characters

    Args:
        chat_id (str): The ID of the chat

    Returns:
        str: The truncated chat history
    """
    # Get the chat history
    chat_history = self._chat.get_truncated_chat(chat_id)

    # Return the chat history
    return chat_history

update_system_message(system_message)

Update the system message

Parameters:

    system_message (str): The new system message. Required.
Source code in src/simple_openai/async_simple_openai.py
def update_system_message(self, system_message: str) -> None:
    """Update the system message

    Args:
        system_message (str): The new system message
    """
    self._chat.update_system_message(system_message)
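
A minimal usage sketch (placeholder API key; the updated system message is used by the chat manager for subsequent chat responses):

from simple_openai import AsyncSimpleOpenai

# Placeholder key; substitute your own OpenAI API key
client = AsyncSimpleOpenai("YOUR_OPENAI_API_KEY", "You are a helpful chatbot.")

# Replace the system message for future chat requests
client.update_system_message("You are a terse assistant that answers in one sentence.")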