11import importlib
2- import openai
3- from enum import Enum
42import logging
3+ from enum import Enum
54from typing import Annotated , List , Literal , Optional , Union
65
6+ import openai
77from asgiref .sync import async_to_sync
8+ from openai import OpenAI
89from pydantic import BaseModel , Field , confloat , conint
910
10- from llmstack .common .blocks .llm .openai import OpenAIChatCompletionsAPIProcessorConfiguration
11- from llmstack .processors .providers .api_processor_interface import ApiProcessorInterface , ApiProcessorSchema
12- from llmstack .processors .providers .api_processor_interface import ApiProcessorInterface , ApiProcessorSchema
11+ from llmstack .common .blocks .llm .openai import (
12+ OpenAIChatCompletionsAPIProcessorConfiguration ,
13+ )
14+ from llmstack .processors .providers .api_processor_interface import (
15+ ApiProcessorInterface ,
16+ ApiProcessorSchema ,
17+ )
1318
1419logger = logging .getLogger (__name__ )
1520
21+
1622class Role (str , Enum ):
1723 SYSTEM = 'system'
1824 USER = 'user'
@@ -21,42 +27,53 @@ class Role(str, Enum):
2127 def __str__ (self ):
2228 return self .value
2329
30+
class ChatCompletionsVisionModel(str, Enum):
    """OpenAI model identifiers usable with the vision chat-completions API."""

    GPT_4_Vision = 'gpt-4-vision-preview'

    def __str__(self):
        # Render as the bare model id string rather than the enum repr.
        return str(self.value)
29-
36+
37+
class TextMessage(BaseModel):
    """A plain-text part of a chat message."""

    # Discriminator value for the Message union.
    type: Literal["text"]
    text: str = Field(default='', description='The message text.')
35-
43+
44+
class UrlImageMessage(BaseModel):
    """An image part of a chat message, carried as a data URI."""

    # Discriminator value for the Message union.
    type: Literal["image_url"]
    image_url: str = Field(default='', description='The image data URI.')
4150
# Discriminated union of message parts: pydantic selects TextMessage vs
# UrlImageMessage by inspecting each part's `type` field.
Message = Annotated[
    Union[TextMessage, UrlImageMessage],
    Field(discriminator='type'),
]
54+
55+
class ChatMessage(ApiProcessorSchema):
    """One chat turn: the sender's role plus the message parts it carries."""

    role: Optional[Role] = Field(
        default=Role.USER, description="The role of the message sender. Can be 'user' or 'assistant' or 'system'.",
    )
    # default_factory avoids the mutable-default pattern (a single shared
    # list object); behaviorally equivalent to default=[] under pydantic.
    content: List[Union[TextMessage, UrlImageMessage]] = Field(
        default_factory=list, description='The message text.')
62+
63+
class ChatCompletionsVisionInput(ApiProcessorSchema):
    """Processor input: an optional system message plus the user's message parts."""

    system_message: Optional[str] = Field(
        default='', description='A message from the system, which will be prepended to the chat history.', widget='textarea',
    )
    # default_factory avoids the mutable-default pattern (a single shared
    # list object); behaviorally equivalent to default=[] under pydantic.
    messages: List[Message] = Field(
        default_factory=list, description='A list of messages, each with a role and message text.'
    )
56-
71+
72+
class ChatCompletionsVisionOutput(ApiProcessorSchema):
    """Processor output: the text the model generated."""

    result: str = Field(
        default='', description='The model-generated message.')
59-
75+
76+
6077class ChatCompletionsVisionConfiguration (OpenAIChatCompletionsAPIProcessorConfiguration , ApiProcessorSchema ):
6178 model : ChatCompletionsVisionModel = Field (
6279 default = ChatCompletionsVisionModel .GPT_4_Vision ,
@@ -83,6 +100,7 @@ class ChatCompletionsVisionConfiguration(OpenAIChatCompletionsAPIProcessorConfig
83100 default = False , description = "Automatically prune chat history. This is only applicable if 'retain_history' is set to 'true'." ,
84101 )
85102
103+
86104class ChatCompletionsVision (ApiProcessorInterface [ChatCompletionsVisionInput , ChatCompletionsVisionOutput , ChatCompletionsVisionConfiguration ]):
87105 """
88106 OpenAI Chat Completions with vision API
@@ -114,32 +132,32 @@ def session_data_to_persist(self) -> dict:
114132 def process (self ) -> dict :
115133 importlib .reload (openai )
116134 output_stream = self ._output_stream
117-
135+
118136 chat_history = self ._chat_history if self ._config .retain_history else []
119137 messages = []
120- messages .append ({'role' : 'system' , 'content' : self ._input .system_message })
121-
138+ messages .append (
139+ {'role' : 'system' , 'content' : self ._input .system_message })
140+
122141 for msg in chat_history :
123142 messages .append (msg )
124-
125- messages .append ({'role' : 'user' , 'content' : [msg . dict () for msg in self . _input . messages ]})
126-
127- openai . api_key = self . _env [ 'openai_api_key' ]
128- result = openai . chat . completions . create (
129- model = self . _config . model ,
130- messages = messages ,
131- temperature = self . _config . temperature ,
132- stream = True ,
133- )
134-
135-
143+
144+ messages .append ({'role' : 'user' , 'content' : [
145+ msg . dict () for msg in self . _input . messages ]})
146+
147+ openai_client = OpenAI ( api_key = self . _env [ 'openai_api_key' ])
148+ result = openai_client . chat . completions . create (
149+ model = self . _config . model ,
150+ messages = messages ,
151+ temperature = self . _config . temperature ,
152+ stream = True ,
153+ )
154+
136155 for data in result :
137- if data .get ( ' object' ) and data . get ( 'object' ) == 'chat.completion.chunk' and data . get ( 'choices' ) and len (data .get ( ' choices' )) > 0 and data [ ' choices' ] [0 ].get ( ' delta' ) and data [ ' choices' ] [0 ][ ' delta' ]. get ( ' content' ) :
156+ if data .object == 'chat.completion.chunk' and len (data .choices ) > 0 and data . choices [0 ].delta and data . choices [0 ]. delta . content :
138157 async_to_sync (output_stream .write )(
139158 ChatCompletionsVisionOutput (
140- result = data [ ' choices' ] [0 ][ ' delta' ][ ' content' ]
159+ result = data . choices [0 ]. delta . content
141160 ))
142-
143161
144162 output = self ._output_stream .finalize ()
145163
0 commit comments