Commit 6632729

Merge branch 'master' into agentic-streaming
2 parents: 7121f98 + cd34063

File tree

1 file changed: +11 −5 lines

server/utils/AiProviders/openAi/index.js

Lines changed: 11 additions & 5 deletions
@@ -168,11 +168,11 @@ class OpenAiLLM {
     return {
       textResponse: result.output.output_text,
       metrics: {
-        prompt_tokens: usage.prompt_tokens || 0,
-        completion_tokens: usage.completion_tokens || 0,
+        prompt_tokens: usage.input_tokens || 0,
+        completion_tokens: usage.output_tokens || 0,
         total_tokens: usage.total_tokens || 0,
-        outputTps: usage.completion_tokens
-          ? usage.completion_tokens / result.duration
+        outputTps: usage.output_tokens
+          ? usage.output_tokens / result.duration
           : 0,
         duration: result.duration,
       },
@@ -224,6 +224,7 @@ class OpenAiLLM {
       if (token) {
         fullText += token;
         if (!hasUsageMetrics) usage.completion_tokens++;
+
         writeResponseChunk(response, {
           uuid,
           sources: [],
@@ -237,7 +238,12 @@ class OpenAiLLM {
       const { response: res } = chunk;
       if (res.hasOwnProperty("usage") && !!res.usage) {
         hasUsageMetrics = true;
-        usage = { ...usage, ...res.usage };
+        usage = {
+          ...usage,
+          prompt_tokens: res.usage?.input_tokens || 0,
+          completion_tokens: res.usage?.output_tokens || 0,
+          total_tokens: res.usage?.total_tokens || 0,
+        };
       }

       writeResponseChunk(response, {
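
Note: both hunks apply the same field mapping — the OpenAI Responses API appears to report token usage as input_tokens / output_tokens / total_tokens, while this provider's metrics keep the prompt_tokens / completion_tokens names. A minimal sketch of that mapping follows; the normalizeUsage helper name is illustrative and is not part of the committed file.

// Illustrative helper (not in the committed file): maps a Responses API
// usage object onto the metric names this provider reports.
function normalizeUsage(usage = {}) {
  return {
    prompt_tokens: usage.input_tokens || 0,
    completion_tokens: usage.output_tokens || 0,
    total_tokens: usage.total_tokens || 0,
  };
}

// Example:
// normalizeUsage({ input_tokens: 12, output_tokens: 34, total_tokens: 46 })
// -> { prompt_tokens: 12, completion_tokens: 34, total_tokens: 46 }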
