@@ -1,7 +1,12 @@
 import OpenAI from "openai";
 import * as ToolManager from "../tools/toolManager.ts";
 import { extendError, type Result } from "../utils/result.ts";
-import type { StopReason, Provider, ProviderMessage } from "./provider.ts";
+import type {
+  StopReason,
+  Provider,
+  ProviderMessage,
+  Usage,
+} from "./provider.ts";
 import { assertUnreachable } from "../utils/assertUnreachable.ts";
 import type { ToolName, ToolRequestId } from "../tools/toolManager.ts";
 import type { Nvim } from "nvim-node";
@@ -46,6 +51,7 @@ export class OpenAIProvider implements Provider {
   ): Promise<{
     toolRequests: Result<ToolManager.ToolRequest, { rawRequest: unknown }>[];
     stopReason: StopReason;
+    usage: Usage;
   }> {
     const openaiMessages: OpenAI.ChatCompletionMessageParam[] = [
       {
@@ -148,7 +154,9 @@ export class OpenAIProvider implements Provider {
 
       const toolRequests = [];
       let stopReason: StopReason | undefined;
+      let lastChunk: OpenAI.ChatCompletionChunk | undefined;
       for await (const chunk of stream) {
+        lastChunk = chunk;
         const choice = chunk.choices[0];
         if (choice.delta.content) {
           onText(choice.delta.content);
@@ -229,6 +237,15 @@ export class OpenAIProvider implements Provider {
           return extendError(result, { rawRequest: req });
         }),
         stopReason: stopReason || "end_turn",
+        usage: lastChunk?.usage
+          ? {
+              inputTokens: lastChunk.usage.prompt_tokens,
+              outputTokens: lastChunk.usage.completion_tokens,
+            }
+          : {
+              inputTokens: 0,
+              outputTokens: 0,
+            },
       };
     } finally {
       this.request = undefined;
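A note on what the new `usage` field relies on: with the OpenAI Chat Completions streaming API, `chunk.usage` is only populated on the final chunk of the stream, and only when the request sets `stream_options: { include_usage: true }`; otherwise every chunk's `usage` is null and the zero fallback above is all callers will ever see. The sketch below shows the same last-chunk pattern in isolation, assuming the official `openai` npm client and the `Usage` shape implied by the mapping (`{ inputTokens, outputTokens }`; the type itself is not shown in this diff). `streamWithUsage`, the model name, and the prompt are placeholders, not part of this change.

```typescript
// Sketch only: reading token usage from the last chunk of a streamed completion.
import OpenAI from "openai";

async function streamWithUsage(): Promise<{
  text: string;
  inputTokens: number;
  outputTokens: number;
}> {
  const client = new OpenAI(); // reads OPENAI_API_KEY from the environment
  const stream = await client.chat.completions.create({
    model: "gpt-4o", // placeholder model
    messages: [{ role: "user", content: "hello" }], // placeholder prompt
    stream: true,
    // Without this option, chunk.usage is null on every chunk and the
    // zero fallback in the diff above is the only possible result.
    stream_options: { include_usage: true },
  });

  let text = "";
  let lastChunk: OpenAI.ChatCompletionChunk | undefined;
  for await (const chunk of stream) {
    lastChunk = chunk;
    // With include_usage, the final chunk carries usage but an empty
    // choices array, so the choices[0] access needs a guard.
    const choice = chunk.choices[0];
    if (choice?.delta.content) {
      text += choice.delta.content;
    }
  }

  return {
    text,
    inputTokens: lastChunk?.usage?.prompt_tokens ?? 0,
    outputTokens: lastChunk?.usage?.completion_tokens ?? 0,
  };
}
```

The hunks shown don't reveal whether the request here actually sets `stream_options`, so two things are worth checking: if it doesn't, `lastChunk?.usage` is always undefined and the reported usage is always zero; if it does, the unguarded `chunk.choices[0]` in the loop will hit the final usage-only chunk, whose `choices` array is empty, and `choice.delta` will throw.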