dev: generalize how OpenAIUtil handles usage

The current implementation would break for XAIService because it gets
cumulative usage reports as the stream goes on (whereas OpenAI and
DeepSeek only provide this at the end of the stream).
KernelDeimos
2025-02-06 14:32:21 -05:00
parent 16e2729b1d
commit f7147ab312


@@ -66,14 +66,12 @@ module.exports = class OpenAIUtil {
         let mode = 'text';
         const tool_call_blocks = [];
+        let last_usage = null;
         for await ( const chunk of completion ) {
-            if ( chunk.usage ) {
-                usage_promise.resolve({
-                    input_tokens: chunk.usage.prompt_tokens,
-                    output_tokens: chunk.usage.completion_tokens,
-                });
-                continue;
-            }
+            if ( process.env.DEBUG ) {
+                console.log(`AI CHUNK`, chunk);
+            }
+            if ( chunk.usage ) last_usage = chunk.usage;
             if ( chunk.choices.length < 1 ) continue;
             const choice = chunk.choices[0];
@@ -108,6 +106,10 @@ module.exports = class OpenAIUtil {
                 }
             }
         }
+        usage_promise.resolve({
+            input_tokens: last_usage.prompt_tokens,
+            output_tokens: last_usage.completion_tokens,
+        });
         if ( mode === 'text' ) textblock.end();
         if ( mode === 'tool' ) toolblock.end();