Skip to content

Commit c3c6d45

Browse files
committed
fix stats
1 parent 92031f0 commit c3c6d45

File tree

1 file changed: +6 additions, −7 deletions

src/inferencesh/models/llm.py

Lines changed: 6 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -238,15 +238,14 @@ def update_from_chunk(self, chunk: Dict[str, Any], timing: Any) -> None:
238238
"""Update response state from a chunk."""
239239
# Update usage stats if present
240240
if "usage" in chunk:
241-
print(chunk["usage"])
242241
usage = chunk["usage"]
243242
if usage is not None:
244-
self.usage_stats = {
245-
"prompt_tokens": usage.get("prompt_tokens", 0),
246-
"completion_tokens": usage.get("completion_tokens", 0),
247-
"total_tokens": usage.get("total_tokens", 0),
248-
"stop_reason": self.usage_stats["stop_reason"] # Preserve existing stop reason
249-
}
243+
# Update usage stats preserving existing values if not provided
244+
self.usage_stats.update({
245+
"prompt_tokens": usage.get("prompt_tokens", self.usage_stats["prompt_tokens"]),
246+
"completion_tokens": usage.get("completion_tokens", self.usage_stats["completion_tokens"]),
247+
"total_tokens": usage.get("total_tokens", self.usage_stats["total_tokens"])
248+
})
250249

251250
# Get the delta from the chunk
252251
delta = chunk.get("choices", [{}])[0]

0 commit comments

Comments (0)