feat: Log LLM response token usage (prompt/completion/total, content_len, finish_reason)
This commit is contained in:
parent
17347da87e
commit
0b0ca598b4
1 changed files with 14 additions and 0 deletions
|
|
@@ -140,6 +140,13 @@ class LLMClient:
|
|||
**kwargs,
|
||||
)
|
||||
raw = response.choices[0].message.content or ""
|
||||
usage = getattr(response, "usage", None)
|
||||
if usage:
|
||||
logger.info(
|
||||
"LLM response: prompt_tokens=%s, completion_tokens=%s, total=%s, content_len=%d, finish=%s",
|
||||
usage.prompt_tokens, usage.completion_tokens, usage.total_tokens,
|
||||
len(raw), response.choices[0].finish_reason,
|
||||
)
|
||||
if modality == "thinking":
|
||||
raw = strip_think_tags(raw)
|
||||
return raw
|
||||
|
|
@@ -161,6 +168,13 @@ class LLMClient:
|
|||
**kwargs,
|
||||
)
|
||||
raw = response.choices[0].message.content or ""
|
||||
usage = getattr(response, "usage", None)
|
||||
if usage:
|
||||
logger.info(
|
||||
"LLM response (fallback): prompt_tokens=%s, completion_tokens=%s, total=%s, content_len=%d, finish=%s",
|
||||
usage.prompt_tokens, usage.completion_tokens, usage.total_tokens,
|
||||
len(raw), response.choices[0].finish_reason,
|
||||
)
|
||||
if modality == "thinking":
|
||||
raw = strip_think_tags(raw)
|
||||
return raw
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue