[Open] tianshiyisi opened this issue 2 months ago
目前还没有提供这个功能。需要修改oai.py,自行用 tokenizer 统计,大致思路:
# NOTE: `messages` below refers to the `messages` input parameter of `_chat_no_stream` in oai.py.
# NOTE: `response` below refers to: response = self._chat_complete_create(model=self.model, messages=messages, stream=False, **generate_cfg)
from qwen_agent.utils.tokenization_qwen import tokenizer
from qwen_agent.utils.utils import build_text_completion_prompt
# Count input tokens by rebuilding the full text-completion prompt from the chat messages
# and tokenizing it with the Qwen tokenizer (approximates what the server actually saw).
input_tokens = tokenizer.count_tokens(build_text_completion_prompt(messages))
# Count output tokens from the first choice's message content of the non-streaming response.
# NOTE(review): assumes a non-streaming response object (stream=False); streaming chunks
# would need their deltas accumulated first — confirm against the calling code.
output_tokens = tokenizer.count_tokens(response.choices[0].message.content)
收到,多谢!
有什么办法能统计 function call 调用过程中输入和输出的 token 数吗?试了一下修改 oai.py 的代码,打印出的 chunk 里也没有相关信息。
root@vllm:~/benchmark# python3 fcall.py 请输入查询内容(输入 'quit' 退出):你好 ChatCompletionChunk(id='chat-a55bee23999c4e37ae2bc50b1bf4b491', choices=[Choice(delta=ChoiceDelta(content='', function_call=None, refusal=None, role='assistant', tool_calls=None), finish_reason=None, index=0, logprobs=None)], created=1726676479, model='vllm-qwen2-plus', object='chat.completion.chunk', service_tier=None, system_fingerprint=None, usage=None) ChatCompletionChunk(id='chat-a55bee23999c4e37ae2bc50b1bf4b491', choices=[Choice(delta=ChoiceDelta(content='', function_call=None, refusal=None, role=None, tool_calls=None), finish_reason=None, index=0, logprobs=None)], created=1726676479, model='vllm-qwen2-plus', object='chat.completion.chunk', service_tier=None, system_fingerprint=None, usage=None) ChatCompletionChunk(id='chat-a55bee23999c4e37ae2bc50b1bf4b491', choices=[Choice(delta=ChoiceDelta(content='', function_call=None, refusal=None, role=None, tool_calls=None), finish_reason=None, index=0, logprobs=None)], created=1726676479, model='vllm-qwen2-plus', object='chat.completion.chunk', service_tier=None, system_fingerprint=None, usage=None) ChatCompletionChunk(id='chat-a55bee23999c4e37ae2bc50b1bf4b491', choices=[Choice(delta=ChoiceDelta(content='', function_call=None, refusal=None, role=None, tool_calls=None), finish_reason=None, index=0, logprobs=None)], created=1726676479, model='vllm-qwen2-plus', object='chat.completion.chunk', service_tier=None, system_fingerprint=None, usage=None) ChatCompletionChunk(id='chat-a55bee23999c4e37ae2bc50b1bf4b491', choices=[Choice(delta=ChoiceDelta(content='你好!', function_call=None, refusal=None, role=None, tool_calls=None), finish_reason=None, index=0, logprobs=None)], created=1726676479, model='vllm-qwen2-plus', object='chat.completion.chunk', service_tier=None, system_fingerprint=None, usage=None) [{'role': 'assistant', 'content': '你好!'}] ChatCompletionChunk(id='chat-a55bee23999c4e37ae2bc50b1bf4b491', choices=[Choice(delta=ChoiceDelta(content='有什', 
function_call=None, refusal=None, role=None, tool_calls=None), finish_reason=None, index=0, logprobs=None)], created=1726676479, model='vllm-qwen2-plus', object='chat.completion.chunk', service_tier=None, system_fingerprint=None, usage=None) [{'role': 'assistant', 'content': '你好!有什'}] ChatCompletionChunk(id='chat-a55bee23999c4e37ae2bc50b1bf4b491', choices=[Choice(delta=ChoiceDelta(content='么', function_call=None, refusal=None, role=None, tool_calls=None), finish_reason=None, index=0, logprobs=None)], created=1726676479, model='vllm-qwen2-plus', object='chat.completion.chunk', service_tier=None, system_fingerprint=None, usage=None) [{'role': 'assistant', 'content': '你好!有什么'}] ChatCompletionChunk(id='chat-a55bee23999c4e37ae2bc50b1bf4b491', choices=[Choice(delta=ChoiceDelta(content='可以帮助你的?', function_call=None, refusal=None, role=None, tool_calls=None), finish_reason='stop', index=0, logprobs=None, stop_reason=None)], created=1726676479, model='vllm-qwen2-plus', object='chat.completion.chunk', service_tier=None, system_fingerprint=None, usage=None) [{'role': 'assistant', 'content': '你好!有什么可以帮助你的?'}]