From fb817a7d56c2a14197bfddb72db4bfc2cebda6a0 Mon Sep 17 00:00:00 2001
From: better629
Date: Mon, 4 Nov 2024 14:16:56 +0800
Subject: [PATCH] update aws claude token usage

---
 metagpt/utils/token_counter.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/metagpt/utils/token_counter.py b/metagpt/utils/token_counter.py
index 3b95335716..3bff53e34a 100644
--- a/metagpt/utils/token_counter.py
+++ b/metagpt/utils/token_counter.py
@@ -67,6 +67,8 @@
     "claude-2.0": {"prompt": 0.008, "completion": 0.024},
     "claude-2.1": {"prompt": 0.008, "completion": 0.024},
     "claude-3-sonnet-20240229": {"prompt": 0.003, "completion": 0.015},
+    "claude-3-5-sonnet": {"prompt": 0.003, "completion": 0.015},
+    "claude-3-5-sonnet-v2": {"prompt": 0.003, "completion": 0.015},  # alias of newer 3.5 sonnet
     "claude-3-5-sonnet-20240620": {"prompt": 0.003, "completion": 0.015},
     "claude-3-opus-20240229": {"prompt": 0.015, "completion": 0.075},
     "claude-3-haiku-20240307": {"prompt": 0.00025, "completion": 0.00125},
@@ -379,8 +381,12 @@ def count_input_tokens(messages, model="gpt-3.5-turbo-0125"):
     """Return the number of tokens used by a list of messages."""
     if "claude" in model:
+        # rough estimation for models newer than claude-2.1
         vo = anthropic.Client()
-        num_tokens = vo.count_tokens(str(messages))
+        num_tokens = 0
+        for message in messages:
+            for key, value in message.items():
+                num_tokens += vo.count_tokens(str(value))
         return num_tokens
     try:
         encoding = tiktoken.encoding_for_model(model)
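
Note (editor's sketch, not part of the patch): a minimal usage example of the updated helper, assuming the pricing table above is the module's TOKEN_COSTS dict with per-1K-token prices, that count_input_tokens is importable from metagpt.utils.token_counter, and that the installed anthropic SDK still exposes Client.count_tokens (removed in newer SDK releases). The message list is illustrative only.

    from metagpt.utils.token_counter import TOKEN_COSTS, count_input_tokens  # TOKEN_COSTS name assumed

    # Claude-style chat messages; with this patch each dict value (role and content)
    # is tokenized separately instead of tokenizing str(messages) as a whole.
    messages = [
        {"role": "user", "content": "Summarize the design doc in three bullet points."},
        {"role": "assistant", "content": "Sure. 1) ... 2) ... 3) ..."},
    ]

    model = "claude-3-5-sonnet-v2"  # resolves to the pricing entry added in the first hunk
    prompt_tokens = count_input_tokens(messages, model=model)  # rough estimate for Claude 3.x models
    prompt_cost = prompt_tokens * TOKEN_COSTS[model]["prompt"] / 1000  # assumes prices are per 1K tokens
    print(prompt_tokens, prompt_cost)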