diff --git a/src/Directory.Packages.props b/src/Directory.Packages.props
index 576aefe5..dfb7f712 100644
--- a/src/Directory.Packages.props
+++ b/src/Directory.Packages.props
@@ -19,7 +19,7 @@
-
+
diff --git a/src/Providers/Google/src/GoogleChatModel.Tokens.cs b/src/Providers/Google/src/GoogleChatModel.Tokens.cs
new file mode 100644
index 00000000..18033fab
--- /dev/null
+++ b/src/Providers/Google/src/GoogleChatModel.Tokens.cs
@@ -0,0 +1,24 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using GenerativeAI.Types;
+
+namespace LangChain.Providers.Google
+{
+ public partial class GoogleChatModel
+ {
+ public async Task<int> CountTokens(string text)
+ {
+ return await CountTokens(new Message[] { new Message(text, MessageRole.Human) }).ConfigureAwait(false);
+ }
+
+ public async Task<int> CountTokens(IEnumerable<Message> messages)
+ {
+ var response = await this.Api.CountTokens(new CountTokensRequest() { Contents = messages.Select(ToRequestMessage).ToArray() }).ConfigureAwait(false);
+
+ return response.TotalTokens;
+ }
+ }
+}
diff --git a/src/Providers/Google/src/GoogleChatModel.cs b/src/Providers/Google/src/GoogleChatModel.cs
index da1d26a5..850baca5 100644
--- a/src/Providers/Google/src/GoogleChatModel.cs
+++ b/src/Providers/Google/src/GoogleChatModel.cs
@@ -127,11 +127,16 @@ public override async Task<ChatResponse> GenerateAsync(
settings,
Settings,
provider.ChatSettings);
+ var usage = Usage.Empty;
if (usedSettings.UseStreaming == true)
{
var message = await StreamCompletionAsync(messages, cancellationToken).ConfigureAwait(false);
messages.Add(message);
+ usage += Usage.Empty with
+ {
+ Time = watch.Elapsed
+ };
}
else
{
@@ -145,14 +150,14 @@ public override async Task GenerateAsync(
OnCompletedResponseGenerated(response.Text() ?? string.Empty);
- var usage2 = GetUsage(response) with
+ usage = GetUsage(response) with
{
Time = watch.Elapsed
};
//Add Usage
- AddUsage(usage2);
- provider.AddUsage(usage2);
+ AddUsage(usage);
+ provider.AddUsage(usage);
//Handle Function Call
while (ReplyToToolCallsAutomatically && response.IsFunctionCall())
@@ -185,28 +190,21 @@ public override async Task GenerateAsync(
messages.Add(message);
//Add Usage
- usage2 = GetUsage(response) with
+ var usage2 = GetUsage(response) with
{
Time = watch.Elapsed
};
AddUsage(usage2);
provider.AddUsage(usage2);
+ usage += usage2;
}
}
}
-
- //Add Usage
- var usage = Usage.Empty with
- {
- Time = watch.Elapsed
- };
- AddUsage(usage);
- provider.AddUsage(usage);
-
+
return new ChatResponse
{
Messages = messages,
- Usage = Usage,
+ Usage = usage,
UsedSettings = ChatSettings.Default
};
}
diff --git a/src/Providers/Google/src/Predefined/GeminiModels.cs b/src/Providers/Google/src/Predefined/GeminiModels.cs
index 0693e346..7dfa37fe 100644
--- a/src/Providers/Google/src/Predefined/GeminiModels.cs
+++ b/src/Providers/Google/src/Predefined/GeminiModels.cs
@@ -24,4 +24,4 @@ public class Gemini15FlashModel(GoogleProvider provider)
public class Gemini15ProModel(GoogleProvider provider)
: GoogleChatModel(
provider,
- GoogleAIModels.Gemini15Flash, 2 * 1024 * 1024, 3.5 * 0.000001, 10.50 * 0.000001, 7.0 * 0.000001, 21.00 * 0.000001);
+ GoogleAIModels.Gemini15Pro, 2 * 1024 * 1024, 3.5 * 0.000001, 10.50 * 0.000001, 7.0 * 0.000001, 21.00 * 0.000001);
diff --git a/src/Providers/OpenAI/src/Chat/OpenAiChatModel.cs b/src/Providers/OpenAI/src/Chat/OpenAiChatModel.cs
index 9c22235c..a344caa6 100644
--- a/src/Providers/OpenAI/src/Chat/OpenAiChatModel.cs
+++ b/src/Providers/OpenAI/src/Chat/OpenAiChatModel.cs
@@ -266,12 +266,13 @@ public override async Task<ChatResponse> GenerateAsync(
OnPartialResponseGenerated(Environment.NewLine);
OnCompletedResponseGenerated(newMessage.Content);
- usage = GetUsage(response) with
+ var usage2 = GetUsage(response) with
{
Time = watch.Elapsed,
};
- AddUsage(usage);
- provider.AddUsage(usage);
+ AddUsage(usage2);
+ provider.AddUsage(usage2);
+ usage += usage2;
}
}