Skip to content

Commit

Permalink
Merge pull request #459 from tryAGI/bot/auto-format_202409230519
Browse files Browse the repository at this point in the history
style: Run dotnet format
  • Loading branch information
github-actions[bot] authored Sep 23, 2024
2 parents 6c9ee67 + 670d381 commit 58f52a6
Show file tree
Hide file tree
Showing 4 changed files with 29 additions and 29 deletions.
44 changes: 22 additions & 22 deletions src/Cli/src/Helpers.cs
Original file line number Diff line number Diff line change
Expand Up @@ -69,37 +69,37 @@ public static async Task<ChatModel> GetChatModelAsync(CancellationToken cancella
switch (await File.ReadAllTextAsync(Path.Combine(settingsFolder, "provider.txt"), cancellationToken).ConfigureAwait(false))
{
case Providers.OpenAi:
{
var provider = new OpenAiProvider(apiKey: await File.ReadAllTextAsync(Path.Combine(settingsFolder, "api_key.txt"), cancellationToken).ConfigureAwait(false));
var modelId = await File.ReadAllTextAsync(Path.Combine(settingsFolder, "model.txt"), cancellationToken).ConfigureAwait(false);
switch (modelId)
{
case "latest-fast":
modelId = ChatClient.LatestFastModel.ToValueString();
break;
case "latest-smart":
modelId = ChatClient.LatestSmartModel.ToValueString();
break;
}

model = new OpenAiChatModel(provider, id: modelId);
break;
var provider = new OpenAiProvider(apiKey: await File.ReadAllTextAsync(Path.Combine(settingsFolder, "api_key.txt"), cancellationToken).ConfigureAwait(false));
var modelId = await File.ReadAllTextAsync(Path.Combine(settingsFolder, "model.txt"), cancellationToken).ConfigureAwait(false);
switch (modelId)
{
case "latest-fast":
modelId = ChatClient.LatestFastModel.ToValueString();
break;
case "latest-smart":
modelId = ChatClient.LatestSmartModel.ToValueString();
break;
}

model = new OpenAiChatModel(provider, id: modelId);
break;

}
}
case Providers.OpenRouter:
{
var provider = new OpenRouterProvider(apiKey: await File.ReadAllTextAsync(Path.Combine(settingsFolder, "api_key.txt"), cancellationToken).ConfigureAwait(false));
var modelId = await File.ReadAllTextAsync(Path.Combine(settingsFolder, "model.txt"), cancellationToken).ConfigureAwait(false);
model = new OpenRouterModel(provider, id: modelId);
break;
}
{
var provider = new OpenRouterProvider(apiKey: await File.ReadAllTextAsync(Path.Combine(settingsFolder, "api_key.txt"), cancellationToken).ConfigureAwait(false));
var modelId = await File.ReadAllTextAsync(Path.Combine(settingsFolder, "model.txt"), cancellationToken).ConfigureAwait(false);
model = new OpenRouterModel(provider, id: modelId);
break;
}
default:
throw new NotSupportedException("Provider not supported.");
}

return model;
}

public static async Task<string> GenerateUsingAuthenticatedModelAsync(string prompt, CancellationToken cancellationToken = default)
{
ChatModel model = await GetChatModelAsync(cancellationToken).ConfigureAwait(false);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ public async Task Call_Ok()
async IAsyncEnumerable<ChatResponse> GetChatResponses()
{
await Task.CompletedTask;

yield return new ChatResponse
{
Messages = new[] { Message.Ai("Bob's asking what is hist name") },
Expand Down
2 changes: 1 addition & 1 deletion src/Meta/test/DatabaseTests.OpenSearch.cs
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@ private static async Task<DatabaseTestEnvironment> StartEnvironmentAsync()
Port = port2,
};
}

private static async Task<DatabaseTestEnvironment> SetupDocumentTestsAsync()
{
var environment = await StartEnvironmentAsync();
Expand Down
10 changes: 5 additions & 5 deletions src/Meta/test/WikiTests.GettingStartedWithAmazonBedrock.cs
Original file line number Diff line number Diff line change
Expand Up @@ -95,19 +95,19 @@ public async Task GettingStartedWithAmazonBedrock()
UseStreaming = true
}
};

llm.RequestSent += (_, request) => Console.WriteLine($"Prompt: {request.Messages.AsHistory()}");
llm.DeltaReceived += (_, delta) => Console.Write(delta.Content);
llm.ResponseReceived += (_, response) => Console.WriteLine($"Completed response: {response}");

var prompt = @"
you are a comic book writer. you will be given a question and you will answer it.
question: who are 10 of the most popular superheros and what are their powers?";

string response = await llm.GenerateAsync(prompt);

Console.WriteLine(response);

//// In conclusion, by following these steps, you can set up the AWS CLI,
//// configure the Amazon Bedrock provider, and start using the supported foundation models in your code.
//// With the AWS CLI and Bedrock provider properly configured,
Expand Down

0 comments on commit 58f52a6

Please sign in to comment.