Python: Bedrock connector #9100

Open · wants to merge 12 commits into base branch main
1 change: 1 addition & 0 deletions python/.cspell.json
@@ -27,6 +27,7 @@
"aiplatform",
"azuredocindex",
"azuredocs",
"boto",
"contentvector",
"contoso",
"datamodel",
3 changes: 3 additions & 0 deletions python/pyproject.toml
@@ -114,6 +114,9 @@ weaviate = [
pandas = [
    "pandas ~= 2.2"
]
aws = [
    "boto3>=1.28.57",
]

[tool.uv]
prerelease = "if-necessary-or-explicit"
5 changes: 5 additions & 0 deletions python/semantic_kernel/connectors/ai/bedrock/README.md
@@ -0,0 +1,5 @@
## Configure AWS CLI

1. Install the AWS CLI
2. Configure the AWS CLI with your AWS credentials
3. Configure the AWS CLI with the correct region
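As a quick sketch (not part of the files in this PR), boto3 automatically picks up the credentials and region configured above; the two client objects the connector's base class expects could then be created and sanity-checked like this:

```python
# Sketch only: assumes boto3 is installed (the `aws` extra above) and that
# `aws configure` has been run so credentials and a default region are available.
import boto3

# Management-plane client (model listing / metadata).
bedrock_client = boto3.client("bedrock")
# Data-plane client (inference requests).
bedrock_runtime_client = boto3.client("bedrock-runtime")

# Quick check that the configuration works: list the available foundation models.
for model in bedrock_client.list_foundation_models()["modelSummaries"]:
    print(model["modelId"])
```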
Empty file.
@@ -0,0 +1,44 @@
# Copyright (c) Microsoft. All rights reserved.


from typing import Any

from pydantic import Field

from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings


class BedrockPromptExecutionSettings(PromptExecutionSettings):
    """Bedrock Prompt Execution Settings."""

    temperature: float | None = Field(None, ge=0.0, le=1.0)
    top_p: float | None = Field(None, ge=0.0, le=1.0)
    top_k: int | None = Field(None, gt=0)
    max_tokens: int | None = Field(None, gt=0)
    stop: list[str] = Field(default_factory=list)


class BedrockChatPromptExecutionSettings(BedrockPromptExecutionSettings):
    """Bedrock Chat Prompt Execution Settings."""

    tools: list[dict[str, Any]] | None = Field(
        None,
        max_length=64,
        description="Do not set this manually. It is set by the service based on the function choice configuration.",
    )
    tool_choice: dict[str, Any] | None = Field(
        None,
        description="Do not set this manually. It is set by the service based on the function choice configuration.",
    )


class BedrockTextPromptExecutionSettings(BedrockPromptExecutionSettings):
    """Bedrock Text Prompt Execution Settings."""

    ...


class BedrockEmbeddingPromptExecutionSettings(PromptExecutionSettings):
    """Bedrock Embedding Prompt Execution Settings."""

    ...
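A short usage sketch of the settings above; the import path is an assumption, since this hunk does not show the file's location in the package:

```python
# Sketch: import path assumed (the diff does not show where this module lives).
from semantic_kernel.connectors.ai.bedrock.bedrock_prompt_execution_settings import (
    BedrockChatPromptExecutionSettings,
)

settings = BedrockChatPromptExecutionSettings(
    temperature=0.7,    # constrained to [0.0, 1.0]
    top_p=0.9,          # constrained to [0.0, 1.0]
    max_tokens=512,     # must be > 0
    stop=["</answer>"],
)

# Out-of-range values are rejected by the Field constraints above;
# e.g. temperature=1.5 raises a pydantic ValidationError.
```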
@@ -0,0 +1,34 @@
# Copyright (c) Microsoft. All rights reserved.

from typing import ClassVar

from semantic_kernel.kernel_pydantic import KernelBaseSettings
from semantic_kernel.utils.experimental_decorator import experimental_class


@experimental_class
class BedrockSettings(KernelBaseSettings):
    """Amazon Bedrock service settings.

    The settings are first loaded from environment variables with
    the prefix 'BEDROCK_'.
    If the environment variables are not found, the settings can
    be loaded from a .env file with the encoding 'utf-8'.
    If the settings are not found in the .env file, the settings
    are ignored; however, validation will fail, alerting you that the
    settings are missing.

    Optional settings for prefix 'BEDROCK_' are:
    - chat_model_id: str | None - The Amazon Bedrock chat model ID to use.
        (Env var BEDROCK_CHAT_MODEL_ID)
    - text_model_id: str | None - The Amazon Bedrock text model ID to use.
        (Env var BEDROCK_TEXT_MODEL_ID)
    - embedding_model_id: str | None - The Amazon Bedrock embedding model ID to use.
        (Env var BEDROCK_EMBEDDING_MODEL_ID)
    """

    env_prefix: ClassVar[str] = "BEDROCK_"

    chat_model_id: str | None = None
    text_model_id: str | None = None
    embedding_model_id: str | None = None
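A small sketch of the loading behaviour described in the docstring, assuming `KernelBaseSettings` follows the usual pydantic-settings pattern; the import path and the model ID are example assumptions:

```python
# Sketch: demonstrates env-var loading with the BEDROCK_ prefix.
import os

from semantic_kernel.connectors.ai.bedrock.bedrock_settings import BedrockSettings  # path assumed

os.environ["BEDROCK_CHAT_MODEL_ID"] = "anthropic.claude-3-sonnet-20240229-v1:0"  # example value

settings = BedrockSettings()
print(settings.chat_model_id)       # -> anthropic.claude-3-sonnet-20240229-v1:0
print(settings.embedding_model_id)  # -> None (optional setting, no env var provided)
```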
Empty file.
@@ -0,0 +1,32 @@
# Copyright (c) Microsoft. All rights reserved.

from abc import ABC
from functools import partial
from typing import Any, ClassVar

from semantic_kernel.connectors.ai.bedrock.services.model_provider.utils import run_in_executor
from semantic_kernel.kernel_pydantic import KernelBaseModel


class BedrockBase(KernelBaseModel, ABC):
    """Amazon Bedrock Service Base Class."""

    MODEL_PROVIDER_NAME: ClassVar[str] = "bedrock"

    # Amazon Bedrock clients
    # Runtime client: used for inference
    bedrock_runtime_client: Any
    # Management client: used for model management
    bedrock_client: Any

    async def get_foundation_model_info(self, model_id: str) -> dict[str, Any]:
        """Get the foundation model information."""
        response = await run_in_executor(
            None,
            partial(
                self.bedrock_client.get_foundation_model,
                modelIdentifier=model_id,
            ),
        )

        return response.get("modelDetails")
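The `run_in_executor` helper is imported from a utils module that is not part of this excerpt. As a non-authoritative sketch, the pattern it names is typically a thin wrapper over `loop.run_in_executor`, which is what allows the synchronous boto3 call above to be awaited:

```python
# Plausible sketch of the helper (the real utils module is not shown in this diff).
import asyncio
from collections.abc import Callable
from typing import Any, TypeVar

T = TypeVar("T")


async def run_in_executor(executor: Any, func: Callable[[], T]) -> T:
    """Run a blocking callable (e.g. a boto3 request) in a thread pool and await the result."""
    return await asyncio.get_running_loop().run_in_executor(executor, func)
```

`get_foundation_model_info` wraps the boto3 call in `functools.partial` so it can be handed to the executor as a zero-argument callable; passing `None` as the executor uses asyncio's default thread pool.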