feat: remote development #4942

Merged · 8 commits · Sep 1, 2024
1 change: 0 additions & 1 deletion src/_bentoml_impl/loader.py
@@ -163,7 +163,6 @@ def import_service(
assert (
module_name and attrs_str
), f'Invalid import target "{service_identifier}", must format as "<module>:<attribute>"'

module = importlib.import_module(module_name)
root_service_name, _, depend_path = attrs_str.partition(".")
root_service = t.cast("Service[t.Any]", getattr(module, root_service_name))
1 change: 0 additions & 1 deletion src/_bentoml_impl/server/serving.py
@@ -332,7 +332,6 @@ def serve_http(
if development_mode:
arbiter_kwargs["debug"] = True
arbiter_kwargs["loggerconfig"] = SERVER_LOGGING_CONFIG
arbiter_kwargs["loglevel"] = "WARNING"

arbiter = create_standalone_arbiter(**arbiter_kwargs)
arbiter.exit_stack.enter_context(
161 changes: 106 additions & 55 deletions src/bentoml/_internal/bento/bento.py
@@ -53,6 +53,7 @@
from _bentoml_sdk import Service as NewService
from _bentoml_sdk.service import ServiceConfig

from ..cloud.schemas.modelschemas import BentoManifestSchema
from ..models import Model as StoredModel
from ..service import Service
from ..service.inference_api import InferenceAPI
@@ -176,13 +177,58 @@ def _fs(self) -> FS:
def info(self) -> BentoInfo:
return self._info

@property
def entry_service(self) -> str:
return self.info.entry_service

def get_manifest(self) -> BentoManifestSchema:
from ..cloud.schemas.modelschemas import BentoManifestSchema
from ..cloud.schemas.modelschemas import BentoRunnerResourceSchema
from ..cloud.schemas.modelschemas import BentoRunnerSchema

info = self.info
models = [str(m.tag) for m in info.all_models]
runners = [
BentoRunnerSchema(
name=r.name,
runnable_type=r.runnable_type,
models=r.models,
resource_config=(
BentoRunnerResourceSchema(
cpu=r.resource_config.get("cpu"),
nvidia_gpu=r.resource_config.get("nvidia.com/gpu"),
custom_resources=r.resource_config.get("custom_resources"),
)
if r.resource_config
else None
),
)
for r in info.runners
]
return BentoManifestSchema(
name=info.name,
entry_service=info.entry_service,
service=info.service,
bentoml_version=info.bentoml_version,
apis={},
models=models,
runners=runners,
size_bytes=self.total_size(),
services=info.services,
envs=info.envs,
schema=info.schema,
version=info.version,
)

@classmethod
@inject
def create(
cls,
build_config: BentoBuildConfig,
version: t.Optional[str] = None,
build_ctx: t.Optional[str] = None,
bare: bool = False,
reload: bool = False,
) -> Bento:
from _bentoml_sdk.models import BentoModel

@@ -202,7 +248,10 @@ def create(
BentoMLContainer.model_aliases.set(build_config.model_aliases)
# This also verifies that svc can be imported correctly
svc = import_service(
build_config.service, working_dir=build_ctx, standalone_load=True
build_config.service,
working_dir=build_ctx,
reload=reload,
standalone_load=True,
)
is_legacy = isinstance(svc, Service)
# Apply default build options
@@ -228,13 +277,10 @@
logger.debug(
'Building BentoML service "%s" from build context "%s".', tag, build_ctx
)

bento_fs = TempFS(
identifier=f"bentoml_bento_{bento_name}",
temp_dir=BentoMLContainer.tmp_bento_store_dir.get(),
)
ctx_fs = fs.open_fs(encode_path_for_uri(build_ctx))

models: list[BentoModelInfo] = []

def append_model(model: BentoModelInfo) -> None:
@@ -255,57 +301,60 @@ def append_model(model: BentoModelInfo) -> None:
for model in runner.models:
append_model(BentoModel(model.tag).to_info())

# create ignore specs
specs = BentoPathSpec(build_config.include, build_config.exclude)
if not bare:
ctx_fs = fs.open_fs(encode_path_for_uri(build_ctx))

# Copy all files based on include and exclude, into `src` directory
relpaths = [s for s in build_config.include if s.startswith("../")]
if len(relpaths) != 0:
raise InvalidArgument(
"Paths outside of the build context directory cannot be included; use a symlink or copy those files into the working directory manually."
)
bento_fs.makedir(BENTO_PROJECT_DIR_NAME)
target_fs = bento_fs.opendir(BENTO_PROJECT_DIR_NAME)
with target_fs.open(DEFAULT_BENTO_BUILD_FILE, "w") as bentofile_yaml:
build_config.to_yaml(bentofile_yaml)
ignore_specs = list(specs.from_path(build_ctx))

for dir_path, _, files in ctx_fs.walk():
for f in files:
path = fs.path.combine(dir_path, f.name).lstrip("/")
if specs.includes(path, recurse_exclude_spec=ignore_specs):
if ctx_fs.getsize(path) > 10 * 1024 * 1024:
logger.warn("File size is larger than 10MiB: %s", path)
target_fs.makedirs(dir_path, recreate=True)
copy_file(ctx_fs, path, target_fs, path)

# NOTE: we need to generate both Python and Conda
# first to make sure we can generate the Dockerfile correctly.
build_config.python.write_to_bento(bento_fs, build_ctx)
build_config.conda.write_to_bento(bento_fs, build_ctx)
build_config.docker.write_to_bento(bento_fs, build_ctx, build_config.conda)

# Create `readme.md` file
if build_config.description is None:
with bento_fs.open(BENTO_README_FILENAME, "w", encoding="utf-8") as f:
f.write(get_default_svc_readme(svc, svc_version=tag.version))
else:
if build_config.description.startswith("file:"):
file_name = build_config.description[5:].strip()
copy_file_to_fs_folder(
file_name, bento_fs, dst_filename=BENTO_README_FILENAME
# create ignore specs
specs = BentoPathSpec(build_config.include, build_config.exclude)

# Copy all files based on include and exclude, into `src` directory
relpaths = [s for s in build_config.include if s.startswith("../")]
if len(relpaths) != 0:
raise InvalidArgument(
"Paths outside of the build context directory cannot be included; use a symlink or copy those files into the working directory manually."
)
bento_fs.makedir(BENTO_PROJECT_DIR_NAME)
target_fs = bento_fs.opendir(BENTO_PROJECT_DIR_NAME)
with target_fs.open(DEFAULT_BENTO_BUILD_FILE, "w") as bentofile_yaml:
build_config.to_yaml(bentofile_yaml)
ignore_specs = list(specs.from_path(build_ctx))

for dir_path, _, files in ctx_fs.walk():
for f in files:
path = fs.path.combine(dir_path, f.name).lstrip("/")
if specs.includes(path, recurse_exclude_spec=ignore_specs):
if ctx_fs.getsize(path) > 10 * 1024 * 1024:
logger.warn("File size is larger than 10MiB: %s", path)
target_fs.makedirs(dir_path, recreate=True)
copy_file(ctx_fs, path, target_fs, path)

# NOTE: we need to generate both Python and Conda
# first to make sure we can generate the Dockerfile correctly.
build_config.python.write_to_bento(bento_fs, build_ctx)
build_config.conda.write_to_bento(bento_fs, build_ctx)
build_config.docker.write_to_bento(bento_fs, build_ctx, build_config.conda)

# Create `readme.md` file
if build_config.description is None:
with bento_fs.open(BENTO_README_FILENAME, "w", encoding="utf-8") as f:
f.write(get_default_svc_readme(svc, svc_version=tag.version))
else:
with bento_fs.open(BENTO_README_FILENAME, "w") as f:
f.write(build_config.description)

# Create 'apis/openapi.yaml' file
bento_fs.makedir("apis")
with bento_fs.open(fs.path.combine("apis", "openapi.yaml"), "w") as f:
yaml.dump(svc.openapi_spec, f)
if not is_legacy:
with bento_fs.open(fs.path.combine("apis", "schema.json"), "w") as f:
json.dump(svc.schema(), f, indent=2)
if build_config.description.startswith("file:"):
file_name = build_config.description[5:].strip()
copy_file_to_fs_folder(
file_name, bento_fs, dst_filename=BENTO_README_FILENAME
)
else:
with bento_fs.open(BENTO_README_FILENAME, "w") as f:
f.write(build_config.description)

# Create 'apis/openapi.yaml' file
bento_fs.makedir("apis")
with bento_fs.open(fs.path.combine("apis", "openapi.yaml"), "w") as f:
yaml.dump(svc.openapi_spec, f)
if not is_legacy:
with bento_fs.open(fs.path.combine("apis", "schema.json"), "w") as f:
json.dump(svc.schema(), f, indent=2)

res = Bento(
tag,
@@ -341,6 +390,8 @@ def append_model(model: BentoModelInfo) -> None:
schema=svc.schema() if not is_legacy else {},
),
)
if bare:
return res
# Create bento.yaml
res.flush_info()
try:
@@ -558,9 +609,9 @@ def from_bento_model(
class BentoServiceInfo:
name: str
service: str
models: t.List[BentoModelInfo] = attr.field(factory=list)
dependencies: t.List[str] = attr.field(factory=list)
config: ServiceConfig = attr.field(factory=dict)
models: t.List[BentoModelInfo] = attr.field(factory=list, eq=False)
dependencies: t.List[str] = attr.field(factory=list, eq=False)
config: ServiceConfig = attr.field(factory=dict, eq=False)

@classmethod
def from_service(cls, svc: NewService[t.Any]) -> BentoServiceInfo:
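Taken together, the bento.py changes add a metadata-only build path: `Bento.create()` gains `bare` and `reload` flags, and the new `get_manifest()` turns a built Bento into the `BentoManifestSchema` the cloud client needs. Below is a minimal caller-side sketch, assuming only the signatures visible in this diff; the service target and build context are hypothetical placeholders, not values from this PR.

```python
from bentoml._internal.bento.bento import Bento
from bentoml._internal.bento.build_config import BentoBuildConfig

# Hypothetical build target: "service:MyService" stands in for a real
# "<module>:<attribute>" import string from a user's project.
build_config = BentoBuildConfig(service="service:MyService")

bento = Bento.create(
    build_config,
    build_ctx=".",   # project directory (assumed)
    bare=True,       # new flag: skip copying sources, readme, and OpenAPI spec
    reload=True,     # new flag: forwarded to import_service()
)

# get_manifest() assembles the BentoManifestSchema sent to BentoCloud.
manifest = bento.get_manifest()
print(manifest.name, manifest.entry_service, manifest.size_bytes)
```

With `bare=True` the build returns right after the in-memory `BentoInfo` is assembled, before `bento.yaml` is flushed, which appears to be the foundation for the bare push path added in bentocloud.py below.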
23 changes: 11 additions & 12 deletions src/bentoml/_internal/cloud/base.py
@@ -4,6 +4,7 @@
from contextlib import contextmanager

import attrs
from rich import get_console
from rich.console import Group
from rich.live import Live
from rich.panel import Panel
@@ -87,7 +88,8 @@ class Spinner:
Use it as a context manager to start the live updating.
"""

def __init__(self):
def __init__(self, console: Console | None = None) -> None:
self.console = console or get_console()
self.transmission_progress = Progress(
TextColumn("[bold blue]{task.description}", justify="right"),
BarColumn(bar_width=None),
@@ -98,6 +100,7 @@ def __init__(self):
TransferSpeedColumn(),
"•",
TimeRemainingColumn(),
console=self.console,
)

self._logs: list[str] = []
@@ -106,24 +109,21 @@ def __init__(self):
TimeElapsedColumn(),
TextColumn("[bold purple]{task.description}"),
SpinnerColumn("simpleDots"),
console=self.console,
)
self._spinner_task_id: t.Optional[TaskID] = None
self._live = Live(self)

@property
def console(self) -> "Console":
return self._live.console
self._live = Live(self, console=self.console)

@contextmanager
def spin(self, text: str) -> t.Generator[TaskID, None, None]:
"""Create a spinner as a context manager."""
task_id = self.update(text, new=True)
try:
task_id = self.update(text, new=True)
yield task_id
finally:
self._spinner_task_id = None
self._spinner_progress.stop_task(task_id)
self._spinner_progress.update(task_id, visible=False)
self._spinner_progress.remove_task(task_id)
if self._spinner_task_id == task_id:
self._spinner_task_id = None

def update(self, text: str, new: bool = False) -> TaskID:
"""Update the spin text."""
@@ -149,8 +149,7 @@ def start(self) -> None:
def stop(self) -> None:
"""Stop live updating."""
if self._spinner_task_id is not None:
self._spinner_progress.stop_task(self._spinner_task_id)
self._spinner_progress.update(self._spinner_task_id, visible=False)
self._spinner_progress.remove_task(self._spinner_task_id)
self._spinner_task_id = None
self._live.stop()

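The Spinner changes make the rich `Console` injectable and shared by both progress displays, and switch spin-task cleanup to `remove_task()`. A short usage sketch, assuming only the API shown in this hunk; the work inside the `spin()` block is a placeholder.

```python
from rich.console import Console

from bentoml._internal.cloud.base import Spinner

spinner = Spinner(console=Console(stderr=True))  # console argument is optional
with spinner:  # documented above as a context manager that starts Live updating
    with spinner.spin(text="Fetching Bento repository"):
        pass  # placeholder for real work

    # The transmission progress bars render on the same injected console.
    task_id = spinner.transmission_progress.add_task("Uploading", total=100)
    spinner.transmission_progress.update(task_id, advance=100)
```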
10 changes: 8 additions & 2 deletions src/bentoml/_internal/cloud/bentocloud.py
@@ -62,13 +62,16 @@ def push_bento(
bento: Bento,
*,
force: bool = False,
bare: bool = False,
threads: int = 10,
):
with self.spinner:
upload_task_id = self.spinner.transmission_progress.add_task(
f'Pushing Bento "{bento.tag}"', start=False, visible=False
)
self._do_push_bento(bento, upload_task_id, force=force, threads=threads)
self._do_push_bento(
bento, upload_task_id, force=force, threads=threads, bare=bare
)
self.spinner.log(f'✅ Pushed Bento "{bento.tag}"')

@inject
@@ -79,6 +82,7 @@ def _do_push_bento(
*,
force: bool = False,
threads: int = 10,
bare: bool = False,
rest_client: RestApiClient = Provide[BentoMLContainer.rest_api_client],
bentoml_tmp_dir: str = Provide[BentoMLContainer.tmp_bento_store_dir],
):
@@ -112,6 +116,7 @@ def push_model(model: Model[t.Any]) -> None:
)

executor.map(push_model, models_to_push)

with self.spinner.spin(text=f'Fetching Bento repository "{name}"'):
bento_repository = rest_client.v1.get_bento_repository(
bento_repository_name=name
@@ -197,7 +202,8 @@ def push_model(model: Model[t.Any]) -> None:
labels=labels,
),
)

if bare:
return
transmission_strategy: TransmissionStrategy = "proxy"
presigned_upload_url: str | None = None

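On the client side, `push_bento()` now accepts `bare=True`, which pushes referenced models and registers the Bento's repository and metadata, then returns before any archive transfer. A hedged sketch of a caller; construction of the cloud client is omitted because it is not part of this diff, and its concrete type is not asserted here.

```python
from bentoml._internal.bento.bento import Bento


def register_without_upload(cloud_client, bento: Bento) -> None:
    """Push only the Bento's metadata to BentoCloud.

    `cloud_client` is whatever object exposes the push_bento() shown above;
    with bare=True no source archive is uploaded.
    """
    cloud_client.push_bento(bento, force=False, bare=True, threads=10)
```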