Commit
Fix ModelInfo cattrs serialization issue (#2446)
* WIP

* WIP

* WIP

* updates

* remove print
parano authored May 4, 2022
1 parent 1f31301 commit b50051a
Showing 20 changed files with 293 additions and 344 deletions.
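The substance of the fix: cattrs has trouble structuring attrs classes whose field annotations are still strings/ForwardRefs (including PEP 604 `X | None` unions on older Pythons), so the diff below switches `Resource | None` to `t.Optional[Resource]` and calls `attr.resolve_types(...)` on each info class before cattrs hooks are registered. A minimal sketch of that pattern, with illustrative `Inner`/`Outer` classes that are not from this commit:

    import attr
    from cattr import GenConverter

    converter = GenConverter()

    @attr.define
    class Inner:
        value: int

    @attr.define
    class Outer:
        # Until resolved, this annotation is just the string "Inner".
        inner: "Inner"

    # Resolve string annotations to real classes up front; the commit adds the
    # equivalent call for BentoRunnerInfo, BentoApiInfo, BentoModelInfo and BentoInfo.
    attr.resolve_types(Outer, globals(), locals())

    assert converter.structure({"inner": {"value": 1}}, Outer) == Outer(Inner(1))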
20 changes: 17 additions & 3 deletions bentoml/_internal/bento/bento.py
@@ -343,7 +343,7 @@ class BentoRunnerInfo:
     name: str
     runnable_type: str
     models: t.List[str] = attr.field(factory=list)
-    resource_config: Resource | None = attr.field(default=None)
+    resource_config: t.Optional[Resource] = attr.field(default=None)

     @classmethod
     def from_runner(cls, r: Runner) -> "BentoRunnerInfo":
@@ -356,6 +356,10 @@ def from_runner(cls, r: Runner) -> "BentoRunnerInfo":
         )


+# Remove after attrs support ForwardRef natively
+attr.resolve_types(BentoRunnerInfo, globals(), locals())
+
+
 @attr.define(frozen=True, on_setattr=None)
 class BentoApiInfo:
     name: str
@@ -371,6 +375,10 @@ def from_inference_api(cls, api: "InferenceAPI") -> "BentoApiInfo":
         )


+# Remove after attrs support ForwardRef natively
+attr.resolve_types(BentoApiInfo, globals(), locals())
+
+
 @attr.define(frozen=True, on_setattr=None)
 class BentoModelInfo:
     tag: Tag = attr.field(converter=Tag.from_taglike)
@@ -386,6 +394,10 @@ def from_bento_model(cls, bento_model: "Model") -> "BentoModelInfo":
         )


+# Remove after attrs support ForwardRef natively
+attr.resolve_types(BentoModelInfo, globals(), locals())
+
+
 @attr.define(repr=False, frozen=True, on_setattr=None)
 class BentoInfo:
     tag: Tag
@@ -453,7 +465,6 @@ def from_yaml_file(cls, stream: t.IO[t.Any]) -> "BentoInfo":
         )
         try:
             # type: ignore[attr-defined]
-            print("####", yaml_content, cls)
             return bentoml_cattr.structure(yaml_content, cls)
         except KeyError as e:
             raise BentoMLException(f"Missing field {e} in {BENTO_YAML_FILENAME}")
@@ -463,10 +474,13 @@ def validate(self):
         ...


+# Remove after attrs support ForwardRef natively
+attr.resolve_types(BentoInfo, globals(), locals())
+
 bentoml_cattr.register_unstructure_hook(
     BentoInfo,
     # Ignore tag, tag is saved via the name and version field
-    make_dict_unstructure_fn(BentoInfo, bentoml_cattr, tag=override(omit=True)),
+    make_dict_unstructure_fn(BentoInfo, bentoml_cattr, tag=override(omit=True)),  # type: ignore
 )


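On the unstructure side, the hook above uses `make_dict_unstructure_fn` with `override(omit=True)` so the redundant `tag` field stays out of the serialized dict (the tag is already carried by the `name` and `version` fields). A self-contained sketch of the same mechanism, with an illustrative `Example` class:

    import attr
    from cattr import GenConverter
    from cattr.gen import make_dict_unstructure_fn, override

    converter = GenConverter()

    @attr.define
    class Example:
        name: str
        version: str
        tag: str

    # Drop `tag` during unstructuring, mirroring the BentoInfo hook.
    converter.register_unstructure_hook(
        Example,
        make_dict_unstructure_fn(Example, converter, tag=override(omit=True)),
    )

    assert converter.unstructure(Example("iris", "v1", "iris:v1")) == {
        "name": "iris",
        "version": "v1",
    }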
22 changes: 20 additions & 2 deletions bentoml/_internal/frameworks/FRAMEWORK_TEMPLATE_PY
@@ -19,6 +19,15 @@ if TYPE_CHECKING:
 MODULE_NAME = "bentoml.MY_MODULE"


+def get(tag_like: str | Tag) -> Model:
+    model = bentoml.models.get(tag_like)
+    if model.info.module not in (MODULE_NAME, __name__):
+        raise NotFound(
+            f"Model {model.tag} was saved with module {model.info.module}, failed loading with {MODULE_NAME}."
+        )
+    return model
+
+
 @attr.define(frozen=True)
 class FrameworkOptions(ModelOptions):
     pass
@@ -46,7 +55,7 @@ def load_model(
         <LOAD EXAMPLE>
     """  # noqa
     if not isinstance(bento_model, bentoml.Model):
-        bento_model = bentoml.models.get(bento_model)
+        bento_model = get(bento_model)

     ...

@@ -103,6 +112,15 @@ def save_model(
     return bentoml_model.tag


+def get(tag_like: str | Tag) -> Model:
+    model = bentoml.models.get(tag_like)
+    if model.info.module != MODULE_NAME:
+        raise NotFound(
+            f'Model "{tag_like}" saved with module "{MODULE_NAME}" is not found'
+        )
+    return model
+
+
 def get_runnable(
     bento_model: bentoml.Model,
 ) -> t.Type[bentoml.Runnable]:
@@ -124,7 +142,7 @@ def get_runnable(

     for method_name, options in bento_model.info.signatures.items():

-        def _run(input_data) -> output_data:
+        def _run(self, input_data) -> output_data:
             ...

         FrameworkRunnable.add_method(
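The template's new module-level `get` gives every generated framework an entry point that fails fast when a model was saved under a different framework module. A rough usage sketch; `bentoml.my_framework` here is a placeholder for a module generated from this template, not a real BentoML module:

    import bentoml

    # Raises NotFound if "my_model" was saved by a different framework,
    # instead of failing later inside load_model.
    model = bentoml.my_framework.get("my_model:latest")
    runner = model.to_runner()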
75 changes: 0 additions & 75 deletions bentoml/_internal/frameworks/common/model_runner.py

This file was deleted.

@@ -1,4 +1,3 @@
-import pickle
 import typing as t
 from typing import TYPE_CHECKING

@@ -12,18 +11,16 @@

 from ..models import PKL_EXT
 from ..models import SAVE_NAMESPACE
-from .common.model_runner import BaseModelRunner
-from .common.model_runner import BaseModelSimpleRunner
 from ..configuration.containers import BentoMLContainer

 if TYPE_CHECKING:
     from ..models import ModelStore

-MODULE_NAME = "bentoml.picklable_model"
+MODULE_NAME = "bentoml.picklable"


 @inject
-def load(
+def load_model(
     tag: t.Union[str, Tag],
     model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
 ) -> t.Any:
@@ -58,10 +55,10 @@ def load(
     with open(model_file, "rb") as f:
         # The protocol version used is detected automatically, so we do not
         # have to specify it.
-        return pickle.load(f)
+        return cloudpickle.load(f)


-def save(
+def save_model(
     name: str,
     obj: t.Any,
     *,
@@ -101,10 +98,13 @@ def predict(self, some_integer: int):
                return some_integer**2
        model_to_save = MyCoolModel();
-       tag_info = bentoml.picklable_model.save("test_pickle_model", model_to_save)
-       runner = bentoml.picklable_model.load_runner(tag_info)
-       runner.run(3)
+       tag_info = bentoml.picklable.save_model("test_pickle_model", model_to_save)
+       loaded_model = bentoml.picklable.load_model("test_pickle_model:latest")
+
+    Using saved pickable model in Service via Runner:
+    .. code-block:: python
+        runner = bentoml.picklable.get("test_pickle_model").to_runner()
     """
     context = {"framework_name": "picklable_model"}

@@ -121,98 +121,3 @@ def predict(self, some_integer: int):
         cloudpickle.dump(obj, f)

     return _model.tag
-
-
-class _PicklableModelRunner(BaseModelRunner):
-    def __init__(self, tag: t.Union[Tag, str], method_name: str, name: t.Optional[str]):
-        super().__init__(tag=tag, name=name)
-
-        self._method_name = method_name
-
-        self._model: t.Any = None
-        self._infer_func: t.Any = None
-
-    @property
-    def num_replica(self) -> int:
-        return max(round(self.resource_quota.cpu), 1)
-
-    def _setup(self) -> None:
-        self._model = load(self._tag, model_store=self.model_store)
-        if self._method_name == "__call__":
-            self._infer_func = self._model
-        else:
-            self._infer_func = getattr(self._model, self._method_name)
-
-    def _run_batch(self, *args: t.Any, **kwargs: t.Any) -> t.Any:
-        return self._infer_func(*args, **kwargs)
-
-
-class _PicklableModelSimpleRunner(BaseModelSimpleRunner):
-    def __init__(self, tag: t.Union[Tag, str], method_name: str, name: t.Optional[str]):
-        super().__init__(tag=tag, name=name)
-        self._method_name = method_name
-
-        self._model: t.Any = None
-        self._infer_func: t.Any = None
-
-    @property
-    def num_replica(self) -> int:
-        return max(round(self.resource_quota.cpu), 1)
-
-    def _setup(self) -> None:
-        self._model = load(self._tag, model_store=self.model_store)
-
-        if self._method_name == "__call__":
-            self._infer_func = self._model
-        else:
-            self._infer_func = getattr(self._model, self._method_name)
-
-    def _run(self, *args: t.Any, **kwargs: t.Any) -> t.Any:
-        return self._infer_func(*args, **kwargs)
-
-
-@inject
-def load_runner(
-    tag: t.Union[str, Tag],
-    *,
-    name: t.Optional[str] = None,
-    method_name: str = "__call__",
-    batch: bool = False,
-) -> t.Union[_PicklableModelRunner, _PicklableModelSimpleRunner]:
-    """
-    Runner represents a unit of serving logic that can be scaled horizontally to
-    maximize throughput. :func:`bentoml.picklable_model.load_runner` implements a Runner class that
-    wraps the commands that dump and load a pickled object, which optimizes it for the BentoML runtime.
-    Args:
-        tag (:code:`Union[str, Tag]`):
-            Tag of a saved model in BentoML local modelstore..
-        method_name:
-            Method to call on the pickled object
-        batch:
-            Determines whether the model supports batching
-    Returns:
-        :obj:`~bentoml._internal.runner.Runner`: Runner instances for the target :mod:`bentoml.picklable_model` model
-    Examples:
-    .. code-block:: python
-        import bentoml
-        runner = bentoml.picklable_model.load_runner("my_model:latest")
-        runner.run([[1,2,3,4]])
-    """
-    if batch:
-        return _PicklableModelRunner(
-            tag=tag,
-            method_name=method_name,
-            name=name,
-        )
-    else:
-        return _PicklableModelSimpleRunner(
-            tag=tag,
-            method_name=method_name,
-            name=name,
-        )
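Together with the deletion of common/model_runner.py above, this removes the `load_runner` entry point entirely. An approximate before/after for callers, assuming the new API shown in the updated docstring (which method runs is now determined by the signatures saved with the model, not a `method_name` argument):

    import bentoml

    # Before this commit:
    #   runner = bentoml.picklable_model.load_runner("my_model:latest")
    #   runner.run(3)

    # After this commit:
    runner = bentoml.picklable.get("my_model:latest").to_runner()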
