From 28ae5b0175edab07f008a3a6a01908c468e45255 Mon Sep 17 00:00:00 2001 From: phatvo9 Date: Fri, 20 Oct 2023 15:38:53 +0700 Subject: [PATCH] addressed comments --- clarifai/models/api.py | 2 +- .../docs/inference_parameters.md | 82 +++++++++++++++---- .../model_config/inference_parameter.py | 19 +++-- .../model_serving/models/model_types.py | 4 +- clarifai/models/model_serving/models/test.py | 4 +- 5 files changed, 84 insertions(+), 27 deletions(-) diff --git a/clarifai/models/api.py b/clarifai/models/api.py index 406a8276..3f476ad0 100644 --- a/clarifai/models/api.py +++ b/clarifai/models/api.py @@ -25,7 +25,7 @@ def _make_default_value_proto(dtype, value): if dtype == 1: return Value(bool_value=value) - elif dtype == 2: + elif dtype == 2 or dtype == 21: return Value(string_value=value) elif dtype == 3: return Value(number_value=value) diff --git a/clarifai/models/model_serving/docs/inference_parameters.md b/clarifai/models/model_serving/docs/inference_parameters.md index 37b84e61..bb0a752e 100644 --- a/clarifai/models/model_serving/docs/inference_parameters.md +++ b/clarifai/models/model_serving/docs/inference_parameters.md @@ -1,21 +1,19 @@ -## Inference paramters +## Inference parameters -When making prediction, you may need to change some paramters to adjust the result. Those paramters will be passed through `paramters()` of a request in triton python model. +When making a prediction, you may need to change some parameters to adjust the result. Those parameters will be passed through `parameters()` of a request in the triton python model. -In order to send it to `**kwargs` of `get_predictions` in `inference.py`, there are 2 ways: -1. You can send any arbitrary parameters via clarifai API. -2. You can define some parameters and they will be visible and adjustable on Clarifai model views. +In order to send it to `**kwargs` of `get_predictions` in `inference.py`, you can define some parameters and they will be visible and adjustable on the Clarifai model view. 
-This document helps you to create your inference parameters that can be visibale and adjustable easily on Clarifai platform. `Again, you can still send any parameters via API but undefined parameters won't appear on Clarifai UI`. The defined parameters will be sent as `json` file when you use `clarifai-upload-model` cli. +This document helps you to create your inference parameters so that they can be easily visible and adjustable on the Clarifai platform. The defined parameters will be sent as a `json` file when you use the `clarifai-upload-model` cli. ### JSON file structure: The file contains a list of object has 4 fields: * `path` (str): name of your parameter, it must be valid as python variable -* `field_type` (int): the parameter data type is one of {1,2,3}, it means {boolean, string, number} respectively. `Number` means `int` or `float` +* `field_type` (int): the parameter data type is one of {1,2,21,3}, it means {boolean, string, encrypted_string, number} respectively. `Number` means `int` or `float`. `Encrypted_string` is simply a string that can be used to store your secrets, such as an API key. * `default_value`: a default value of the parameter. * `description` (str): short sentence describes what the parameter does -An example of 3 parameters: +An example of 4 parameters: ```json [ { @@ -35,17 +33,24 @@ An example of 3 parameters: "field_type": 3, "default_value": 9.9, "description": "a float number variable" - } + }, + { + "path": "secret_string_var", + "field_type": 21, + "default_value": "API_KEY", + "description": "a string variable contains secret like API key" + } ] ``` ### Generate JSON file 1. Manually create the file based on above structure 2. By code: + +#### 2.1. Fully setup ```python from clarifai.models.model_serving.model_config.inference_parameter import InferParamManager, InferParam, InferParamType -# 2.1. 
Fully setup params = [ InferParam( path="boolean_var", @@ -65,18 +70,65 @@ params = [ default_value=9.9, description="a float number varaiabe" ), + InferParam( + path="secret_string_var", + field_type=InferParamType.ENCRYPTED_STRING, + default_value="API_KEY", + description="a string variable contains secret like API key" + ), ] ipm = InferParamManager(params=params) ipm.export("your_file.json") +``` + +##### 2.2. Shorten +`NOTE`: in this way the `description` field will be set as empty aka "". +*You need to modify* `description` in order to be able to upload the settings to Clarifai. + +`NOTE`: in this way the `ENCRYPTED_STRING` type must be defined with a "_" prefix -# 2.2. Shorten -# `NOTE`: in this way `description` field will be set as empty aka "" -# *You need to modify* `description` in order to be able to upload the settings to Clarifai -params = dict(boolean_var=True, string_var="string_1", number_var=9.9) +```python +params = dict(boolean_var=True, string_var="string_1", number_var=9.9, _secret_string_var="YOUR_KEY") ipm = InferParamManager.from_kwargs(**params) ipm.export("your_file.json") ``` 3. In `test.py`. You can define your parameters like `2.2. Shorten` in `inference_parameters` attribute of `CustomTestInferenceModel`, the file will be generated when you run the test. 
Keep in mind to change `description` + +### Usage +Your defined parameters will be passed through `kwargs` of `InferenceModel.get_predictions` method +in `inference.py` +```python +class InferenceModel: + def __init__(): + # initialization + self.model = YourModel() + + @some_wrapper_function + def get_predictions(self, input_data, **kwargs): + # `kwargs` contains your inference parameters + + # get a value from kwargs + number_var = kwargs.get("number_var", 9.9) + + # pass everything to a function + output = self.model.predict(input_data, **kwargs) + + return SomeOutputType(output) + +``` + +in `test.py` +```python +class CustomTestInferenceModel: + inference_parameters = "" # input a path of json file from `2.1` or a dict from `2.2` + + ... + + def test_something(self): + input = ... + output = self.triton_get_predictions(input, number_var=1, string_var="test", _secret="KEY") + self.assert(...) +``` diff --git a/clarifai/models/model_serving/model_config/inference_parameter.py b/clarifai/models/model_serving/model_config/inference_parameter.py index 211d4161..24228415 100644 --- a/clarifai/models/model_serving/model_config/inference_parameter.py +++ b/clarifai/models/model_serving/model_config/inference_parameter.py @@ -8,6 +8,7 @@ class InferParamType: BOOL: int = 1 STRING: int = 2 NUMBER: int = 3 + ENCRYPTED_STRING: int = 21 @dataclass @@ -26,11 +27,13 @@ def __post_init__(self): def validate_type(self, value): if self.field_type == InferParamType.BOOL: assert isinstance(value, bool), f"`field_type` is `BOOL` (bool), however got {type(value)}" - elif self.field_type == InferParamType.STRING: - assert isinstance(value, str), f"`field_type` is `STRING` (str), however got {type(value)}" - else: + elif self.field_type == InferParamType.NUMBER: assert isinstance(value, float) or isinstance( value, int), f"`field_type` is `NUMBER` (float or int), however got {type(value)}" + else: + assert isinstance( + value, + str), f"`field_type` is `STRING` or `ENCRYPTED_STRING` 
(str), however got {type(value)}" def todict(self): return {k: v for k, v in asdict(self).items()} @@ -46,7 +49,9 @@ class InferParamManager: def from_kwargs(cls, **kwargs): params = list() for k, v in kwargs.items(): - if isinstance(v, str): + if isinstance(v, str) and k.startswith("_"): + _type = InferParamType.ENCRYPTED_STRING + elif isinstance(v, str): _type = InferParamType.STRING elif isinstance(v, bool): _type = InferParamType.BOOL @@ -85,9 +90,9 @@ def validate(self, **kwargs) -> dict: output_kwargs = {k: v.default_value for k, v in self._dict_params.items()} assert not (kwargs != {} and self.params == []), "kwargs are rejected since `params` is empty" for key, value in kwargs.items(): - #assert key in self._dict_params, f"param `{key}` is not in setting: {list(self._dict_params.keys())}" - #if key in self._dict_params: - # self._dict_params[key].validate_type(value) + assert key in self._dict_params, f"param `{key}` is not in setting: {list(self._dict_params.keys())}" + if key in self._dict_params: + self._dict_params[key].validate_type(value) output_kwargs.update({key: value}) return output_kwargs diff --git a/clarifai/models/model_serving/models/model_types.py b/clarifai/models/model_serving/models/model_types.py index 8b4509dd..8515caef 100644 --- a/clarifai/models/model_serving/models/model_types.py +++ b/clarifai/models/model_serving/models/model_types.py @@ -238,8 +238,8 @@ def parse_predictions(self, input_data: np.ndarray, *args, **kwargs): def multimodal_embedder(func: Callable): """ - Visual embedder type output parser. - Generates embeddings for an input image. + Multimodal embedder type output parser. + Generates embeddings for image or text input. 
""" @wraps(func) diff --git a/clarifai/models/model_serving/models/test.py b/clarifai/models/model_serving/models/test.py index 7e30c9e5..1137dba2 100644 --- a/clarifai/models/model_serving/models/test.py +++ b/clarifai/models/model_serving/models/test.py @@ -43,7 +43,7 @@ def test_visual_classifier(self): # Insert your inference parameters json path here # or insert a dictionary of your_parameter_name and value, e.g dict(x=1.5, y="text", c=True) # or Leave it as "" if you don't have it. - inference_paramters = "" + inference_parameters = "" ########### Initialization. Do not change it ########### __test__ = True @@ -55,7 +55,7 @@ def setUp(self) -> None: model_type, repo_version_dir=os.path.dirname(__file__), is_instance_kind_gpu=True, - inference_parameters=self.inference_paramters) + inference_parameters=self.inference_parameters) ########################################################