1 change: 1 addition & 0 deletions python/hsml/core/serving_api.py
@@ -81,6 +81,7 @@ def get(self, name: str):
deployment_instance = deployment.Deployment.from_response_json(deployment_json)
deployment_instance.model_registry_id = _client._project_id
deployment_instance.project_name = _client._project_name
print(deployment_json)
return deployment_instance

def get_all(self, model_name: str = None, status: str = None):
11 changes: 10 additions & 1 deletion python/hsml/deployment.py
@@ -539,9 +539,18 @@ def environment(self):
def environment(self, environment: str):
self._predictor.environment = environment

@property
def env_vars(self):
"""Environment variables of the deployment"""
return self._predictor.env_vars

@env_vars.setter
def env_vars(self, env_vars: dict):
self._predictor.env_vars = env_vars

@property
def project_namespace(self):
"""Name of inference environment"""
"""Name of kubernetes namespace the project is in"""
return self._predictor.project_namespace

@project_namespace.setter
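A minimal usage sketch of the new `Deployment.env_vars` property; the connection setup, deployment name, and variable values are placeholders rather than part of this change:

```python
import hsml

# Standard hsml connection setup (assumed).
connection = hsml.connection()
ms = connection.get_model_serving()

# "mydeployment" is a placeholder name for an existing deployment.
deployment = ms.get_deployment("mydeployment")

# The property delegates to the underlying predictor.
print(deployment.env_vars)

# Assumes env_vars is a dict of variable names to values.
deployment.env_vars = {"LOG_LEVEL": "debug"}
deployment.save()  # persist the updated configuration
```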
2 changes: 2 additions & 0 deletions python/hsml/model.py
@@ -208,6 +208,7 @@ def deploy(
transformer: Optional[Union[Transformer, dict]] = None,
api_protocol: Optional[str] = IE.API_PROTOCOL_REST,
environment: Optional[str] = None,
env_vars: Optional[dict] = None,
) -> deployment.Deployment:
"""Deploy the model.

@@ -266,6 +267,7 @@
transformer=transformer,
api_protocol=api_protocol,
environment=environment,
env_vars=env_vars,
)

return predictor.deploy()
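A sketch of passing the new `env_vars` argument through `Model.deploy()`; the model name, version, and variable values are assumptions for illustration:

```python
import hsml

connection = hsml.connection()
mr = connection.get_model_registry()

# Placeholder: any model already registered in the model registry.
model = mr.get_model("my_model", version=1)

# env_vars is forwarded to the predictor created by deploy().
deployment = model.deploy(
    name="mymodel",
    env_vars={"MODEL_CACHE_DIR": "/tmp/model_cache", "LOG_LEVEL": "info"},
)
```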
6 changes: 6 additions & 0 deletions python/hsml/model_serving.py
@@ -171,6 +171,7 @@ def create_predictor(
transformer: Optional[Union[Transformer, dict]] = None,
api_protocol: Optional[str] = IE.API_PROTOCOL_REST,
environment: Optional[str] = None,
env_vars: Optional[dict] = None,
) -> Predictor:
"""Create a Predictor metadata object.

@@ -212,6 +213,7 @@
transformer: Transformer to be deployed together with the predictor.
api_protocol: API protocol to be enabled in the deployment (i.e., 'REST' or 'GRPC'). Defaults to 'REST'.
environment: The project Python environment to use
env_vars: Additional environment variables to set in the deployment

# Returns
`Predictor`. The predictor metadata object.
@@ -232,6 +234,7 @@ def create_predictor(
transformer=transformer,
api_protocol=api_protocol,
environment=environment,
env_vars=env_vars,
)

@usage.method_logger
@@ -313,6 +316,7 @@ def create_endpoint(
inference_batcher: Optional[Union[InferenceBatcher, dict]] = None,
api_protocol: Optional[str] = IE.API_PROTOCOL_REST,
environment: Optional[str] = None,
env_vars: Optional[dict] = None,
) -> Predictor:
"""Create an Entrypoint metadata object.

@@ -341,6 +345,7 @@
inference_batcher: Inference batcher configuration.
api_protocol: API protocol to be enabled in the deployment (i.e., 'REST' or 'GRPC'). Defaults to 'REST'.
environment: The project Python environment to use
env_vars: Additional environment variables to set in the deployment

# Returns
`Predictor`. The predictor metadata object.
@@ -355,6 +360,7 @@
inference_batcher=inference_batcher,
api_protocol=api_protocol,
environment=environment,
env_vars=env_vars,
)

@usage.method_logger
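The same argument is also available when building the predictor explicitly; a sketch using `create_predictor()`, with placeholder names:

```python
import hsml

connection = hsml.connection()
mr = connection.get_model_registry()
ms = connection.get_model_serving()

model = mr.get_model("my_model", version=1)  # placeholder model

# env_vars is passed straight through to the Predictor constructor (see predictor.py below).
predictor = ms.create_predictor(
    model,
    env_vars={"WORKER_THREADS": "4"},
)
deployment = predictor.deploy()
```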
15 changes: 15 additions & 0 deletions python/hsml/predictor.py
@@ -65,6 +65,7 @@ def __init__(
api_protocol: Optional[str] = INFERENCE_ENDPOINTS.API_PROTOCOL_REST,
environment: Optional[str] = None,
project_namespace: str = None,
env_vars: Optional[dict] = None,
**kwargs,
):
serving_tool = (
@@ -104,6 +105,7 @@
self._environment = environment
self._project_namespace = project_namespace
self._project_name = None
self._env_vars = env_vars

def deploy(self):
"""Create a deployment for this predictor and persists it in the Model Serving.
@@ -302,6 +304,8 @@ def extract_fields_from_json(cls, json_decamelized):
if "environment_dto" in json_decamelized:
environment = json_decamelized.pop("environment_dto")
kwargs["environment"] = environment["name"]
if "env_vars" in json_decamelized:
kwargs["env_vars"] = json_decamelized.pop("env_vars")
kwargs["project_namespace"] = json_decamelized.pop("project_namespace")
return kwargs

@@ -337,6 +341,8 @@ def to_dict(self):
json = {**json, **{"modelVersion": self._model_version}}
if self.model_framework is not None:
json = {**json, **{"modelFramework": self._model_framework}}
if self.env_vars is not None:
json = {**json, **{"envVars": self._env_vars}}
if self.environment is not None:
json = {**json, **{"environmentDTO": {"name": self._environment}}}
if self._resources is not None:
@@ -533,6 +539,15 @@ def api_protocol(self):
def api_protocol(self, api_protocol):
self._api_protocol = api_protocol

@property
def env_vars(self):
"""Environment variables of the inference environment"""
return self._env_vars

@env_vars.setter
def env_vars(self, env_vars):
self._env_vars = env_vars

@property
def environment(self):
"""Name of the inference environment"""
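The new property also makes it possible to set the variables on an already-created predictor before deploying it; a short sketch reusing the objects from the previous example:

```python
# Reuses `ms` and `model` from the create_predictor sketch above.
predictor = ms.create_predictor(model)
predictor.env_vars = {"BATCH_SIZE": "32"}  # stored in Predictor._env_vars
deployment = predictor.deploy()  # serialized under "envVars" by to_dict()
```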
2 changes: 1 addition & 1 deletion python/hsml/predictor_state.py
@@ -100,7 +100,7 @@ def to_dict(self):

@property
def available_predictor_instances(self):
"""Available predicotr instances."""
"""Available predictor instances."""
return self._available_predictor_instances

@property