Module aiolirest.models.autoscaling
HPE Machine Learning Inference Software (MLIS/Aioli)
HPE MLIS is Aioli – The AI On-line Inference Platform that enables easy deployment, tracking, and serving of your packaged models regardless of your preferred AI framework.
The version of the OpenAPI document: 1.0.0 Contact: community@determined-ai Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
Expand source code
# coding: utf-8
"""
HPE Machine Learning Inference Software (MLIS/Aioli)
HPE MLIS is *Aioli* -- The AI On-line Inference Platform that enables easy deployment, tracking, and serving of your packaged models regardless of your preferred AI framework.
The version of the OpenAPI document: 1.0.0
Contact: community@determined-ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
""" # noqa: E501
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Any, ClassVar, Dict, List, Optional
from pydantic import BaseModel, StrictInt, StrictStr
from pydantic import Field
try:
from typing import Self
except ImportError:
from typing_extensions import Self
class Autoscaling(BaseModel):
    """
    Configuration that controls scaling up/down.
    """  # noqa: E501
    max_replicas: Optional[StrictInt] = Field(default=None, description="Maximum number of replicas created based upon demand.", alias="maxReplicas")
    metric: Optional[StrictStr] = Field(default='concurrency', description="Metric name which controls autoscaling. Must be one of the values: (concurrency, rps, cpu, memory).")
    min_replicas: Optional[StrictInt] = Field(default=None, description="Minimum number of replicas. A value of zero, enables scale-down-to-zero for the service. By default, scale-down happens when no requests have occurred for more than 30 seconds. Requests that occur when there are zero replicas may experience longer delays while the container is started. During this time the request is blocked. On rollout of a new service version, a value greater than zero will cause the prior version to remain in standby until garbage collected (up to 24 hours).", alias="minReplicas")
    target: Optional[StrictInt] = Field(default=None, description="Metric target value. When the metric value reaches the target, additional replicas will be created. A concurrency target of 1, will cause a second replica to be started whenever there is an active request.")

    # Ordered list of the JSON (alias) property names this model serializes.
    __properties: ClassVar[List[str]] = ["maxReplicas", "metric", "minReplicas", "target"]

    model_config = {
        "populate_by_name": True,   # accept both field names and aliases on input
        "validate_assignment": True  # re-validate fields on attribute assignment
    }

    def to_str(self) -> str:
        """Return the string representation of the model using alias names."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Return the JSON representation of the model using alias names."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of Autoscaling from a JSON string.

        Returns ``None`` when the JSON document is ``null`` (mirrors
        :meth:`from_dict`); raises on invalid JSON or validation failure.
        """
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias names.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        # exclude_none drops unset optional fields from the serialized form.
        return self.model_dump(
            by_alias=True,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict]) -> Optional[Self]:
        """Create an instance of Autoscaling from a dict.

        Returns ``None`` when ``obj`` is ``None``. Non-dict input is passed
        straight to pydantic validation. (Annotations widened to Optional to
        match this behavior — the generated code returned None under a bare
        ``Self`` annotation.)
        """
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "maxReplicas": obj.get("maxReplicas"),
            # Preserve the field default when the key is absent or null.
            "metric": obj.get("metric") if obj.get("metric") is not None else 'concurrency',
            "minReplicas": obj.get("minReplicas"),
            "target": obj.get("target")
        })
Classes
class Autoscaling (**data: Any)
-
Configuration that controls scaling up/down.
Create a new model by parsing and validating input data from keyword arguments.
Raises [ValidationError][pydantic_core.ValidationError] if the input data cannot be validated to form a valid model.
`self` is explicitly positional-only to allow `self` as a field name.
Expand source code
class Autoscaling(BaseModel): """ Configuration that controls scaling up/down. """ # noqa: E501 max_replicas: Optional[StrictInt] = Field(default=None, description="Maximum number of replicas created based upon demand.", alias="maxReplicas") metric: Optional[StrictStr] = Field(default='concurrency', description="Metric name which controls autoscaling. Must be one of the values: (concurrency, rps, cpu, memory).") min_replicas: Optional[StrictInt] = Field(default=None, description="Minimum number of replicas. A value of zero, enables scale-down-to-zero for the service. By default, scale-down happens when no requests have occurred for more than 30 seconds. Requests that occur when there are zero replicas may experience longer delays while the container is started. During this time the request is blocked. On rollout of a new service version, a value greater than zero will cause the prior version to remain in standby until garbage collected (up to 24 hours).", alias="minReplicas") target: Optional[StrictInt] = Field(default=None, description="Metric target value. When the metric value reaches the target, additional replicas will be created. 
A concurrency target of 1, will cause a second replica to be started whenever there is an active request.") __properties: ClassVar[List[str]] = ["maxReplicas", "metric", "minReplicas", "target"] model_config = { "populate_by_name": True, "validate_assignment": True } def to_str(self) -> str: """Returns the string representation of the model using alias""" return pprint.pformat(self.model_dump(by_alias=True)) def to_json(self) -> str: """Returns the JSON representation of the model using alias""" # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict()) @classmethod def from_json(cls, json_str: str) -> Self: """Create an instance of Autoscaling from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: """Return the dictionary representation of the model using alias. This has the following differences from calling pydantic's `self.model_dump(by_alias=True)`: * `None` is only added to the output dict for nullable fields that were set at model initialization. Other fields with value `None` are ignored. """ _dict = self.model_dump( by_alias=True, exclude={ }, exclude_none=True, ) return _dict @classmethod def from_dict(cls, obj: Dict) -> Self: """Create an instance of Autoscaling from a dict""" if obj is None: return None if not isinstance(obj, dict): return cls.model_validate(obj) _obj = cls.model_validate({ "maxReplicas": obj.get("maxReplicas"), "metric": obj.get("metric") if obj.get("metric") is not None else 'concurrency', "minReplicas": obj.get("minReplicas"), "target": obj.get("target") }) return _obj
Ancestors
- pydantic.main.BaseModel
Class variables
var max_replicas : Optional[int]
var metric : Optional[str]
var min_replicas : Optional[int]
var model_computed_fields
var model_config
var model_fields
var target : Optional[int]
Static methods
def from_dict(obj: Dict) ‑> Self
-
Create an instance of Autoscaling from a dict
Expand source code
@classmethod def from_dict(cls, obj: Dict) -> Self: """Create an instance of Autoscaling from a dict""" if obj is None: return None if not isinstance(obj, dict): return cls.model_validate(obj) _obj = cls.model_validate({ "maxReplicas": obj.get("maxReplicas"), "metric": obj.get("metric") if obj.get("metric") is not None else 'concurrency', "minReplicas": obj.get("minReplicas"), "target": obj.get("target") }) return _obj
def from_json(json_str: str) ‑> Self
-
Create an instance of Autoscaling from a JSON string
Expand source code
@classmethod def from_json(cls, json_str: str) -> Self: """Create an instance of Autoscaling from a JSON string""" return cls.from_dict(json.loads(json_str))
Methods
def model_post_init(self: BaseModel, context: Any, /) ‑> None
-
This function is meant to behave like a BaseModel method to initialise private attributes.
It takes context as an argument since that's what pydantic-core passes when calling it.
Args
self
- The BaseModel instance.
context
- The context.
Expand source code
def init_private_attributes(self: BaseModel, context: Any, /) -> None: """This function is meant to behave like a BaseModel method to initialise private attributes. It takes context as an argument since that's what pydantic-core passes when calling it. Args: self: The BaseModel instance. context: The context. """ if getattr(self, '__pydantic_private__', None) is None: pydantic_private = {} for name, private_attr in self.__private_attributes__.items(): default = private_attr.get_default() if default is not PydanticUndefined: pydantic_private[name] = default object_setattr(self, '__pydantic_private__', pydantic_private)
def to_dict(self) ‑> Dict[str, Any]
-
Return the dictionary representation of the model using alias.
This has the following differences from calling pydantic's
self.model_dump(by_alias=True)
: `None` is only added to the output dict for nullable fields that were set at model initialization. Other fields with value `None`
are ignored.
Expand source code
def to_dict(self) -> Dict[str, Any]: """Return the dictionary representation of the model using alias. This has the following differences from calling pydantic's `self.model_dump(by_alias=True)`: * `None` is only added to the output dict for nullable fields that were set at model initialization. Other fields with value `None` are ignored. """ _dict = self.model_dump( by_alias=True, exclude={ }, exclude_none=True, ) return _dict
def to_json(self) ‑> str
-
Returns the JSON representation of the model using alias
Expand source code
def to_json(self) -> str: """Returns the JSON representation of the model using alias""" # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead return json.dumps(self.to_dict())
def to_str(self) ‑> str
-
Returns the string representation of the model using alias
Expand source code
def to_str(self) -> str: """Returns the string representation of the model using alias""" return pprint.pformat(self.model_dump(by_alias=True))