[Py OV] Update .runtime in strings #28479

Merged: 4 commits, Jan 16, 2025
56 changes: 28 additions & 28 deletions src/bindings/python/src/openvino/_ov_api.py
@@ -63,13 +63,13 @@ def clone(self) -> "Model":
return Model(self.__model.clone())

def __copy__(self) -> "Model":
raise TypeError("Cannot copy 'openvino.runtime.Model'. Please, use deepcopy instead.")
raise TypeError("Cannot copy 'openvino.Model'. Please, use deepcopy instead.")

def __deepcopy__(self, memo: Dict) -> "Model":
"""Returns a deepcopy of Model.

:return: A copy of Model.
:rtype: openvino.runtime.Model
:rtype: openvino.Model
"""
return Model(self.__model.clone())

@@ -108,14 +108,14 @@ def infer(

(1) `int`
(2) `str`
(3) `openvino.runtime.ConstOutput`
(3) `openvino.ConstOutput`

The allowed types of values in the `inputs` are:

(1) `numpy.ndarray` and all the types that are castable to it, e.g. `torch.Tensor`
(2) `openvino.runtime.Tensor`
(2) `openvino.Tensor`

Can be called with only one `openvino.runtime.Tensor` or `numpy.ndarray`,
Can be called with only one `openvino.Tensor` or `numpy.ndarray`,
it will work only with one-input models. When model has more inputs,
function throws error.
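
For context, a minimal sketch of how these input formats are typically passed to InferRequest.infer(); the model path, device name, and input shape are placeholders, not part of this change:

import numpy as np
import openvino as ov

core = ov.Core()
compiled = core.compile_model("model.xml", "CPU")   # hypothetical model path
request = compiled.create_infer_request()

data = np.zeros((1, 3, 224, 224), dtype=np.float32)  # placeholder shape

# Keys may be an input index, an input tensor name, or an openvino.ConstOutput port.
results = request.infer({0: data})
results = request.infer({compiled.input(0): ov.Tensor(data)})

# A single array (or Tensor) is accepted only for single-input models.
results = request.infer(data)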

@@ -190,14 +190,14 @@ def start_async(

(1) `int`
(2) `str`
(3) `openvino.runtime.ConstOutput`
(3) `openvino.ConstOutput`

The allowed types of values in the `inputs` are:

(1) `numpy.ndarray` and all the types that are castable to it, e.g. `torch.Tensor`
(2) `openvino.runtime.Tensor`
(2) `openvino.Tensor`

Can be called with only one `openvino.runtime.Tensor` or `numpy.ndarray`,
Can be called with only one `openvino.Tensor` or `numpy.ndarray`,
it will work only with one-input models. When model has more inputs,
function throws error.
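
A brief, hedged sketch of the asynchronous variant, reusing `request` and `data` from the sketch above:

# Start inference without blocking, then wait for completion.
request.start_async({0: data})
request.wait()
output = request.get_output_tensor(0).data   # or inspect request.results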

@@ -241,7 +241,7 @@ def get_compiled_model(self) -> "CompiledModel":
"""Gets the compiled model this InferRequest is using.

:return: a CompiledModel object
:rtype: openvino.runtime.ie_api.CompiledModel
:rtype: openvino.CompiledModel
"""
return CompiledModel(super().get_compiled_model())

@@ -250,7 +250,7 @@ def results(self) -> OVDict:
"""Gets all outputs tensors of this InferRequest.

:return: Dictionary of results from output tensors with ports as keys.
:rtype: Dict[openvino.runtime.ConstOutput, numpy.array]
:rtype: Dict[openvino.ConstOutput, numpy.array]
"""
return OVDict(super().results)

@@ -277,15 +277,15 @@ def create_infer_request(self) -> InferRequest:
The created request has allocated input and output tensors.

:return: New InferRequest object.
:rtype: openvino.runtime.InferRequest
:rtype: openvino.InferRequest
"""
return InferRequest(super().create_infer_request())

def query_state(self) -> None:
"""Gets state control interface for the underlaying infer request.

:return: List of VariableState objects.
:rtype: List[openvino.runtime.VariableState]
:rtype: List[openvino.VariableState]
"""
if self._infer_request is None:
self._infer_request = self.create_infer_request()
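
As an illustration only, a hedged sketch of querying state on an infer request; it assumes `compiled` is a CompiledModel of a stateful model (one that contains ReadValue/Assign operations):

infer_request = compiled.create_infer_request()
for state in infer_request.query_state():   # each item is an openvino.VariableState
    print(state.name, state.state.shape)    # current state tensor
    state.reset()                           # reset the variable to its initial value
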
@@ -316,14 +316,14 @@ def infer_new_request(self, inputs: Any = None) -> OVDict:

(1) `int`
(2) `str`
(3) `openvino.runtime.ConstOutput`
(3) `openvino.ConstOutput`

The allowed types of values in the `inputs` are:

(1) `numpy.ndarray` and all the types that are castable to it, e.g. `torch.Tensor`
(2) `openvino.runtime.Tensor`
(2) `openvino.Tensor`

Can be called with only one `openvino.runtime.Tensor` or `numpy.ndarray`,
Can be called with only one `openvino.Tensor` or `numpy.ndarray`,
it will work only with one-input models. When model has more inputs,
function throws error.

@@ -361,14 +361,14 @@ def __call__(

(1) `int`
(2) `str`
(3) `openvino.runtime.ConstOutput`
(3) `openvino.ConstOutput`

The allowed types of values in the `inputs` are:

(1) `numpy.ndarray` and all the types that are castable to it, e.g. `torch.Tensor`
(2) `openvino.runtime.Tensor`
(2) `openvino.Tensor`

Can be called with only one `openvino.runtime.Tensor` or `numpy.ndarray`,
Can be called with only one `openvino.Tensor` or `numpy.ndarray`,
it will work only with one-input models. When model has more inputs,
function throws error.
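
For reference, a hedged sketch of running inference through CompiledModel directly, which shares the same input conventions; it reuses `compiled` from the earlier sketch and the shape is a placeholder:

data = np.zeros((1, 3, 224, 224), dtype=np.float32)   # placeholder shape

# infer_new_request() creates a fresh request for each call.
results = compiled.infer_new_request({0: data})

# __call__ accepts the same input formats and reuses an internal request.
results = compiled({0: data})
print(results[compiled.output(0)])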

@@ -448,7 +448,7 @@ def __iter__(self) -> Iterable[InferRequest]:
will put the parent AsyncInferQueue object in an invalid state.

:return: a generator that yields InferRequests.
:rtype: Iterable[openvino.runtime.InferRequest]
:rtype: Iterable[openvino.InferRequest]
"""
return (InferRequest(x) for x in super().__iter__())

@@ -462,7 +462,7 @@ def __getitem__(self, i: int) -> InferRequest:
:param i: InferRequest id.
:type i: int
:return: InferRequests from the pool with given id.
:rtype: openvino.runtime.InferRequest
:rtype: openvino.InferRequest
"""
return InferRequest(super().__getitem__(i))

@@ -478,14 +478,14 @@ def start_async(

(1) `int`
(2) `str`
(3) `openvino.runtime.ConstOutput`
(3) `openvino.ConstOutput`

The allowed types of values in the `inputs` are:

(1) `numpy.ndarray` and all the types that are castable to it, e.g. `torch.Tensor`
(2) `openvino.runtime.Tensor`
(2) `openvino.Tensor`

Can be called with only one `openvino.runtime.Tensor` or `numpy.ndarray`,
Can be called with only one `openvino.Tensor` or `numpy.ndarray`,
it will work only with one-input models. When model has more inputs,
function throws error.
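
A compact, hedged example of the AsyncInferQueue workflow described above; the model path, number of jobs, and input data are illustrative:

import numpy as np
import openvino as ov

core = ov.Core()
compiled = core.compile_model("model.xml", "CPU")   # hypothetical model path

queue = ov.AsyncInferQueue(compiled, jobs=4)

def on_done(request, userdata):
    # Called when a request finishes; userdata is whatever was passed to start_async.
    print(userdata, request.get_output_tensor(0).data.shape)

queue.set_callback(on_done)

for i in range(8):
    data = np.random.rand(1, 3, 224, 224).astype(np.float32)   # placeholder shape
    queue.start_async({0: data}, userdata=i)

queue.wait_all()   # block until every request in the pool has finished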

@@ -574,7 +574,7 @@ def compile_model(

:param model: Model acquired from read_model function or a path to a model in IR / ONNX / PDPD /
TF and TFLite format.
:type model: Union[openvino.runtime.Model, str, pathlib.Path]
:type model: Union[openvino.Model, str, pathlib.Path]
:param device_name: Optional. Name of the device to load the model to. If not specified,
the default OpenVINO device will be selected by AUTO plugin.
:type device_name: str
@@ -584,7 +584,7 @@ def compile_model(
:param weights: Optional. Weights of model in memory to be loaded to the model.
:type weights: bytes, optional, keyword-only
:return: A compiled model.
:rtype: openvino.runtime.CompiledModel
:rtype: openvino.CompiledModel
"""
if isinstance(model, Model):
model = model._Model__model
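
A minimal, hedged usage sketch for compile_model; the model path, device names, and config values are placeholders:

import openvino as ov

core = ov.Core()

# Compile directly from a file, or from a Model returned by read_model().
compiled = core.compile_model("model.xml", "CPU")

model = core.read_model("model.xml")
compiled = core.compile_model(model, "AUTO", config={"PERFORMANCE_HINT": "LATENCY"})
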
@@ -635,7 +635,7 @@ def import_model(
(property name, property value) relevant only for this load operation.
:type config: dict, optional
:return: A compiled model.
:rtype: openvino.runtime.CompiledModel
:rtype: openvino.CompiledModel

:Example:

@@ -680,15 +680,15 @@ def compile_model(

:param model: Model acquired from read_model function or a path to a model in IR / ONNX / PDPD /
TF and TFLite format.
:type model: Union[openvino.runtime.Model, str, pathlib.Path]
:type model: Union[openvino.Model, str, pathlib.Path]
:param device_name: Optional. Name of the device to load the model to. If not specified,
the default OpenVINO device will be selected by AUTO plugin.
:type device_name: str
:param config: Optional dict of pairs:
(property name, property value) relevant only for this load operation.
:type config: dict, optional
:return: A compiled model.
:rtype: openvino.runtime.CompiledModel
:rtype: openvino.CompiledModel

"""
core = Core()
6 changes: 3 additions & 3 deletions src/bindings/python/src/openvino/helpers/packing.py
@@ -21,7 +21,7 @@ def pack_data(array: np.ndarray, type: Type) -> np.ndarray:
:param array: numpy array with values to pack.
:type array: numpy array
:param type: Type to interpret the array values. Type must be u1, u4, i4, nf4 or f4e2m1.
:type type: openvino.runtime.Type
:type type: openvino.Type
"""
assert type in [Type.u1, Type.u4, Type.i4, Type.nf4, Type.f4e2m1], "Packing algorithm for the" "data types stored in 1, 2 or 4 bits"

@@ -58,9 +58,9 @@ def unpack_data(array: np.ndarray, type: Type, shape: Union[list, Shape]) -> np.ndarray:
:param array: numpy array to unpack.
:type array: numpy array
:param type: Type to extract from array values. Type must be u1, u4, i4, nf4 or f4e2m1.
:type type: openvino.runtime.Type
:type type: openvino.Type
:param shape: the new shape for the unpacked array.
:type shape: Union[list, openvino.runtime.Shape]
:type shape: Union[list, openvino.Shape]
"""
assert type in [Type.u1, Type.u4, Type.i4, Type.nf4, Type.f4e2m1], "Unpacking algorithm for the" "data types stored in 1, 2 or 4 bits"
unpacked = np.unpackbits(array.view(np.uint8))
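
To make the packing/unpacking contract concrete, a small hedged round-trip sketch; the values are chosen to fit in 4 bits:

import numpy as np
from openvino import Type, Shape
from openvino.helpers import pack_data, unpack_data

values = np.array([0, 1, 2, 3, 4, 5, 6, 7], dtype=np.uint8)

packed = pack_data(values, Type.u4)                 # two u4 values per byte
restored = unpack_data(packed, Type.u4, Shape([8])) # restore the original layout

assert np.array_equal(values, restored.astype(np.uint8))
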
@@ -36,7 +36,7 @@ class OVDict(Mapping):
This class is a dict-like object. It provides possibility to
address data tensors with three key types:
* `openvino.runtime.ConstOutput` - port of the output
* `openvino.ConstOutput` - port of the output
* `int` - index of the output
* `str` - names of the output
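
For illustration, a hedged sketch of the three access styles; it assumes `compiled` and `data` from the earlier sketches, and the output name is hypothetical:

results = compiled({0: data})

by_port = results[compiled.output(0)]   # openvino.ConstOutput key
by_index = results[0]                   # integer index
by_name = results["output_name"]        # hypothetical tensor name
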
12 changes: 6 additions & 6 deletions src/bindings/python/src/pyopenvino/core/async_infer_queue.cpp
@@ -166,7 +166,7 @@ class AsyncInferQueue {

void regclass_AsyncInferQueue(py::module m) {
py::class_<AsyncInferQueue, std::shared_ptr<AsyncInferQueue>> cls(m, "AsyncInferQueue");
cls.doc() = "openvino.runtime.AsyncInferQueue represents a helper that creates a pool of asynchronous"
cls.doc() = "openvino.AsyncInferQueue represents a helper that creates a pool of asynchronous"
"InferRequests and provides synchronization functions to control flow of a simple pipeline.";

cls.def(py::init<ov::CompiledModel&, size_t>(),
@@ -176,11 +176,11 @@ void regclass_AsyncInferQueue(py::module m) {
Creates AsyncInferQueue.

:param model: Model to be used to create InferRequests in a pool.
:type model: openvino.runtime.CompiledModel
:type model: openvino.CompiledModel
:param jobs: Number of InferRequests objects in a pool. If 0, jobs number
will be set automatically to the optimal number. Default: 0
:type jobs: int
:rtype: openvino.runtime.AsyncInferQueue
:rtype: openvino.AsyncInferQueue
)");

// Overload for single input, it will throw error if a model has more than one input.
@@ -216,7 +216,7 @@ void regclass_AsyncInferQueue(py::module m) {

:param inputs: Data to set on single input tensor of next available InferRequest from
AsyncInferQueue's pool.
:type inputs: openvino.runtime.Tensor
:type inputs: openvino.Tensor
:param userdata: Any data that will be passed to a callback
:type userdata: Any
:rtype: None
@@ -262,7 +262,7 @@ void regclass_AsyncInferQueue(py::module m) {

:param inputs: Data to set on input tensors of next available InferRequest from
AsyncInferQueue's pool.
:type inputs: dict[Union[int, str, openvino.runtime.ConstOutput] : openvino.runtime.Tensor]
:type inputs: dict[Union[int, str, openvino.ConstOutput] : openvino.Tensor]
:param userdata: Any data that will be passed to a callback
:rtype: None

@@ -348,7 +348,7 @@ void regclass_AsyncInferQueue(py::module m) {
:param i: InferRequest id
:type i: int
:return: InferRequests from the pool with given id.
:rtype: openvino.runtime.InferRequest
:rtype: openvino.InferRequest
)");

cls.def_property_readonly(
2 changes: 1 addition & 1 deletion src/bindings/python/src/pyopenvino/core/common.cpp
@@ -578,7 +578,7 @@ ov::PartialShape partial_shape_from_list(const py::list& shape) {
} else {
throw py::type_error("Incorrect type " + std::string(py::str(dim.get_type())) +
" for dimension. Expected types are: "
"int, str, openvino.runtime.Dimension, list/tuple with lower and upper values for "
"int, str, openvino.Dimension, list/tuple with lower and upper values for "
"dynamic dimension.");
}
}
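
For reference, a hedged Python-side sketch of the dimension formats this conversion accepts:

import openvino as ov

# Each element may be an int, a string, an openvino.Dimension,
# or a (lower, upper) pair describing a bounded dynamic dimension; -1 marks a fully dynamic one.
shape = ov.PartialShape([1, "3", ov.Dimension(224), (1, 512), -1])
print(shape)   # e.g. [1,3,224,1..512,?]
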
22 changes: 11 additions & 11 deletions src/bindings/python/src/pyopenvino/core/compiled_model.cpp
@@ -16,7 +16,7 @@ namespace py = pybind11;

void regclass_CompiledModel(py::module m) {
py::class_<ov::CompiledModel, std::shared_ptr<ov::CompiledModel>> cls(m, "CompiledModel");
cls.doc() = "openvino.runtime.CompiledModel represents Model that is compiled for a specific device by applying "
cls.doc() = "openvino.CompiledModel represents Model that is compiled for a specific device by applying "
"multiple optimization transformations, then mapping to compute kernels.";

cls.def(py::init([](ov::CompiledModel& other) {
@@ -40,7 +40,7 @@ void regclass_CompiledModel(py::module m) {
The created request has allocated input and output tensors.

:return: New InferRequest object.
:rtype: openvino.runtime.InferRequest
:rtype: openvino.InferRequest
)");

cls.def(
@@ -174,7 +174,7 @@ void regclass_CompiledModel(py::module m) {
is optimized and which kernels, element types, and layouts are selected.

:return: Model, containing Executable Graph information.
:rtype: openvino.runtime.Model
:rtype: openvino.Model
)");

cls.def("release_memory",
@@ -193,7 +193,7 @@ void regclass_CompiledModel(py::module m) {
Gets all inputs of a compiled model.

:return: Inputs of a compiled model.
:rtype: List[openvino.runtime.ConstOutput]
:rtype: List[openvino.ConstOutput]
)");

cls.def("input",
@@ -203,7 +203,7 @@ void regclass_CompiledModel(py::module m) {
If a model has more than one input, this method throws an exception.

:return: A compiled model input.
:rtype: openvino.runtime.ConstOutput
:rtype: openvino.ConstOutput
)");

cls.def("input",
@@ -216,7 +216,7 @@ void regclass_CompiledModel(py::module m) {
:param index: An input index.
:type index: int
:return: A compiled model input.
:rtype: openvino.runtime.ConstOutput
:rtype: openvino.ConstOutput
)");

cls.def(
@@ -230,7 +230,7 @@ void regclass_CompiledModel(py::module m) {
:param tensor_name: An input tensor name.
:type tensor_name: str
:return: A compiled model input.
:rtype: openvino.runtime.ConstOutput
:rtype: openvino.ConstOutput
)");

cls.def_property_readonly("outputs",
@@ -239,7 +239,7 @@ void regclass_CompiledModel(py::module m) {
Gets all outputs of a compiled model.

:return: Outputs of a compiled model.
:rtype: List[openvino.runtime.ConstOutput]
:rtype: List[openvino.ConstOutput]
)");

cls.def("output",
@@ -249,7 +249,7 @@ void regclass_CompiledModel(py::module m) {
If the model has more than one output, this method throws an exception.

:return: A compiled model output.
:rtype: openvino.runtime.ConstOutput
:rtype: openvino.ConstOutput
)");

cls.def("output",
@@ -262,7 +262,7 @@ void regclass_CompiledModel(py::module m) {
:param index: An output index.
:type index: int
:return: A compiled model output.
:rtype: openvino.runtime.ConstOutput
:rtype: openvino.ConstOutput
)");

cls.def("output",
@@ -276,7 +276,7 @@ void regclass_CompiledModel(py::module m) {
:param tensor_name: An output tensor name.
:type tensor_name: str
:return: A compiled model output.
:rtype: openvino.runtime.ConstOutput
:rtype: openvino.ConstOutput
)");

cls.def("__repr__", [](const ov::CompiledModel& self) {