interface ov::ICore¶
Overview¶
Minimal ICore interface to allow plugin to get information from Core OpenVINO class. More…
#include <icore.hpp>
class ICore
{
// methods
virtual std::shared_ptr<ov::Model> read_model(
const std::string& model,
const ov::Tensor& weights,
bool frontend_mode = false
) const = 0;
virtual std::shared_ptr<ov::Model> read_model(
const std::string& model_path,
const std::string& bin_path
) const = 0;
virtual ov::SoPtr<ov::ICompiledModel> compile_model(
const std::shared_ptr<const ov::Model>& model,
const std::string& device_name,
const ov::AnyMap& config = {}
) const = 0;
virtual ov::SoPtr<ov::ICompiledModel> compile_model(
const std::shared_ptr<const ov::Model>& model,
const ov::SoPtr<ov::IRemoteContext>& context,
const ov::AnyMap& config = {}
) const = 0;
virtual ov::SoPtr<ov::ICompiledModel> compile_model(
const std::string& model_path,
const std::string& device_name,
const ov::AnyMap& config
) const = 0;
virtual ov::SoPtr<ov::ICompiledModel> compile_model(
const std::string& model_str,
const ov::Tensor& weights,
const std::string& device_name,
const ov::AnyMap& config
) const = 0;
virtual ov::SoPtr<ov::ICompiledModel> import_model(
std::istream& model,
const std::string& device_name,
const ov::AnyMap& config = {}
) const = 0;
virtual ov::SoPtr<ov::ICompiledModel> import_model(
std::istream& modelStream,
const ov::SoPtr<ov::IRemoteContext>& context,
const ov::AnyMap& config = {}
) const = 0;
virtual ov::SupportedOpsMap query_model(
const std::shared_ptr<const ov::Model>& model,
const std::string& device_name,
const ov::AnyMap& config
) const = 0;
virtual std::vector<std::string> get_available_devices() const = 0;
virtual ov::SoPtr<ov::IRemoteContext> create_context(
const std::string& device_name,
const AnyMap& args
) const = 0;
virtual bool is_new_api() const = 0;
virtual ov::SoPtr<ov::IRemoteContext> get_default_context(const std::string& device_name) const = 0;
virtual Any get_property(
const std::string& device_name,
const std::string& name,
const AnyMap& arguments
) const = 0;
template <typename T, PropertyMutability M>
T get_property(
const std::string& device_name,
const Property<T, M>& property
) const;
template <typename T, PropertyMutability M>
T get_property(
const std::string& device_name,
const Property<T, M>& property,
const AnyMap& arguments
) const;
virtual AnyMap get_supported_property(
const std::string& full_device_name,
const AnyMap& properties
) const = 0;
virtual bool device_supports_model_caching(const std::string& device_name) const = 0;
};
// direct descendants
class ICore;
Detailed Documentation¶
Minimal ICore interface to allow plugin to get information from Core OpenVINO class.
Methods¶
virtual std::shared_ptr<ov::Model> read_model(
const std::string& model,
const ov::Tensor& weights,
bool frontend_mode = false
) const = 0
Reads IR xml and bin (with the same name) files.
Parameters:
model |
string with IR |
weights |
shared pointer to constant blob with weights |
frontend_mode |
read network without post-processing or other transformations |
Returns:
shared pointer to ov::Model
virtual std::shared_ptr<ov::Model> read_model(
const std::string& model_path,
const std::string& bin_path
) const = 0
Reads IR xml and bin files.
Parameters:
model_path |
path to IR file |
bin_path |
path to bin file, if path is empty, will try to read bin file with the same name as xml and if bin file with the same name was not found, will load IR without weights. |
Returns:
shared pointer to ov::Model
virtual ov::SoPtr<ov::ICompiledModel> compile_model(
const std::shared_ptr<const ov::Model>& model,
const std::string& device_name,
const ov::AnyMap& config = {}
) const = 0
Creates a compiled model from a model object.
Users can create as many models as they need and use them simultaneously (up to the limitation of the hardware resources)
Parameters:
model |
OpenVINO Model |
device_name |
Name of device to load model to |
config |
Optional map of pairs: (config parameter name, config parameter value) relevant only for this load operation |
Returns:
A pointer to compiled model
virtual ov::SoPtr<ov::ICompiledModel> compile_model(
const std::shared_ptr<const ov::Model>& model,
const ov::SoPtr<ov::IRemoteContext>& context,
const ov::AnyMap& config = {}
) const = 0
Creates a compiled model from a model object.
Users can create as many models as they need and use them simultaneously (up to the limitation of the hardware resources)
Parameters:
model |
OpenVINO Model |
context |
“Remote” (non-CPU) accelerator device-specific execution context to use |
config |
Optional map of pairs: (config parameter name, config parameter value) relevant only for this load operation |
Returns:
A pointer to compiled model
virtual ov::SoPtr<ov::ICompiledModel> compile_model(
const std::string& model_path,
const std::string& device_name,
const ov::AnyMap& config
) const = 0
Creates a compiled model from a model file.
Users can create as many models as they need and use them simultaneously (up to the limitation of the hardware resources)
Parameters:
model_path |
Path to model |
device_name |
Name of device to load model to |
config |
Optional map of pairs: (config parameter name, config parameter value) relevant only for this load operation |
Returns:
A pointer to compiled model
virtual ov::SoPtr<ov::ICompiledModel> compile_model(
const std::string& model_str,
const ov::Tensor& weights,
const std::string& device_name,
const ov::AnyMap& config
) const = 0
Creates a compiled model from a model memory.
Users can create as many models as they need and use them simultaneously (up to the limitation of the hardware resources)
Parameters:
model_str |
String data of model |
weights |
Model's weights |
device_name |
Name of device to load model to |
config |
Optional map of pairs: (config parameter name, config parameter value) relevant only for this load operation |
Returns:
A pointer to compiled model
virtual ov::SoPtr<ov::ICompiledModel> import_model(
std::istream& model,
const std::string& device_name,
const ov::AnyMap& config = {}
) const = 0
Creates a compiled model from a previously exported model.
Parameters:
model |
model stream |
device_name |
Name of a device to load the executable model on |
config |
Optional map of pairs: (config parameter name, config parameter value) relevant only for this load operation |
Returns:
A pointer to compiled model
virtual ov::SoPtr<ov::ICompiledModel> import_model(
std::istream& modelStream,
const ov::SoPtr<ov::IRemoteContext>& context,
const ov::AnyMap& config = {}
) const = 0
Creates a compiled model from a previously exported model.
Parameters:
modelStream |
model stream |
context |
Remote context |
config |
Optional map of pairs: (config parameter name, config parameter value) relevant only for this load operation |
Returns:
A pointer to compiled model
virtual ov::SupportedOpsMap query_model(
const std::shared_ptr<const ov::Model>& model,
const std::string& device_name,
const ov::AnyMap& config
) const = 0
Query device if it supports specified network with specified configuration.
Parameters:
model |
OpenVINO Model |
device_name |
A name of a device to query |
config |
Optional map of pairs: (config parameter name, config parameter value) |
Returns:
An object containing a map of pairs a layer name -> a device name supporting this layer.
virtual std::vector<std::string> get_available_devices() const = 0
Returns devices available for neural networks inference.
Returns:
A vector of devices. The devices are returned as { CPU, GPU.0, GPU.1, MYRIAD }. If there is more than one device of a specific type, they are enumerated with the .# suffix.
virtual ov::SoPtr<ov::IRemoteContext> create_context(
const std::string& device_name,
const AnyMap& args
) const = 0
Create a new shared context object on specified accelerator device using specified plugin-specific low level device API parameters (device handle, pointer, etc.)
Parameters:
device_name |
Name of a device to create new shared context on. |
args |
Map of device-specific shared context parameters. |
Returns:
A shared pointer to a created remote context.
virtual ov::SoPtr<ov::IRemoteContext> get_default_context(const std::string& device_name) const = 0
Get a pointer to default shared context object for the specified device.
Parameters:
device_name |
Name of a device to get the default shared context for. |
Returns:
A shared pointer to a default remote context.
virtual Any get_property(
const std::string& device_name,
const std::string& name,
const AnyMap& arguments
) const = 0
Gets properties related to device behaviour.
Parameters:
device_name |
Name of a device to get a property value. |
name |
Property name. |
arguments |
Additional arguments to get a property. |
Returns:
Value of a property corresponding to the property name.
template <typename T, PropertyMutability M>
T get_property(
const std::string& device_name,
const Property<T, M>& property
) const
Gets properties related to device behaviour.
Parameters:
T |
Type of a returned value. |
M |
Property mutability. |
device_name |
Name of a device to get a property value. |
property |
Property object. |
Returns:
Property value.
template <typename T, PropertyMutability M>
T get_property(
const std::string& device_name,
const Property<T, M>& property,
const AnyMap& arguments
) const
Gets properties related to device behaviour.
Parameters:
T |
Type of a returned value. |
M |
Property mutability. |
device_name |
Name of a device to get a property value. |
property |
Property object. |
arguments |
Additional arguments to get a property. |
Returns:
Property value.
virtual AnyMap get_supported_property(
const std::string& full_device_name,
const AnyMap& properties
) const = 0
Get only properties that are supported by specified device.
Parameters:
full_device_name |
Name of a device (can be either virtual or hardware) |
properties |
Properties that can contain configs that are not supported by the device |
Returns:
map of properties that are supported by device