class InferenceEngine::ICore¶
Overview¶
#include <ie_icore.hpp>
class ICore: public ov::ICore
{
public:
// methods
virtual CNNNetwork ReadNetwork(
const std::string& model,
const Blob::CPtr& weights,
bool frontendMode = false
) const = 0;
virtual CNNNetwork ReadNetwork(
const std::string& modelPath,
const std::string& binPath
) const = 0;
virtual SoExecutableNetworkInternal LoadNetwork(
const CNNNetwork& network,
const std::string& deviceName,
const std::map<std::string, std::string>& config = {}
) = 0;
virtual SoExecutableNetworkInternal LoadNetwork(
const CNNNetwork& network,
const RemoteContext::Ptr& remoteCtx,
const std::map<std::string, std::string>& config = {}
) = 0;
virtual SoExecutableNetworkInternal LoadNetwork(
const std::string& modelStr,
const InferenceEngine::Blob::CPtr& weights,
const std::string& deviceName,
const std::map<std::string, std::string>& config,
const std::function<void(const InferenceEngine::CNNNetwork&)>& val = nullptr
) = 0;
virtual SoExecutableNetworkInternal LoadNetwork(
const std::string& modelPath,
const std::string& deviceName,
const std::map<std::string, std::string>& config,
const std::function<void(const CNNNetwork&)>& val = nullptr
) = 0;
virtual SoExecutableNetworkInternal ImportNetwork(
std::istream& networkModel,
const std::string& deviceName = {},
const std::map<std::string, std::string>& config = {}
) = 0;
virtual QueryNetworkResult QueryNetwork(
const CNNNetwork& network,
const std::string& deviceName,
const std::map<std::string, std::string>& config
) const = 0;
virtual ov::Any GetMetric(
const std::string& deviceName,
const std::string& name,
const ov::AnyMap& options = {}
) const = 0;
virtual ov::Any GetConfig(
const std::string& deviceName,
const std::string& name
) const = 0;
virtual std::vector<std::string> GetAvailableDevices() const = 0;
virtual bool DeviceSupportsModelCaching(const std::string& deviceName) const = 0;
virtual InferenceEngine::RemoteContext::Ptr CreateContext(
const std::string& deviceName,
const ov::AnyMap&
) = 0;
virtual std::map<std::string, std::string> GetSupportedConfig(
const std::string& deviceName,
const std::map<std::string, std::string>& config
) = 0;
virtual bool isNewAPI() const = 0;
virtual RemoteContext::Ptr GetDefaultContext(const std::string& deviceName) = 0;
};
Inherited Members¶
public:
// methods
virtual std::shared_ptr<ov::Model> read_model(
const std::string& model,
const ov::Tensor& weights,
bool frontend_mode = false
) const = 0;
virtual std::shared_ptr<ov::Model> read_model(
const std::string& model_path,
const std::string& bin_path
) const = 0;
virtual ov::SoPtr<ov::ICompiledModel> compile_model(
const std::shared_ptr<const ov::Model>& model,
const std::string& device_name,
const ov::AnyMap& config = {}
) const = 0;
virtual ov::SoPtr<ov::ICompiledModel> compile_model(
const std::shared_ptr<const ov::Model>& model,
const ov::RemoteContext& context,
const ov::AnyMap& config = {}
) const = 0;
virtual ov::SoPtr<ov::ICompiledModel> compile_model(
const std::string& model_path,
const std::string& device_name,
const ov::AnyMap& config
) const = 0;
virtual ov::SoPtr<ov::ICompiledModel> compile_model(
const std::string& model_str,
const ov::Tensor& weights,
const std::string& device_name,
const ov::AnyMap& config
) const = 0;
virtual ov::SoPtr<ov::ICompiledModel> import_model(
std::istream& model,
const std::string& device_name,
const ov::AnyMap& config = {}
) const = 0;
virtual ov::SupportedOpsMap query_model(
const std::shared_ptr<const ov::Model>& model,
const std::string& device_name,
const ov::AnyMap& config
) const = 0;
virtual std::vector<std::string> get_available_devices() const = 0;
virtual ov::RemoteContext create_context(
const std::string& device_name,
const AnyMap& args
) const = 0;
virtual bool is_new_api() const = 0;
virtual ov::RemoteContext get_default_context(const std::string& device_name) const = 0;
virtual Any get_property(
const std::string& device_name,
const std::string& name,
const AnyMap& arguments
) const = 0;
template <typename T, PropertyMutability M>
T get_property(
const std::string& device_name,
const Property<T, M>& property
) const;
template <typename T, PropertyMutability M>
T get_property(
const std::string& device_name,
const Property<T, M>& property,
const AnyMap& arguments
) const;
virtual AnyMap get_supported_property(
const std::string& full_device_name,
const AnyMap& properties
) const = 0;
Detailed Documentation¶
Methods¶
virtual CNNNetwork ReadNetwork(
const std::string& model,
const Blob::CPtr& weights,
bool frontendMode = false
) const = 0
Reads IR xml and bin (with the same name) files.
Parameters:
model |
string with IR |
weights |
shared pointer to constant blob with weights |
frontendMode |
read network without post-processing or other transformations |
Returns:
CNNNetwork
virtual CNNNetwork ReadNetwork(
const std::string& modelPath,
const std::string& binPath
) const = 0
Reads IR xml and bin files.
Parameters:
modelPath |
path to IR file |
binPath |
path to bin file, if path is empty, will try to read bin file with the same name as xml and if bin file with the same name was not found, will load IR without weights. |
Returns:
CNNNetwork
virtual SoExecutableNetworkInternal LoadNetwork(
const CNNNetwork& network,
const std::string& deviceName,
const std::map<std::string, std::string>& config = {}
) = 0
Creates an executable network from a network object.
Users can create as many networks as they need and use them simultaneously (up to the limitation of the hardware resources)
Parameters:
network |
CNNNetwork object acquired from Core::ReadNetwork |
deviceName |
Name of device to load network to |
config |
Optional map of pairs: (config parameter name, config parameter value) relevant only for this load operation |
Returns:
An executable network reference
virtual SoExecutableNetworkInternal LoadNetwork(
const CNNNetwork& network,
const RemoteContext::Ptr& remoteCtx,
const std::map<std::string, std::string>& config = {}
) = 0
Creates an executable network from a network object.
Users can create as many networks as they need and use them simultaneously (up to the limitation of the hardware resources)
Parameters:
network |
CNNNetwork object acquired from Core::ReadNetwork |
remoteCtx |
“Remote” (non-CPU) accelerator device-specific execution context to use |
config |
Optional map of pairs: (config parameter name, config parameter value) relevant only for this load operation |
Returns:
An executable network reference
virtual SoExecutableNetworkInternal LoadNetwork(
const std::string& modelStr,
const InferenceEngine::Blob::CPtr& weights,
const std::string& deviceName,
const std::map<std::string, std::string>& config,
const std::function<void(const InferenceEngine::CNNNetwork&)>& val = nullptr
) = 0
Creates an executable network from a model memory.
Users can create as many networks as they need and use them simultaneously (up to the limitation of the hardware resources)
Parameters:
modelStr |
String data of model |
weights |
Model’s weights |
deviceName |
Name of device to load network to |
config |
Optional map of pairs: (config parameter name, config parameter value) relevant only for this load operation |
val |
Optional callback to perform validation of loaded CNNNetwork, if ReadNetwork is triggered |
Returns:
An executable network reference
virtual SoExecutableNetworkInternal LoadNetwork(
const std::string& modelPath,
const std::string& deviceName,
const std::map<std::string, std::string>& config,
const std::function<void(const CNNNetwork&)>& val = nullptr
) = 0
Creates an executable network from a model file.
Users can create as many networks as they need and use them simultaneously (up to the limitation of the hardware resources)
Parameters:
modelPath |
Path to model |
deviceName |
Name of device to load network to |
config |
Optional map of pairs: (config parameter name, config parameter value) relevant only for this load operation |
val |
Optional callback to perform validation of loaded CNNNetwork, if ReadNetwork is triggered |
Returns:
An executable network reference
virtual SoExecutableNetworkInternal ImportNetwork(
std::istream& networkModel,
const std::string& deviceName = {},
const std::map<std::string, std::string>& config = {}
) = 0
Creates an executable network from a previously exported network.
Parameters:
networkModel |
network model stream |
deviceName |
Name of a device to load the executable network on |
config |
Optional map of pairs: (config parameter name, config parameter value) relevant only for this load operation |
Returns:
An executable network reference
virtual QueryNetworkResult QueryNetwork(
const CNNNetwork& network,
const std::string& deviceName,
const std::map<std::string, std::string>& config
) const = 0
Query device if it supports specified network with specified configuration.
Parameters:
deviceName |
A name of a device to query |
network |
Network object to query |
config |
Optional map of pairs: (config parameter name, config parameter value) |
Returns:
An object containing a map of pairs a layer name -> a device name supporting this layer.
virtual ov::Any GetMetric(
const std::string& deviceName,
const std::string& name,
const ov::AnyMap& options = {}
) const = 0
Gets general runtime metric for dedicated hardware.
The method is needed to request common device properties which are executable network agnostic. It can be device name, temperature, other devices-specific values.
Parameters:
deviceName |
|
name |
|
Returns:
Metric value corresponding to metric key.
virtual ov::Any GetConfig(
const std::string& deviceName,
const std::string& name
) const = 0
Gets configuration dedicated to device behaviour.
The method is targeted to extract information which can be set via SetConfig method.
Parameters:
deviceName |
|
name |
|
Returns:
Value of config corresponding to config key.
virtual std::vector<std::string> GetAvailableDevices() const = 0
Returns devices available for neural networks inference.
Returns:
A vector of devices. The devices are returned as { CPU, GPU.0, GPU.1, GNA }. If there is more than one device of a specific type, they are enumerated with the .# suffix.
virtual bool DeviceSupportsModelCaching(const std::string& deviceName) const = 0
Checks whether device supports model caching feature.
Parameters:
deviceName |
|
Returns:
True if device has IMPORT_EXPORT_SUPPORT and CACHING_PROPERTIES metric in SUPPORTED_METRICS and this metric returns ‘true’, False otherwise.
virtual InferenceEngine::RemoteContext::Ptr CreateContext(
const std::string& deviceName,
const ov::AnyMap&
) = 0
Create a new shared context object on specified accelerator device using specified plugin-specific low level device API parameters (device handle, pointer, etc.)
Parameters:
deviceName |
Name of a device to create new shared context on. |
params |
Map of device-specific shared context parameters. |
Returns:
A shared pointer to a created remote context.
virtual std::map<std::string, std::string> GetSupportedConfig(
const std::string& deviceName,
const std::map<std::string, std::string>& config
) = 0
Get only configs that are supported by device.
Parameters:
deviceName |
Name of a device |
config |
Map of configs that can contain configs that are not supported by the device |
Returns:
map of configs that are supported by device
virtual RemoteContext::Ptr GetDefaultContext(const std::string& deviceName) = 0
Get a pointer to default shared context object for the specified device.
Parameters:
deviceName |
|
Returns:
A shared pointer to a default remote context.