interface ov::ICore

Overview

Minimal ICore interface that allows a plugin to get information from the Core Inference Engine class.

#include <ie_icore.hpp>

class ICore
{
    // methods

    virtual ie::CNNNetwork ReadNetwork(
        const std::string& model,
        const ie::Blob::CPtr& weights,
        bool frontendMode = false
        ) const = 0;

    virtual ie::CNNNetwork ReadNetwork(
        const std::string& modelPath,
        const std::string& binPath
        ) const = 0;

    virtual ie::SoExecutableNetworkInternal LoadNetwork(
        const ie::CNNNetwork& network,
        const std::string& deviceName,
        const std::map<std::string, std::string>& config = {}
        ) = 0;

    virtual ie::SoExecutableNetworkInternal LoadNetwork(
        const ie::CNNNetwork& network,
        const ie::RemoteContext::Ptr& remoteCtx,
        const std::map<std::string, std::string>& config = {}
        ) = 0;

    virtual ie::SoExecutableNetworkInternal LoadNetwork(
        const std::string& modelPath,
        const std::string& deviceName,
        const std::map<std::string, std::string>& config,
        const std::function<void(const ie::CNNNetwork&)>& val = nullptr
        ) = 0;

    virtual ie::SoExecutableNetworkInternal ImportNetwork(
        std::istream& networkModel,
        const std::string& deviceName = {},
        const std::map<std::string, std::string>& config = {}
        ) = 0;

    virtual ie::QueryNetworkResult QueryNetwork(
        const ie::CNNNetwork& network,
        const std::string& deviceName,
        const std::map<std::string, std::string>& config
        ) const = 0;

    virtual Any GetMetric(
        const std::string& deviceName,
        const std::string& name,
        const AnyMap& options = {}
        ) const = 0;

    virtual Any GetConfig(const std::string& deviceName, const std::string& name) const = 0;
    virtual std::vector<std::string> GetAvailableDevices() const = 0;
    virtual bool DeviceSupportsImportExport(const std::string& deviceName) const = 0;

    virtual InferenceEngine::RemoteContext::Ptr CreateContext(
        const std::string& deviceName,
        const AnyMap&
        ) = 0;

    virtual std::map<std::string, std::string> GetSupportedConfig(
        const std::string& deviceName,
        const std::map<std::string, std::string>& config
        ) = 0;

    virtual bool isNewAPI() const = 0;
    virtual ie::RemoteContext::Ptr GetDefaultContext(const std::string& deviceName) = 0;

    virtual void set_property(
        const std::string& device_name,
        const AnyMap& properties
        ) = 0;

    template <typename... Properties>
    util::EnableIfAllStringAny<void, Properties...> set_property(
        const std::string& device_name,
        Properties&&... properties
        );

    virtual Any get_property(
        const std::string& device_name,
        const std::string& name,
        const AnyMap& arguments
        ) const = 0;

    template <typename T, PropertyMutability M>
    T get_property(
        const std::string& device_name,
        const Property<T, M>& property
        ) const;

    template <typename T, PropertyMutability M>
    T get_property(
        const std::string& device_name,
        const Property<T, M>& property,
        const AnyMap& arguments
        ) const;
};

Detailed Documentation

Minimal ICore interface that allows a plugin to get information from the Core Inference Engine class.

Methods

virtual ie::CNNNetwork ReadNetwork(
    const std::string& model,
    const ie::Blob::CPtr& weights,
    bool frontendMode = false
    ) const = 0

Reads IR xml and bin (with the same name) files.

Parameters:

model

string with IR

weights

shared pointer to constant blob with weights

frontendMode

read network without post-processing or other transformations

Returns:

CNNNetwork

virtual ie::CNNNetwork ReadNetwork(
    const std::string& modelPath,
    const std::string& binPath
    ) const = 0

Reads IR xml and bin files.

Parameters:

modelPath

path to IR file

binPath

Path to the bin file. If the path is empty, the method tries to read a bin file with the same name as the xml; if such a bin file is not found, the IR is loaded without weights.

Returns:

CNNNetwork
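
As a rough illustration, the sketch below shows how code holding an ov::ICore reference might use both ReadNetwork overloads; the file path, the in-memory IR string, and the empty weights pointer are placeholder assumptions, and read_models is a hypothetical helper, not part of the ICore API.

#include <ie_icore.hpp>

#include <string>

// Hypothetical helper, not part of the ICore API.
void read_models(ov::ICore& core) {
    // Read an IR from disk; an empty bin path makes the core look for a
    // .bin file with the same name as the xml, or load the IR without weights.
    InferenceEngine::CNNNetwork from_files =
        core.ReadNetwork("/path/to/model.xml", "");

    // Read an IR held in memory: the xml content as a string plus a weights blob.
    // A null weights pointer assumes the model carries no external weights.
    std::string xml_content = "<net> ... </net>";  // placeholder IR content
    InferenceEngine::CNNNetwork from_memory =
        core.ReadNetwork(xml_content, InferenceEngine::Blob::CPtr{});
}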

virtual ie::SoExecutableNetworkInternal LoadNetwork(
    const ie::CNNNetwork& network,
    const std::string& deviceName,
    const std::map<std::string, std::string>& config = {}
    ) = 0

Creates an executable network from a network object.

Users can create as many networks as they need and use them simultaneously (up to the limitations of the hardware resources).

Parameters:

network

CNNNetwork object acquired from Core::ReadNetwork

deviceName

Name of device to load network to

config

Optional map of pairs: (config parameter name, config parameter value) relevant only for this load operation

Returns:

An executable network reference
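
A minimal sketch of the CNNNetwork-based overload follows; the device name "CPU" and the "PERF_COUNT" config key are placeholder assumptions, and load_for_device is a hypothetical helper.

#include <ie_icore.hpp>

#include <map>
#include <string>

// Hypothetical sketch: compile a previously read CNNNetwork for a device.
void load_for_device(ov::ICore& core, const InferenceEngine::CNNNetwork& network) {
    // Per-load configuration; the key/value pair is a placeholder.
    std::map<std::string, std::string> config = {{"PERF_COUNT", "YES"}};

    // The returned object (ie::SoExecutableNetworkInternal) also holds the
    // plugin shared object, keeping it loaded while the network is alive.
    auto exec_network = core.LoadNetwork(network, "CPU", config);
}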

virtual ie::SoExecutableNetworkInternal LoadNetwork(
    const ie::CNNNetwork& network,
    const ie::RemoteContext::Ptr& remoteCtx,
    const std::map<std::string, std::string>& config = {}
    ) = 0

Creates an executable network from a network object.

Users can create as many networks as they need and use them simultaneously (up to the limitations of the hardware resources).

Parameters:

network

CNNNetwork object acquired from Core::ReadNetwork

remoteCtx

“Remote” (non-CPU) accelerator device-specific execution context to use

config

Optional map of pairs: (config parameter name, config parameter value) relevant only for this load operation

Returns:

An executable network reference

virtual ie::SoExecutableNetworkInternal LoadNetwork(
    const std::string& modelPath,
    const std::string& deviceName,
    const std::map<std::string, std::string>& config,
    const std::function<void(const ie::CNNNetwork&)>& val = nullptr
    ) = 0

Creates an executable network from a model file.

Users can create as many networks as they need and use them simultaneously (up to the limitations of the hardware resources).

Parameters:

modelPath

Path to model

deviceName

Name of device to load network to

config

Optional map of pairs: (config parameter name, config parameter value) relevant only for this load operation

val

Optional callback to perform validation of loaded CNNNetwork, if ReadNetwork is triggered

Returns:

An executable network reference
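
The sketch below assumes a caller that wants validation to run only when the core actually reads the model from disk; the path, device name, and the check inside the callback are placeholder assumptions.

#include <ie_icore.hpp>

#include <map>
#include <stdexcept>
#include <string>

// Hypothetical sketch: compile a model directly from its file path and validate
// the CNNNetwork only when the core actually had to call ReadNetwork for it.
void load_from_path(ov::ICore& core) {
    std::map<std::string, std::string> config;  // no extra options

    auto exec_network = core.LoadNetwork(
        "/path/to/model.xml", "CPU", config,
        [](const InferenceEngine::CNNNetwork& network) {
            // Placeholder validation: reject models without inputs.
            if (network.getInputsInfo().empty())
                throw std::runtime_error("model has no inputs");
        });
}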

virtual ie::SoExecutableNetworkInternal ImportNetwork(
    std::istream& networkModel,
    const std::string& deviceName = {},
    const std::map<std::string, std::string>& config = {}
    ) = 0

Creates an executable network from a previously exported network.

Parameters:

networkModel

network model stream

deviceName

Name of the device to load the executable network on

config

Optional map of pairs: (config parameter name, config parameter value) relevant only for this load operation

Returns:

An executable network reference
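
A minimal sketch, assuming the executable network was previously exported to a binary file; the path and device name are placeholders.

#include <ie_icore.hpp>

#include <fstream>

// Hypothetical sketch: restore an executable network from an exported blob file.
void import_blob(ov::ICore& core) {
    std::ifstream blob_stream("/path/to/exported.blob", std::ios::binary);
    auto exec_network = core.ImportNetwork(blob_stream, "MYRIAD");
}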

virtual ie::QueryNetworkResult QueryNetwork(
    const ie::CNNNetwork& network,
    const std::string& deviceName,
    const std::map<std::string, std::string>& config
    ) const = 0

Queries whether a device supports the specified network with the specified configuration.

Parameters:

deviceName

A name of a device to query

network

Network object to query

config

Optional map of pairs: (config parameter name, config parameter value)

Returns:

An object containing a map of pairs: a layer name -> a device name supporting this layer.
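
A sketch of a typical query loop follows; it assumes QueryNetworkResult exposes the supportedLayersMap member (layer name -> device name) known from the classic Inference Engine API, and the device name is a placeholder.

#include <ie_icore.hpp>

#include <iostream>
#include <string>

// Hypothetical sketch: ask which layers of a network the device can execute.
void query_support(ov::ICore& core, const InferenceEngine::CNNNetwork& network) {
    InferenceEngine::QueryNetworkResult result = core.QueryNetwork(network, "GPU", {});
    for (const auto& layer : result.supportedLayersMap)
        std::cout << layer.first << " -> " << layer.second << '\n';
}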

virtual Any GetMetric(
    const std::string& deviceName,
    const std::string& name,
    const AnyMap& options = {}
    ) const = 0

Gets general runtime metric for dedicated hardware.

The method is needed to request common device properties which are executable network agnostic. It can be a device name, temperature, or other device-specific values.

Parameters:

deviceName

A name of a device to get a metric value.

name

Metric name to request.

Returns:

Metric value corresponding to metric key.
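
A minimal sketch, assuming "FULL_DEVICE_NAME" is a metric supported by the queried device; the device name is a placeholder.

#include <ie_icore.hpp>

#include <iostream>
#include <string>

// Hypothetical sketch: request an executable-network-agnostic device metric.
void print_device_name(ov::ICore& core) {
    ov::Any full_name = core.GetMetric("GPU", "FULL_DEVICE_NAME");
    std::cout << full_name.as<std::string>() << '\n';
}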

virtual Any GetConfig(const std::string& deviceName, const std::string& name) const = 0

Gets configuration dedicated to device behaviour.

The method extracts information that can be set via the SetConfig method.

Parameters:

deviceName

A name of a device to get a configuration value.

name

Config key.

Returns:

Value of config corresponding to config key.

virtual std::vector<std::string> GetAvailableDevices() const = 0

Returns devices available for neural network inference.

Returns:

A vector of devices. The devices are returned as { CPU, GPU.0, GPU.1, MYRIAD }. If there is more than one device of a specific type, they are enumerated with a .# suffix.
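
A minimal sketch of enumerating the devices visible to the core, e.g. to pick a fallback when the preferred device is absent; printing is only for illustration.

#include <ie_icore.hpp>

#include <iostream>
#include <string>

// Hypothetical sketch: list available devices.
void list_devices(const ov::ICore& core) {
    for (const std::string& device : core.GetAvailableDevices())
        std::cout << device << '\n';  // e.g. "CPU", "GPU.0", "GPU.1"
}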

virtual bool DeviceSupportsImportExport(const std::string& deviceName) const = 0

Checks whether a device supports the network Export & Import functionality.

Parameters:

deviceName

A name of a device to get a metric value.

Returns:

True if the device has the IMPORT_EXPORT_SUPPORT metric in SUPPORTED_METRICS and this metric returns 'true'; False otherwise.

virtual InferenceEngine::RemoteContext::Ptr CreateContext(
    const std::string& deviceName,
    const AnyMap&
    ) = 0

Creates a new shared context object on the specified accelerator device using the specified plugin-specific low-level device API parameters (device handle, pointer, etc.).

Parameters:

deviceName

Name of a device to create new shared context on.

params

Map of device-specific shared context parameters.

Returns:

A shared pointer to a created remote context.

virtual std::map<std::string, std::string> GetSupportedConfig(
    const std::string& deviceName,
    const std::map<std::string, std::string>& config
    ) = 0

Gets only the configs that are supported by the device.

Parameters:

deviceName

Name of a device

config

Map of configs that may contain configs that are not supported by the device

Returns:

Map of configs that are supported by the device
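
A minimal sketch, assuming a plugin wants to forward only the recognized part of a user-supplied config; the device name and the helper are placeholders.

#include <ie_icore.hpp>

#include <map>
#include <string>

// Hypothetical sketch: keep only the config keys the target device understands.
std::map<std::string, std::string> filter_config(
        ov::ICore& core, const std::map<std::string, std::string>& user_config) {
    return core.GetSupportedConfig("CPU", user_config);
}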

virtual ie::RemoteContext::Ptr GetDefaultContext(const std::string& deviceName) = 0

Gets a pointer to the default shared context object for the specified device.

Parameters:

deviceName

A name of a device to get the default shared context from.

Returns:

A shared pointer to a default remote context.

virtual void set_property(
    const std::string& device_name,
    const AnyMap& properties
    ) = 0

Sets properties for a device; acceptable keys can be found in openvino/runtime/properties.hpp.

Parameters:

device_name

Name of a device.

properties

Map of pairs: (property name, property value).
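
A minimal sketch of the AnyMap overload; it assumes ov::enable_profiling is available from openvino/runtime/properties.hpp and uses "CPU" as a placeholder device name.

#include <ie_icore.hpp>

#include <openvino/runtime/properties.hpp>

// Hypothetical sketch: set a device property through the AnyMap overload.
void configure_device(ov::ICore& core) {
    ov::AnyMap properties = {{ov::enable_profiling.name(), true}};
    core.set_property("CPU", properties);
}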

template <typename... Properties>
util::EnableIfAllStringAny<void, Properties...> set_property(
    const std::string& device_name,
    Properties&&... properties
    )

Sets properties for a device; acceptable keys can be found in openvino/runtime/properties.hpp.

Parameters:

Properties

Should be the pack of std::pair<std::string, Any> types.

device_name

Name of a device.

properties

Optional pack of pairs: (property name, property value).
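
A minimal sketch of the variadic overload; it assumes ov::enable_profiling and ov::cache_dir are available from openvino/runtime/properties.hpp and that each property call expands to a (property name, property value) pair.

#include <ie_icore.hpp>

#include <openvino/runtime/properties.hpp>

// Hypothetical sketch: pass property objects directly instead of building an AnyMap.
void configure_device_variadic(ov::ICore& core) {
    core.set_property("CPU",
                      ov::enable_profiling(true),
                      ov::cache_dir("/tmp/ov_cache"));  // placeholder cache path
}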

virtual Any get_property(
    const std::string& device_name,
    const std::string& name,
    const AnyMap& arguments
    ) const = 0

Gets properties related to device behaviour.

Parameters:

device_name

Name of a device to get a property value.

name

Property name.

arguments

Additional arguments to get a property.

Returns:

Value of a property corresponding to the property name.
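
A minimal sketch of the string-keyed overload; it assumes ov::device::full_name from openvino/runtime/properties.hpp supplies the key, and the device name is a placeholder.

#include <ie_icore.hpp>

#include <iostream>
#include <openvino/runtime/properties.hpp>
#include <string>

// Hypothetical sketch: query a property by its string key and unwrap the Any result.
void print_full_name(ov::ICore& core) {
    ov::Any value = core.get_property("GPU", ov::device::full_name.name(), {});
    std::cout << value.as<std::string>() << '\n';
}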

template <typename T, PropertyMutability M>
T get_property(
    const std::string& device_name,
    const Property<T, M>& property
    ) const

Gets properties related to device behaviour.

Parameters:

T

Type of a returned value.

M

Property mutability.

device_name

Name of a device to get a property value.

property

Property object.

Returns:

Property value.
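
A minimal sketch of the typed overload; it assumes ov::device::full_name from openvino/runtime/properties.hpp, and the device name is a placeholder.

#include <ie_icore.hpp>

#include <iostream>
#include <openvino/runtime/properties.hpp>
#include <string>

// Hypothetical sketch: the typed overload deduces the return type from the
// Property object, so no manual Any conversion is needed.
void print_full_name_typed(const ov::ICore& core) {
    std::string full_name = core.get_property("GPU", ov::device::full_name);
    std::cout << full_name << '\n';
}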

template <typename T, PropertyMutability M>
T get_property(
    const std::string& device_name,
    const Property<T, M>& property,
    const AnyMap& arguments
    ) const

Gets properties related to device behaviour.

Parameters:

T

Type of a returned value.

M

Property mutability.

device_name

Name of a device to get a property value.

property

Property object.

arguments

Additional arguments to get a property.

Returns:

Property value.