ie_core.hpp
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

/**
 * @brief This is a header file for the Inference Engine Core class C++ API
 *
 * @file ie_core.hpp
 */
#pragma once

#include <map>
#include <memory>
#include <string>
#include <vector>

#include "ie_version.hpp"
#include "ie_extension.h"
#include "ie_remote_context.hpp"
#include "ie_executable_network.hpp"

namespace InferenceEngine {

/**
 * @brief This class represents the Inference Engine Core entity.
 *
 * It can throw exceptions safely for the application, where it is properly handled.
 */
class INFERENCE_ENGINE_API_CLASS(Core) {
    class Impl;
    std::shared_ptr<Impl> _impl;

public:
    /** @brief Constructs an Inference Engine Core instance using an XML configuration file with
     * plugins description.
     *
     * See RegisterPlugins for more details.
     *
     * @param xmlConfigFile A path to the .xml file with plugins to load from. If the XML configuration file is not
     * specified, then default Inference Engine plugins are loaded from the default plugin.xml file.
     */
    explicit Core(const std::string& xmlConfigFile = {});

    /**
     * @brief Returns plugins version information
     *
     * @param deviceName Device name to identify the plugin
     * @return A map of device names to versions
     */
    std::map<std::string, Version> GetVersions(const std::string& deviceName) const;

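    // Not part of the original header: a minimal usage sketch for GetVersions,
    // assuming a "CPU" plugin is available and <iostream> is included; the map
    // key is a device name and the Version fields come from ie_version.hpp.
    //
    //     InferenceEngine::Core core;
    //     std::map<std::string, InferenceEngine::Version> versions = core.GetVersions("CPU");
    //     for (const auto& v : versions)
    //         std::cout << v.first << ": " << v.second.description << std::endl;
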
#ifdef ENABLE_UNICODE_PATH_SUPPORT
    /**
     * @brief Reads models from IR and ONNX formats
     * @param modelPath Path to a model
     * @param binPath Path to a data file.
     * For the IR format (*.bin):
     * * if the path is empty, the function tries to read a bin file with the same name as the xml file;
     * * if a bin file with the same name is not found, the IR is loaded without weights.
     * ONNX models with data files are not supported
     * @return CNNNetwork
     */
    CNNNetwork ReadNetwork(const std::wstring& modelPath, const std::wstring& binPath = {}) const;
#endif

    /**
     * @brief Reads models from IR and ONNX formats
     * @param modelPath Path to a model
     * @param binPath Path to a data file.
     * For the IR format (*.bin):
     * * if the path is empty, the function tries to read a bin file with the same name as the xml file;
     * * if a bin file with the same name is not found, the IR is loaded without weights.
     * ONNX models with data files are not supported
     * @return CNNNetwork
     */
    CNNNetwork ReadNetwork(const std::string& modelPath, const std::string& binPath = {}) const;
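
    // Not part of the original header: a ReadNetwork sketch with hypothetical
    // file names. For "model.xml", weights are picked up from "model.bin"
    // automatically when binPath is omitted; the ONNX call assumes a
    // self-contained model without external data files.
    //
    //     InferenceEngine::Core core;
    //     CNNNetwork irNetwork   = core.ReadNetwork("model.xml");
    //     CNNNetwork onnxNetwork = core.ReadNetwork("model.onnx");
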
    /**
     * @brief Reads models from IR and ONNX formats
     * @param model String with a model in IR or ONNX format
     * @param weights Shared pointer to a constant blob with weights.
     * ONNX models with data blobs are not supported;
     * for the ONNX case, the second parameter should contain an empty blob.
     * @return CNNNetwork
     */
    CNNNetwork ReadNetwork(const std::string& model, const Blob::CPtr& weights) const;

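    // Not part of the original header: a sketch of reading an in-memory model.
    // "onnxModelString" is a hypothetical std::string holding serialized ONNX
    // data; per the note above, an empty blob (here a null Blob::CPtr) is passed.
    //
    //     InferenceEngine::Core core;
    //     CNNNetwork network = core.ReadNetwork(onnxModelString, InferenceEngine::Blob::CPtr());
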
    /**
     * @brief Creates an executable network from a network object.
     *
     * Users can create as many networks as they need and use
     * them simultaneously (up to the limitation of the hardware resources)
     *
     * @param network CNNNetwork object acquired from Core::ReadNetwork
     * @param deviceName Name of a device to load the network to
     * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
     * operation
     * @return An executable network reference
     */
    ExecutableNetwork LoadNetwork(
        const CNNNetwork& network, const std::string& deviceName,
        const std::map<std::string, std::string>& config = {});

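    // Not part of the original header: the typical read-then-load flow. "CPU"
    // is an example device and error handling is omitted.
    //
    //     InferenceEngine::Core core;
    //     CNNNetwork network = core.ReadNetwork("model.xml");
    //     ExecutableNetwork executable = core.LoadNetwork(network, "CPU");
    //     InferRequest request = executable.CreateInferRequest();
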
    /**
     * @brief Registers extension
     * @param extension Pointer to already loaded extension
     */
    void AddExtension(const IExtensionPtr& extension);

    /**
     * @brief Creates an executable network from a network object within a specified remote context.
     * @param network CNNNetwork object acquired from Core::ReadNetwork
     * @param context Pointer to a RemoteContext object
     * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
     * operation
     * @return An executable network object
     */
    ExecutableNetwork LoadNetwork(
        const CNNNetwork& network, RemoteContext::Ptr context,
        const std::map<std::string, std::string>& config = {});

    /**
     * @brief Registers extension for the specified plugin
     *
     * @param extension Pointer to already loaded extension
     * @param deviceName Device name to identify the plugin to add an executable extension to
     */
    void AddExtension(IExtensionPtr extension, const std::string& deviceName);

    /**
     * @brief Creates an executable network from a previously exported network
     *
     * @param modelFileName Path to the location of the exported file
     * @param deviceName Name of a device to load the executable network on
     * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
     * operation
     * @return An executable network reference
     */
    ExecutableNetwork ImportNetwork(
        const std::string& modelFileName, const std::string& deviceName,
        const std::map<std::string, std::string>& config = {});

    /**
     * @brief Creates an executable network from a previously exported network
     * @param networkModel Network model stream
     * @param deviceName Name of a device to load the executable network on
     * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
     * operation
     * @return An executable network reference
     */
    ExecutableNetwork ImportNetwork(std::istream& networkModel, const std::string& deviceName = {},
                                    const std::map<std::string, std::string>& config = {});

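    // Not part of the original header: an import sketch. "exported.blob" is a
    // hypothetical file written earlier via ExecutableNetwork::Export; only some
    // plugins (e.g. MYRIAD) support export/import, so the device is an assumption.
    //
    //     std::ifstream blobFile("exported.blob", std::ios::binary);  // needs <fstream>
    //     InferenceEngine::Core core;
    //     ExecutableNetwork executable = core.ImportNetwork(blobFile, "MYRIAD");
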
    /**
     * @brief Creates an executable network from a previously exported network within a specified
     * remote context.
     *
     * @param networkModel Network model stream
     * @param context Pointer to RemoteContext object
     * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
     * operation
     * @return An executable network reference
     */
    ExecutableNetwork ImportNetwork(std::istream& networkModel,
                                    const RemoteContext::Ptr& context,
                                    const std::map<std::string, std::string>& config = {});

    /**
     * @brief Queries a device whether it supports the specified network with the specified configuration
     *
     * @param network Network object to query
     * @param deviceName A name of a device to query
     * @param config Optional map of pairs: (config parameter name, config parameter value)
     * @return An object containing a map of pairs: a layer name -> a device name supporting this layer.
     */
    QueryNetworkResult QueryNetwork(
        const CNNNetwork& network, const std::string& deviceName,
        const std::map<std::string, std::string>& config = {}) const;

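    // Not part of the original header: a QueryNetwork sketch, continuing the
    // core/network variables from the sketches above. The supportedLayersMap
    // field (see QueryNetworkResult in ie_common.h) maps each layer name to
    // the device that can execute it.
    //
    //     QueryNetworkResult result = core.QueryNetwork(network, "CPU");
    //     for (const auto& layer : result.supportedLayersMap)
    //         std::cout << layer.first << " -> " << layer.second << std::endl;
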
    /**
     * @brief Sets configuration for a device; acceptable keys can be found in ie_plugin_config.hpp
     *
     * @param config Map of pairs: (config parameter name, config parameter value)
     * @param deviceName An optional name of a device. If the device name is not specified, the config is set for all
     * the registered devices.
     */
    void SetConfig(const std::map<std::string, std::string>& config, const std::string& deviceName = {});

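    // Not part of the original header: a SetConfig sketch. "PERF_COUNT"/"YES"
    // are the plain-string forms of CONFIG_KEY(PERF_COUNT)/CONFIG_VALUE(YES)
    // from ie_plugin_config.hpp; "CPU" is an example device.
    //
    //     core.SetConfig({{"PERF_COUNT", "YES"}}, "CPU");
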
    /**
     * @brief Gets configuration dedicated to device behaviour.
     *
     * The method is targeted to extract information which can be set via the SetConfig method.
     *
     * @param deviceName A name of a device to get a configuration value for.
     * @param name A config key.
     * @return Value of the config corresponding to the config key.
     */
    Parameter GetConfig(const std::string& deviceName, const std::string& name) const;

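    // Not part of the original header: a GetConfig sketch reading back the key
    // set in the SetConfig example above; Parameter::as<T>() converts the
    // stored value.
    //
    //     std::string perfCount = core.GetConfig("CPU", "PERF_COUNT").as<std::string>();
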
    /**
     * @brief Gets a general runtime metric for dedicated hardware.
     *
     * The method is needed to request common device properties
     * which are executable network agnostic, such as device name, temperature, and other device-specific values.
     *
     * @param deviceName A name of a device to get a metric value for.
     * @param name A metric name to request.
     * @return Metric value corresponding to the metric key.
     */
    Parameter GetMetric(const std::string& deviceName, const std::string& name) const;

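    // Not part of the original header: a GetMetric sketch. "FULL_DEVICE_NAME"
    // is the plain-string form of METRIC_KEY(FULL_DEVICE_NAME) from
    // ie_plugin_config.hpp.
    //
    //     std::string fullName = core.GetMetric("CPU", "FULL_DEVICE_NAME").as<std::string>();
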
    /**
     * @brief Returns devices available for neural network inference
     *
     * @return A vector of devices. The devices are returned as { CPU, FPGA.0, FPGA.1, MYRIAD }.
     * If there is more than one device of a specific type, they are enumerated with the .# suffix.
     */
    std::vector<std::string> GetAvailableDevices() const;

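    // Not part of the original header: an enumeration sketch; <iostream> is
    // assumed to be included and "core" is as in the sketches above.
    //
    //     for (const std::string& device : core.GetAvailableDevices())
    //         std::cout << device << std::endl;  // e.g. "CPU", "GPU", "MYRIAD.1.2"
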
    /**
     * @brief Registers a new device and a plugin which implements this device inside Inference Engine.
     *
     * @param pluginName A name of a plugin. Depending on the platform, pluginName is wrapped with a shared library
     * suffix and a prefix to identify the full library name
     *
     * @param deviceName A device name to register the plugin for. If the device name is not specified, it is taken
     * from the plugin itself.
     */
    void RegisterPlugin(const std::string& pluginName, const std::string& deviceName);

    /**
     * @brief Unloads a previously loaded plugin with the specified name from Inference Engine.
     * The method is needed to remove the plugin instance and free its resources. If the plugin for a
     * specified device has not been created before, the method throws an exception.
     *
     * @param deviceName Device name identifying the plugin to remove from Inference Engine
     */
    void UnregisterPlugin(const std::string& deviceName);

    /** @brief Registers plugins to Inference Engine Core instance using an XML configuration file with
     * plugins description.
     *
     * The XML file has the following structure:
     *
     * ```xml
     * <ie>
     *     <plugins>
     *         <plugin name="" location="">
     *             <extensions>
     *                 <extension location=""/>
     *             </extensions>
     *             <properties>
     *                 <property key="" value=""/>
     *             </properties>
     *         </plugin>
     *     </plugins>
     * </ie>
     * ```
     *
     * - `name` identifies the name of the device enabled by the plugin
     * - `location` specifies the absolute path to a dynamic library with the plugin. The path can also be relative
     * to the Inference Engine shared library, which allows having a common config for different systems with
     * different configurations.
     * - Properties are set to the plugin via the `SetConfig` method.
     * - Extensions are set to the plugin via the `AddExtension` method.
     *
     * @param xmlConfigFile A path to the .xml file with plugins to register.
     */
    void RegisterPlugins(const std::string& xmlConfigFile);

    /**
     * @brief Creates a new shared context object on the specified accelerator device
     * using specified plugin-specific low-level device API parameters (device handle, pointer, etc.)
     * @param deviceName Name of a device to create the new shared context on.
     * @param params Map of device-specific shared context parameters.
     * @return A shared pointer to the created remote context.
     */
    RemoteContext::Ptr CreateContext(const std::string& deviceName, const ParamMap& params);

    /**
     * @brief Gets a pointer to the default (plugin-supplied) shared context object for the specified accelerator
     * device.
     * @param deviceName A name of a device to get the default shared context from.
     * @return A shared pointer to the default remote context.
     */
    RemoteContext::Ptr GetDefaultContext(const std::string& deviceName);
};
}  // namespace InferenceEngine