ie_core.hpp
Go to the documentation of this file.
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

/**
 * @brief This is a header file for the Inference Engine Core class C++ API
 *
 * @file ie_core.hpp
 */
#pragma once

#include <map>
#include <memory>
#include <string>
#include <vector>

#include "cpp/ie_executable_network.hpp"
#include "details/os/os_filesystem.hpp"
#include "ie_extension.h"
#include "ie_remote_context.hpp"
22 namespace InferenceEngine {
23 
24 /**
25  * @brief Responce structure encapsulating information about supported layer
26  */
28  /**
29  * @brief A map of supported layers:
30  * - key - a layer name
31  * - value - a device name on which layer is assigned
32  */
33  std::map<std::string, std::string> supportedLayersMap;
34 
35  /**
36  * @brief A status code
37  */
38  StatusCode rc = OK;
39 
40  /**
41  * @brief Response message
42  */
44 };
45 
46 /**
47  * @brief This class represents Inference Engine Core entity.
48  *
49  * It can throw exceptions safely for the application, where it is properly handled.
50  */
51 class INFERENCE_ENGINE_API_CLASS(Core) {
52  class Impl;
53  std::shared_ptr<Impl> _impl;
54 
55 public:
56  /** @brief Constructs Inference Engine Core instance using XML configuration file with
57  * plugins description.
58  *
59  * See RegisterPlugins for more details.
60  *
61  * @param xmlConfigFile A path to .xml file with plugins to load from. If XML configuration file is not specified,
62  * then default Inference Engine plugins are loaded from the default plugin.xml file.
63  */
64  explicit Core(const std::string& xmlConfigFile = {});
65 
66  /**
67  * @brief Returns plugins version information
68  *
69  * @param deviceName Device name to indentify plugin
70  * @return A vector of versions
71  */
72  std::map<std::string, Version> GetVersions(const std::string& deviceName) const;
73 
74  /**
75  * @deprecated IErrorListener is not used anymore. An exception is thrown in case of any unexpected situations.
76  * The function will be removed in 2021.1 release.
77  * @brief Sets logging callback
78  *
79  * Logging is used to track what is going on inside the plugins, Inference Engine library
80  *
81  * @param listener Logging sink
82  */
83  IE_SUPPRESS_DEPRECATED_START
84  INFERENCE_ENGINE_DEPRECATED("IErrorListener is not used anymore. An exception is thrown in case of any unexpected situations.")
85  void SetLogCallback(IErrorListener& listener) const;
86  IE_SUPPRESS_DEPRECATED_END
87 
88 #ifdef ENABLE_UNICODE_PATH_SUPPORT
89  /**
90  * @brief Reads models from IR and ONNX formats
91  * @param modelPath path to model
92  * @param binPath path to data file
93  * For IR format (*.bin):
94  * * if path is empty, will try to read bin file with the same name as xml and
95  * * if bin file with the same name was not found, will load IR without weights.
96  * ONNX models with data files are not supported
97  * @return CNNNetwork
98  */
99  CNNNetwork ReadNetwork(const std::wstring& modelPath, const std::wstring& binPath = {}) const {
100  return ReadNetwork(details::wStringtoMBCSstringChar(modelPath), details::wStringtoMBCSstringChar(binPath));
101  }
102 #endif
103 
104  /**
105  * @brief Reads models from IR and ONNX formats
106  * @param modelPath path to model
107  * @param binPath path to data file
108  * For IR format (*.bin):
109  * * if \p binPath is empty, tries to read *.bin file with the same name as xml and
110  * * if *.bin file with the same name was not found, tries to load IR without weights.
111  * ONNX models with data files are not supported
112  * @return CNNNetwork
113  */
114  CNNNetwork ReadNetwork(const std::string& modelPath, const std::string& binPath = {}) const;
115  /**
116  * @brief Reads models from IR and ONNX formats
117  * @param model string with model in IR or ONNX format
118  * @param weights shared pointer to constant blob with weights
119  * ONNX models doesn't support models with data blobs.
120  * For ONNX case the second parameter should contain empty blob.
121  * @return CNNNetwork
122  */
123  CNNNetwork ReadNetwork(const std::string& model, const Blob::CPtr& weights) const;
124 
125  /**
126  * @brief Creates an executable network from a network object.
127  *
128  * Users can create as many networks as they need and use
129  * them simultaneously (up to the limitation of the hardware resources)
130  *
131  * @param network CNNNetwork object acquired from Core::ReadNetwork
132  * @param deviceName Name of device to load network to
133  * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
134  * operation
135  * @return An executable network reference
136  */
137  ExecutableNetwork LoadNetwork(
138  const CNNNetwork& network, const std::string& deviceName,
139  const std::map<std::string, std::string>& config = {});
140 
141  /**
142  * @brief Registers extension
143  * @param extension Pointer to already loaded extension
144  */
145  void AddExtension(const IExtensionPtr& extension);
146 
147  /**
148  * @brief Creates an executable network from a network object within a specified remote context.
149  * @param network CNNNetwork object acquired from Core::ReadNetwork
150  * @param context Pointer to RemoteContext object
151  * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
152  * operation
153  * @return An executable network object
154  */
155  ExecutableNetwork LoadNetwork(
156  const CNNNetwork& network, RemoteContext::Ptr context,
157  const std::map<std::string, std::string>& config = {});
158 
159  /**
160  * @brief Registers extension for the specified plugin
161  *
162  * @param extension Pointer to already loaded extension
163  * @param deviceName Device name to identify plugin to add an executable extension
164  */
165  void AddExtension(IExtensionPtr extension, const std::string& deviceName);
166 
167  /**
168  * @brief Creates an executable network from a previously exported network
169  *
170  * @param deviceName Name of device load executable network on
171  * @param modelFileName Path to the location of the exported file
172  * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
173  * operation*
174  * @return An executable network reference
175  */
176  ExecutableNetwork ImportNetwork(
177  const std::string& modelFileName, const std::string& deviceName,
178  const std::map<std::string, std::string>& config = {});
179 
180  /**
181  * @brief Creates an executable network from a previously exported network
182  * @param deviceName Name of device load executable network on
183  * @param networkModel network model stream
184  * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
185  * operation*
186  * @return An executable network reference
187  */
188  ExecutableNetwork ImportNetwork(std::istream& networkModel, const std::string& deviceName = {},
189  const std::map<std::string, std::string>& config = {});
190 
191  /**
192  * @brief Creates an executable network from a previously exported network within a specified
193  * remote context.
194  *
195  * @param networkModel Network model stream
196  * @param context Pointer to RemoteContext object
197  * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
198  * operation
199  * @return An executable network reference
200  */
201  ExecutableNetwork ImportNetwork(std::istream& networkModel,
202  const RemoteContext::Ptr& context,
203  const std::map<std::string, std::string>& config = {});
204 
205  /**
206  * @brief Query device if it supports specified network with specified configuration
207  *
208  * @param deviceName A name of a device to query
209  * @param network Network object to query
210  * @param config Optional map of pairs: (config parameter name, config parameter value)
211  * @return An object containing a map of pairs a layer name -> a device name supporting this layer.
212  */
213  QueryNetworkResult QueryNetwork(
214  const ICNNNetwork& network, const std::string& deviceName,
215  const std::map<std::string, std::string>& config = {}) const;
216 
217  /**
218  * @brief Sets configuration for device, acceptable keys can be found in ie_plugin_config.hpp
219  *
220  * @param deviceName An optinal name of a device. If device name is not specified, the config is set for all the
221  * registered devices.
222  *
223  * @param config Map of pairs: (config parameter name, config parameter value)
224  */
225  void SetConfig(const std::map<std::string, std::string>& config, const std::string& deviceName = {});
226 
227  /**
228  * @brief Gets configuration dedicated to device behaviour.
229  *
230  * The method is targeted to extract information which can be set via SetConfig method.
231  *
232  * @param deviceName - A name of a device to get a configuration value.
233  * @param name - value of config corresponding to config key.
234  * @return Value of config corresponding to config key.
235  */
236  Parameter GetConfig(const std::string& deviceName, const std::string& name) const;
237 
238  /**
239  * @brief Gets general runtime metric for dedicated hardware.
240  *
241  * The method is needed to request common device properties
242  * which are executable network agnostic. It can be device name, temperature, other devices-specific values.
243  *
244  * @param deviceName - A name of a device to get a metric value.
245  * @param name - metric name to request.
246  * @return Metric value corresponding to metric key.
247  */
248  Parameter GetMetric(const std::string& deviceName, const std::string& name) const;
249 
250  /**
251  * @brief Returns devices available for neural networks inference
252  *
253  * @return A vector of devices. The devices are returned as { CPU, FPGA.0, FPGA.1, MYRIAD }
254  If there more than one device of specific type, they are enumerated with .# suffix.
255  */
256  std::vector<std::string> GetAvailableDevices() const;
257 
258  /**
259  * @brief Register new device and plugin which implement this device inside Inference Engine.
260  *
261  * @param pluginName A name of plugin. Depending on platform pluginName is wrapped with shared library suffix and
262  * prefix to identify library full name
263  *
264  * @param deviceName A device name to register plugin for. If device name is not specified, then it's taken from
265  * plugin itself.
266  */
267  void RegisterPlugin(const std::string& pluginName, const std::string& deviceName);
268 
269  /**
270  * @brief Unloads previously loaded plugin with a specified name from Inference Engine
271  * The method is needed to remove plugin instance and free its resources. If plugin for a
272  * specified device has not been created before, the method throws an exception.
273  *
274  * @param deviceName Device name identifying plugin to remove from Inference Engine
275  */
276  void UnregisterPlugin(const std::string& deviceName);
277 
278  /** @brief Registers plugin to Inference Engine Core instance using XML configuration file with
279  * plugins description.
280  *
281  * XML file has the following structure:
282  *
283  * ```xml
284  * <ie>
285  * <plugins>
286  * <plugin name="" location="">
287  * <extensions>
288  * <extension location=""/>
289  * </extensions>
290  * <properties>
291  * <property key="" value=""/>
292  * </properties>
293  * </plugin>
294  * </plugins>
295  * </ie>
296  * ```
297  *
298  * - `name` identifies name of device enabled by plugin
299  * - `location` specifies absolute path to dynamic library with plugin. A path can also be relative to inference
300  * engine shared library. It allows to have common config for different systems with different configurations.
301  * - Properties are set to plugin via the `SetConfig` method.
302  * - Extensions are set to plugin via the `AddExtension` method.
303  *
304  * @param xmlConfigFile A path to .xml file with plugins to register.
305  */
306  void RegisterPlugins(const std::string& xmlConfigFile);
307 
308  /**
309  * @brief Create a new shared context object on specified accelerator device
310  * using specified plugin-specific low level device API parameters (device handle, pointer, etc.)
311  * @param deviceName Name of a device to create new shared context on.
312  * @param params Map of device-specific shared context parameters.
313  * @return A shared pointer to a created remote context.
314  */
315  RemoteContext::Ptr CreateContext(const std::string& deviceName, const ParamMap& params);
316 
317  /**
318  * @brief Get a pointer to default(plugin-supplied) shared context object for specified accelerator device.
319  * @param deviceName - A name of a device to get create shared context from.
320  * @return A shared pointer to a default remote context.
321  */
322  RemoteContext::Ptr GetDefaultContext(const std::string& deviceName);
323 };
324 } // namespace InferenceEngine
A header file that provides wrapper classes for IExecutableNetwork.
std::map< std::string, std::string > supportedLayersMap
A map of supported layers:
Definition: ie_core.hpp:33
This class represents Inference Engine Core entity.
Definition: ie_core.hpp:51
Definition: cldnn_config.hpp:16
ResponseDesc resp
Response message.
Definition: ie_core.hpp:43
StatusCode
This enum contains codes for all possible return values of the interface functions.
Definition: ie_common.h:224
This is a header file for the IE RemoteContext and RemoteBlob classes.
std::string name
Layer name.
Definition: ie_layers.h:42
std::shared_ptr< const Blob > CPtr
A smart pointer to the const Blob object.
Definition: ie_blob.h:47
Represents detailed information for an error.
Definition: ie_common.h:247
std::map< std::string, Parameter > ParamMap
An std::map object containing low-level object parameters of classes that are derived from RemoteBlob...
Definition: ie_remote_context.hpp:26
This is the main interface to describe the NN topology.
Definition: ie_icnn_network.hpp:43
wrapper over IExecutableNetwork
Definition: ie_executable_network.hpp:30
This class contains all the information about the Neural Network and the related binary information...
Definition: ie_cnn_network.h:38
StatusCode rc
A status code.
Definition: ie_core.hpp:38
This is a header file with functions related to filesystem operations.
This class represents an object to work with different parameters.
Definition: ie_parameter.hpp:38
std::shared_ptr< IExtension > IExtensionPtr
A shared pointer to a IExtension interface.
Definition: ie_iextension.h:359
This class represents a custom error listener.
Definition: ie_error.hpp:18
std::map< std::string, std::string > params
Map of pairs: (parameter name, parameter value)
Definition: ie_layers.h:367
Response structure encapsulating information about supported layer.
Definition: ie_core.hpp:27
std::shared_ptr< RemoteContext > Ptr
A smart pointer to the RemoteContext object.
Definition: ie_remote_context.hpp:99
A header file that defines a wrapper class for handling extension instantiation and releasing resourc...