ie_core.hpp
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

/**
 * @brief This is a header file for the Inference Engine Core class C++ API
 *
 * @file ie_core.hpp
 */
#pragma once

#include <map>
#include <memory>
#include <string>
#include <vector>

#include "cpp/ie_plugin_cpp.hpp"
#include "details/os/os_filesystem.hpp"
#include "ie_extension.h"
#include "ie_remote_context.hpp"

namespace InferenceEngine {

/**
 * @brief This class represents Inference Engine Core entity.
 *
 * It can throw exceptions safely for the application, where it is properly handled.
 */
class INFERENCE_ENGINE_API_CLASS(Core) {
    class Impl;
    std::shared_ptr<Impl> _impl;

public:
    /** @brief Constructs an Inference Engine Core instance using an XML configuration file with
     * plugins description.
     *
     * See RegisterPlugins for more details.
     *
     * @param xmlConfigFile A path to the .xml file with plugins to load from. If the XML configuration file is not
     * specified, the default Inference Engine plugins are loaded from the default plugin.xml file.
     */
    explicit Core(const std::string& xmlConfigFile = std::string());
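
    /* A minimal construction sketch ("custom_plugins.xml" is a hypothetical file name, not part of
     * the API):
     *
     * ```cpp
     * #include <inference_engine.hpp>
     *
     * int main() {
     *     InferenceEngine::Core ie;                               // plugins described by the default plugin.xml
     *     InferenceEngine::Core ieCustom("custom_plugins.xml");   // plugins described by a custom XML file
     *     return 0;
     * }
     * ```
     */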

    /**
     * @brief Returns plugins version information
     *
     * @param deviceName Device name to identify a plugin
     * @return A map of plugin names to versions
     */
    std::map<std::string, Version> GetVersions(const std::string& deviceName) const;
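
    /* A minimal sketch of printing version information for a device ("CPU" is used as an example
     * device name):
     *
     * ```cpp
     * #include <inference_engine.hpp>
     * #include <iostream>
     *
     * int main() {
     *     InferenceEngine::Core ie;
     *     for (const auto& item : ie.GetVersions("CPU")) {
     *         const InferenceEngine::Version& v = item.second;
     *         std::cout << item.first << " : " << v.description << " / " << v.buildNumber << std::endl;
     *     }
     *     return 0;
     * }
     * ```
     */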

    /**
     * @deprecated IErrorListener is not used anymore. An exception is thrown in case of any unexpected situations.
     * The function will be removed in 2021.1 release.
     * @brief Sets logging callback
     *
     * Logging is used to track what is going on inside the plugins and the Inference Engine library
     *
     * @param listener Logging sink
     */
    IE_SUPPRESS_DEPRECATED_START
    INFERENCE_ENGINE_DEPRECATED("IErrorListener is not used anymore. An exception is thrown in case of any unexpected situations.")
    void SetLogCallback(IErrorListener& listener) const;
    IE_SUPPRESS_DEPRECATED_END

#ifdef ENABLE_UNICODE_PATH_SUPPORT
    /**
     * @brief Reads IR xml and bin files
     * @param modelPath Path to the IR .xml file
     * @param binPath Path to the .bin file with weights. If the path is empty, the reader tries to find a .bin file
     * with the same name as the .xml file; if such a file is not found, the IR is loaded without weights.
     * @return CNNNetwork
     */
    CNNNetwork ReadNetwork(const std::wstring& modelPath, const std::wstring& binPath = std::wstring()) const {
        return ReadNetwork(details::wStringtoMBCSstringChar(modelPath), details::wStringtoMBCSstringChar(binPath));
    }
#endif

    /**
     * @brief Reads IR xml and bin files
     * @param modelPath Path to the IR .xml file
     * @param binPath Path to the .bin file with weights. If the path is empty, the reader tries to find a .bin file
     * with the same name as the .xml file; if such a file is not found, the IR is loaded without weights.
     * @return CNNNetwork
     */
    CNNNetwork ReadNetwork(const std::string& modelPath, const std::string& binPath = "") const;
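
    /* A minimal usage sketch for reading an IR from files ("model.xml" and "model.bin" are
     * hypothetical paths):
     *
     * ```cpp
     * #include <inference_engine.hpp>
     *
     * int main() {
     *     InferenceEngine::Core ie;
     *     // Reads model.xml and looks for a model.bin next to it
     *     InferenceEngine::CNNNetwork network = ie.ReadNetwork("model.xml");
     *     // Or point to the weights file explicitly
     *     InferenceEngine::CNNNetwork network2 = ie.ReadNetwork("model.xml", "model.bin");
     *     return 0;
     * }
     * ```
     */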

    /**
     * @brief Reads an IR from a string representation of the model and a blob with weights
     * @param model String with the model in IR format
     * @param weights Shared pointer to a constant blob with weights
     * @return CNNNetwork
     */
    CNNNetwork ReadNetwork(const std::string& model, const Blob::CPtr& weights) const;
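
    /* A minimal sketch of reading the model from a string and packing the weights file into a U8
     * blob (file names are hypothetical):
     *
     * ```cpp
     * #include <inference_engine.hpp>
     * #include <cstring>
     * #include <fstream>
     * #include <iterator>
     * #include <sstream>
     * #include <vector>
     *
     * int main() {
     *     InferenceEngine::Core ie;
     *
     *     std::ifstream xmlFile("model.xml");
     *     std::stringstream xmlStream;
     *     xmlStream << xmlFile.rdbuf();
     *
     *     std::ifstream binFile("model.bin", std::ios::binary);
     *     std::vector<char> binData((std::istreambuf_iterator<char>(binFile)), std::istreambuf_iterator<char>());
     *
     *     // Weights are passed as a U8 blob holding the .bin file contents
     *     auto weights = InferenceEngine::make_shared_blob<uint8_t>(
     *         {InferenceEngine::Precision::U8, {binData.size()}, InferenceEngine::Layout::C});
     *     weights->allocate();
     *     std::memcpy(weights->buffer().as<uint8_t*>(), binData.data(), binData.size());
     *
     *     InferenceEngine::CNNNetwork network = ie.ReadNetwork(xmlStream.str(), weights);
     *     return 0;
     * }
     * ```
     */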

    /**
     * @brief Creates an executable network from a network object.
     *
     * Users can create as many networks as they need and use
     * them simultaneously (up to the limitation of the hardware resources)
     *
     * @param network CNNNetwork object acquired from Core::ReadNetwork
     * @param deviceName Name of a device to load the network to
     * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
     * operation
     * @return An executable network reference
     */
    ExecutableNetwork LoadNetwork(
        const CNNNetwork network, const std::string& deviceName,
        const std::map<std::string, std::string>& config = std::map<std::string, std::string>());
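
    /* A minimal end-to-end sketch, assuming a model at the hypothetical path "model.xml" and an
     * available CPU plugin; the per-load config entry is optional:
     *
     * ```cpp
     * #include <inference_engine.hpp>
     * #include <map>
     * #include <string>
     *
     * int main() {
     *     InferenceEngine::Core ie;
     *     InferenceEngine::CNNNetwork network = ie.ReadNetwork("model.xml");
     *     std::map<std::string, std::string> config = {
     *         {InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES}};
     *     InferenceEngine::ExecutableNetwork execNet = ie.LoadNetwork(network, "CPU", config);
     *     InferenceEngine::InferRequest request = execNet.CreateInferRequest();
     *     request.Infer();  // input blobs are allocated by the plugin if not set explicitly
     *     return 0;
     * }
     * ```
     */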

    /**
     * @brief Registers an extension
     * @param extension Pointer to an already loaded extension
     */
    void AddExtension(const IExtensionPtr& extension);

    /**
     * @brief Creates an executable network from a network object within a specified remote context.
     * @param network CNNNetwork object acquired from Core::ReadNetwork
     * @param context Pointer to a RemoteContext object
     * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
     * operation
     * @return An executable network reference
     */
    ExecutableNetwork LoadNetwork(
        const CNNNetwork network, RemoteContext::Ptr context,
        const std::map<std::string, std::string>& config = std::map<std::string, std::string>());
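
    /* A minimal sketch of loading to a remote context, assuming a GPU plugin that supplies a
     * default context ("model.xml" is a hypothetical path); CreateContext() could be used instead
     * with device-specific parameters:
     *
     * ```cpp
     * #include <inference_engine.hpp>
     *
     * int main() {
     *     InferenceEngine::Core ie;
     *     InferenceEngine::CNNNetwork network = ie.ReadNetwork("model.xml");
     *     InferenceEngine::RemoteContext::Ptr ctx = ie.GetDefaultContext("GPU");
     *     InferenceEngine::ExecutableNetwork execNet = ie.LoadNetwork(network, ctx);
     *     return 0;
     * }
     * ```
     */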

    /**
     * @brief Registers an extension for the specified plugin
     *
     * @param extension Pointer to an already loaded extension
     * @param deviceName Device name to identify the plugin to add an executable extension to
     */
    void AddExtension(IExtensionPtr extension, const std::string& deviceName);
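
    /* A minimal sketch of registering a custom layer extension ("libsample_extension.so" is a
     * placeholder for a real extension library path; on Windows with Unicode path support the
     * Extension constructor takes a wide string):
     *
     * ```cpp
     * #include <inference_engine.hpp>
     * #include <memory>
     * #include <string>
     *
     * int main() {
     *     InferenceEngine::Core ie;
     *     InferenceEngine::IExtensionPtr extension =
     *         std::make_shared<InferenceEngine::Extension>(std::string("libsample_extension.so"));
     *     ie.AddExtension(extension);          // for all plugins that support extensions
     *     ie.AddExtension(extension, "CPU");   // or only for a particular plugin
     *     return 0;
     * }
     * ```
     */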

    /**
     * @brief Creates an executable network from a previously exported network
     *
     * @param modelFileName Path to the location of the exported file
     * @param deviceName Name of a device to load the executable network on
     * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
     * operation
     * @return An executable network reference
     */
    ExecutableNetwork ImportNetwork(
        const std::string& modelFileName, const std::string& deviceName,
        const std::map<std::string, std::string>& config = std::map<std::string, std::string>());
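
    /* A minimal export/import round-trip sketch, assuming a device that supports it (MYRIAD is
     * used as an example; "model.xml" and "model.blob" are hypothetical paths):
     *
     * ```cpp
     * #include <inference_engine.hpp>
     *
     * int main() {
     *     InferenceEngine::Core ie;
     *     InferenceEngine::CNNNetwork network = ie.ReadNetwork("model.xml");
     *     InferenceEngine::ExecutableNetwork execNet = ie.LoadNetwork(network, "MYRIAD");
     *     execNet.Export("model.blob");
     *     // Later the executable network can be re-created without the original IR
     *     InferenceEngine::ExecutableNetwork imported = ie.ImportNetwork("model.blob", "MYRIAD");
     *     return 0;
     * }
     * ```
     */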

    /**
     * @brief Creates an executable network from a previously exported network
     * @param networkModel Network model stream
     * @param deviceName Name of a device to load the executable network on
     * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
     * operation
     * @return An executable network reference
     */
    ExecutableNetwork ImportNetwork(std::istream& networkModel, const std::string& deviceName = {},
                                    const std::map<std::string, std::string>& config = {});

    /**
     * @brief Creates an executable network from a previously exported network within a specified
     * remote context.
     *
     * @param networkModel Network model stream
     * @param context Pointer to a RemoteContext object
     * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
     * operation
     * @return An executable network reference
     */
    ExecutableNetwork ImportNetwork(std::istream& networkModel,
                                    const RemoteContext::Ptr& context,
                                    const std::map<std::string, std::string>& config = {});

    /**
     * @brief Queries a device whether it supports the specified network with the specified configuration
     *
     * @param network Network object to query
     * @param deviceName A name of a device to query
     * @param config Optional map of pairs: (config parameter name, config parameter value)
     * @return An object containing a map of supported layers and an error description, if any occurred
     */
    QueryNetworkResult QueryNetwork(
        const ICNNNetwork& network, const std::string& deviceName,
        const std::map<std::string, std::string>& config = std::map<std::string, std::string>()) const;
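
    /* A minimal sketch of checking which layers a device can execute ("model.xml" is a
     * hypothetical path):
     *
     * ```cpp
     * #include <inference_engine.hpp>
     * #include <iostream>
     *
     * int main() {
     *     InferenceEngine::Core ie;
     *     InferenceEngine::CNNNetwork network = ie.ReadNetwork("model.xml");
     *     InferenceEngine::QueryNetworkResult res = ie.QueryNetwork(network, "CPU");
     *     // supportedLayersMap maps a layer name to the device able to execute it
     *     for (const auto& layer : res.supportedLayersMap) {
     *         std::cout << layer.first << " -> " << layer.second << std::endl;
     *     }
     *     return 0;
     * }
     * ```
     */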

    /**
     * @brief Sets configuration for a device, acceptable keys can be found in ie_plugin_config.hpp
     *
     * @param config Map of pairs: (config parameter name, config parameter value)
     *
     * @param deviceName An optional name of a device. If the device name is not specified, the config is set for all
     * the registered devices.
     */
    void SetConfig(const std::map<std::string, std::string>& config, const std::string& deviceName = std::string());
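
    /* A minimal sketch of setting a device-specific option (the key below is one of the standard
     * keys defined in ie_plugin_config.hpp):
     *
     * ```cpp
     * #include <inference_engine.hpp>
     *
     * int main() {
     *     InferenceEngine::Core ie;
     *     // Limit the CPU plugin to four inference threads
     *     ie.SetConfig({{InferenceEngine::PluginConfigParams::KEY_CPU_THREADS_NUM, "4"}}, "CPU");
     *     // Without a device name the config would be applied to all registered devices
     *     return 0;
     * }
     * ```
     */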

    /**
     * @brief Gets configuration dedicated to device behaviour.
     *
     * The method is targeted to extract information which can be set via the SetConfig method.
     *
     * @param deviceName - A name of a device to get a configuration value for.
     * @param name - A config key for which a value is requested.
     * @return Value of the config corresponding to the config key.
     */
    Parameter GetConfig(const std::string& deviceName, const std::string& name) const;

    /**
     * @brief Gets general runtime metric for dedicated hardware.
     *
     * The method is needed to request common device properties
     * which are executable network agnostic. It can be device name, temperature, and other device-specific values.
     *
     * @param deviceName - A name of a device to get a metric value for.
     * @param name - Metric name to request.
     * @return Metric value corresponding to the metric key.
     */
    Parameter GetMetric(const std::string& deviceName, const std::string& name) const;
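
    /* A minimal sketch of reading a metric and a config value back (METRIC_KEY and the config key
     * come from ie_plugin_config.hpp; the returned Parameter is unpacked with as<T>()):
     *
     * ```cpp
     * #include <inference_engine.hpp>
     * #include <iostream>
     * #include <string>
     *
     * int main() {
     *     InferenceEngine::Core ie;
     *     std::string fullName = ie.GetMetric("CPU", METRIC_KEY(FULL_DEVICE_NAME)).as<std::string>();
     *     std::string threads =
     *         ie.GetConfig("CPU", InferenceEngine::PluginConfigParams::KEY_CPU_THREADS_NUM).as<std::string>();
     *     std::cout << fullName << ", CPU_THREADS_NUM = " << threads << std::endl;
     *     return 0;
     * }
     * ```
     */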

    /**
     * @brief Returns devices available for neural networks inference
     *
     * @return A vector of devices. The devices are returned as { CPU, FPGA.0, FPGA.1, MYRIAD }.
     * If there is more than one device of a specific type, they are enumerated with the .# suffix.
     */
    std::vector<std::string> GetAvailableDevices() const;
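
    /* A minimal sketch of listing the devices visible to this Core instance:
     *
     * ```cpp
     * #include <inference_engine.hpp>
     * #include <iostream>
     * #include <string>
     *
     * int main() {
     *     InferenceEngine::Core ie;
     *     for (const std::string& device : ie.GetAvailableDevices()) {
     *         std::cout << device << std::endl;  // e.g. CPU, GPU, MYRIAD
     *     }
     *     return 0;
     * }
     * ```
     */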

    /**
     * @brief Registers a new device and a plugin which implements this device inside Inference Engine.
     *
     * @param pluginName A name of a plugin. Depending on the platform, pluginName is wrapped with a shared library
     * suffix and prefix to identify the full library name
     *
     * @param deviceName A device name to register the plugin for. If the device name is not specified, it is taken
     * from the plugin using the InferenceEnginePluginPtr::GetName function
     */
    void RegisterPlugin(const std::string& pluginName, const std::string& deviceName);

    /**
     * @brief Unloads a previously loaded plugin with the specified name from Inference Engine.
     * The method is needed to remove the plugin instance and free its resources. If the plugin for the
     * specified device has not been created before, the method throws an exception.
     *
     * @param deviceName Device name identifying the plugin to remove from Inference Engine
     */
    void UnregisterPlugin(const std::string& deviceName);

    /** @brief Registers plugins to Inference Engine Core instance using an XML configuration file with
     * plugins description.
     *
     * The XML file has the following structure:
     *
     * ```xml
     * <ie>
     *     <plugins>
     *         <plugin name="" location="">
     *             <extensions>
     *                 <extension location=""/>
     *             </extensions>
     *             <properties>
     *                 <property key="" value=""/>
     *             </properties>
     *         </plugin>
     *     </plugins>
     * </ie>
     * ```
     *
     * - `name` identifies the name of the device enabled by the plugin
     * - `location` specifies the absolute path to a dynamic library with the plugin. The path can also be relative
     * to the Inference Engine shared library, which allows having a common config for different systems with
     * different configurations.
     * - Properties are set to the plugin via the `SetConfig` method.
     * - Extensions are set to the plugin via the `AddExtension` method.
     *
     * @param xmlConfigFile A path to the .xml file with plugins to register.
     */
    void RegisterPlugins(const std::string& xmlConfigFile);
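
    /* A minimal sketch of loading an additional plugin configuration ("custom_plugins.xml" is a
     * placeholder for a file with the structure shown above):
     *
     * ```cpp
     * #include <inference_engine.hpp>
     *
     * int main() {
     *     InferenceEngine::Core ie;
     *     ie.RegisterPlugins("custom_plugins.xml");
     *     return 0;
     * }
     * ```
     */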

    /**
     * @brief Creates a new shared context object on the specified accelerator device
     * using specified plugin-specific low-level device API parameters (device handle, pointer, etc.)
     * @param deviceName Name of a device to create the new shared context on.
     * @param params Map of device-specific shared context parameters.
     * @return A shared pointer to the created remote context.
     */
    RemoteContext::Ptr CreateContext(const std::string& deviceName, const ParamMap& params);

    /**
     * @brief Gets a pointer to the default (plugin-supplied) shared context object for the specified accelerator
     * device.
     * @param deviceName - A name of a device to get the default shared context from.
     * @return A shared pointer to the default remote context.
     */
    RemoteContext::Ptr GetDefaultContext(const std::string& deviceName);
};
}  // namespace InferenceEngine