ie_plugin.hpp
// Copyright (C) 2018-2019 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

/**
 * @brief A header file for the main Inference Engine API
 * @file ie_plugin.hpp
 */
#pragma once

#include <ie_icnn_network.hpp>
#include <ie_iextension.h>
#include "ie_api.h"
#include "details/ie_no_copy.hpp"
#include "ie_error.hpp"
#include "ie_version.hpp"
#include "ie_iexecutable_network.hpp"
#include <string>
#include <vector>
#include <memory>
#include <map>
#include <set>

/**
 * @def INFERENCE_PLUGIN_API(type)
 * @brief Defines an Inference Engine Plugin API method
 */

#if defined(_WIN32)
    #ifdef IMPLEMENT_INFERENCE_ENGINE_PLUGIN
        #define INFERENCE_PLUGIN_API(type) extern "C" __declspec(dllexport) type
    #else
        #define INFERENCE_PLUGIN_API(type) extern "C" type
    #endif
#elif (__GNUC__ >= 4)
    #ifdef IMPLEMENT_INFERENCE_ENGINE_PLUGIN
        #define INFERENCE_PLUGIN_API(type) extern "C" __attribute__((visibility("default"))) type
    #else
        #define INFERENCE_PLUGIN_API(type) extern "C" type
    #endif
#else
    #define INFERENCE_PLUGIN_API(type) extern "C" type
#endif
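
// Usage sketch (illustrative, not part of the original header): a plugin
// built with IMPLEMENT_INFERENCE_ENGINE_PLUGIN defined exports its factory
// entry point through this macro. `MyPluginImpl` is a hypothetical class
// implementing IInferencePlugin; OK and GENERAL_ERROR are StatusCode values
// from ie_common.h.
//
//   INFERENCE_PLUGIN_API(StatusCode) CreatePluginEngine(
//           IInferencePlugin *&plugin, ResponseDesc *resp) noexcept {
//       try {
//           plugin = new MyPluginImpl();
//           return OK;
//       } catch (...) {
//           return GENERAL_ERROR;
//       }
//   }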

namespace InferenceEngine {

/**
 * @brief Response structure encapsulating information about supported layers
 */
struct INFERENCE_ENGINE_API_CLASS(QueryNetworkResult) {
    /**
     * @deprecated Use QueryNetworkResult::supportedLayersMap which provides a layer -> device mapping
     * @brief Set of layers supported by the specific device
     */
    INFERENCE_ENGINE_DEPRECATED
    std::set<std::string> supportedLayers;

    /**
     * @brief A map of supported layers:
     * - key - a layer name
     * - value - a device name on which the layer is assigned
     */
    std::map<std::string, std::string> supportedLayersMap;

    /**
     * @brief A status code
     */
    StatusCode rc;

    /**
     * @brief Response message
     */
    ResponseDesc resp;

    /**
     * @brief A default constructor
     */
    QueryNetworkResult();

    /**
     * @brief A copy constructor
     * @param q Value to copy from
     */
    QueryNetworkResult(const QueryNetworkResult &q);

    /**
     * @brief A copy assignment operator
     * @param q A value to copy from
     * @return A copied object
     */
    const QueryNetworkResult &operator=(const QueryNetworkResult &q);

    /**
     * @brief A move assignment operator
     * @param q A value to move from
     * @return A moved object
     */
    QueryNetworkResult &operator=(QueryNetworkResult &&q);

    /**
     * @brief A destructor
     */
    ~QueryNetworkResult();
};
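
// Usage sketch (illustrative): inspecting a populated QueryNetworkResult,
// assuming `res` was filled in by IInferencePlugin::QueryNetwork below.
//
//   if (res.rc == OK) {
//       for (const auto &entry : res.supportedLayersMap) {
//           // entry.first is a layer name, entry.second the device name
//           // the layer is assigned to
//       }
//   }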

/**
 * @brief This class is the main plugin interface
 */
class IInferencePlugin : public details::IRelease {
public:
    /**
     * @brief Returns plugin version information
     * @param versionInfo Pointer to version info. Set by the plugin
     */
    virtual void GetVersion(const Version *&versionInfo) noexcept = 0;
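
    // Usage sketch (illustrative): the pointer is set to version data owned
    // by the plugin, so the caller does not free it.
    //
    //   const Version *versionInfo = nullptr;
    //   plugin->GetVersion(versionInfo);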

    /**
     * @brief Sets a logging callback.
     * Logging is used to track what is going on inside the plugin
     * @param listener Logging sink
     */
    virtual void SetLogCallback(IErrorListener &listener) noexcept = 0;

    /**
     * @deprecated Use IInferencePlugin::LoadNetwork(IExecutableNetwork::Ptr &, ICNNNetwork &, const std::map<std::string, std::string> &, ResponseDesc *)
     * @brief Loads a pre-built network with weights to the engine. On success the plugin is
     * ready to infer
     * @param network Network object acquired from CNNNetReader
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return Status code of the operation. OK if succeeded
     */
    INFERENCE_ENGINE_DEPRECATED
    virtual StatusCode LoadNetwork(ICNNNetwork &network, ResponseDesc *resp) noexcept = 0;

    /**
     * @brief Creates an executable network from a network object. Users can create as many networks as they need and use
     * them simultaneously (up to the limitation of the hardware resources)
     * @param ret Reference to a shared ptr of the returned network interface
     * @param network Network object acquired from CNNNetReader
     * @param config Map of pairs: (config parameter name, config parameter value) relevant only for this load operation
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return Status code of the operation. OK if succeeded
     */
    virtual StatusCode
    LoadNetwork(IExecutableNetwork::Ptr &ret, ICNNNetwork &network, const std::map<std::string, std::string> &config,
                ResponseDesc *resp) noexcept = 0;
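
    // Usage sketch (illustrative; assumes `plugin` is a valid IInferencePlugin*
    // and `network` an ICNNNetwork parsed elsewhere, e.g. with CNNNetReader):
    //
    //   IExecutableNetwork::Ptr exeNetwork;
    //   std::map<std::string, std::string> config;  // empty: plugin defaults
    //   ResponseDesc resp;
    //   StatusCode sc = plugin->LoadNetwork(exeNetwork, network, config, &resp);
    //   if (sc != OK) {
    //       // resp holds a human-readable description of the failure
    //   }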

    /**
     * @brief Creates an executable network from a previously exported network
     * @param ret Reference to a shared ptr of the returned network interface
     * @param modelFileName Path to the location of the exported file
     * @param config Map of pairs: (config parameter name, config parameter value) relevant only for this load operation
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return Status code of the operation. OK if succeeded
     */
    virtual StatusCode
    ImportNetwork(IExecutableNetwork::Ptr &ret, const std::string &modelFileName,
                  const std::map<std::string, std::string> &config, ResponseDesc *resp) noexcept = 0;
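
    // Usage sketch (illustrative; "exported.blob" is a hypothetical path to a
    // previously exported network file):
    //
    //   IExecutableNetwork::Ptr imported;
    //   ResponseDesc resp;
    //   StatusCode sc = plugin->ImportNetwork(imported, "exported.blob", {}, &resp);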

    /**
     * @deprecated Load an IExecutableNetwork and create an IInferRequest instead
     * @brief Infers one or more images.
     * Input and output dimensions depend on the topology.
     * For example, for classification topologies use a 4D blob as input (batch, channels, width,
     * height) and get a 1D blob as output (scoring probability vector). To infer a batch,
     * use a 4D blob as input and get a 2D blob as output. In both cases the method
     * allocates the resulting blob
     * @param input Any TBlob<> object that contains the data to infer. The type of TBlob must match the network input precision and size.
     * @param result Related TBlob<> object that contains the result of the inference action, typically this is a float blob.
     *        The blob does not need to be allocated or initialized; the engine allocates the relevant data.
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return Status code of the operation. OK if succeeded
     */
    INFERENCE_ENGINE_DEPRECATED
    virtual StatusCode Infer(const Blob &input, Blob &result, ResponseDesc *resp) noexcept = 0;

    /**
     * @deprecated Load an IExecutableNetwork and create an IInferRequest instead
     * @brief Infers tensors. Input and output dimensions depend on the topology.
     * For example, for classification topologies use a 4D blob as input (batch, channels, width,
     * height) and get a 1D blob as output (scoring probability vector). To infer a batch,
     * use a 4D blob as input and get a 2D blob as output. In both cases the method
     * allocates the resulting blob
     * @param input Map of input blobs accessed by input names
     * @param result Map of output blobs accessed by output names
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return Status code of the operation. OK if succeeded
     */
    INFERENCE_ENGINE_DEPRECATED
    virtual StatusCode Infer(const BlobMap &input, BlobMap &result, ResponseDesc *resp) noexcept = 0;

    /**
     * @deprecated Use IInferRequest to get performance measures
     * @brief Queries performance measures per layer to identify the most time-consuming layers.
     * Note: not all plugins provide meaningful data
     * @param perfMap Map of layer names to profiling information for that layer
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return Status code of the operation. OK if succeeded
     */
    INFERENCE_ENGINE_DEPRECATED
    virtual StatusCode GetPerformanceCounts(std::map<std::string, InferenceEngineProfileInfo> &perfMap,
                                            ResponseDesc *resp) const noexcept = 0;

    /**
     * @brief Registers an extension within the plugin
     * @param extension Pointer to an already loaded extension
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return Status code of the operation. OK if succeeded
     */
    virtual StatusCode AddExtension(InferenceEngine::IExtensionPtr extension,
                                    InferenceEngine::ResponseDesc *resp) noexcept = 0;
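
    // Usage sketch (illustrative; assumes `extension` is an IExtensionPtr
    // obtained from an already loaded extension library):
    //
    //   ResponseDesc resp;
    //   StatusCode sc = plugin->AddExtension(extension, &resp);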

    /**
     * @brief Sets a configuration for the plugin; acceptable keys can be found in ie_plugin_config.hpp
     * @param config Map of pairs: (config parameter name, config parameter value)
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return Status code of the operation. OK if succeeded
     */
    virtual StatusCode SetConfig(const std::map<std::string, std::string> &config, ResponseDesc *resp) noexcept = 0;
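
    // Usage sketch (illustrative; the "PERF_COUNT" key is an assumption based
    // on the naming convention in ie_plugin_config.hpp, and support for any
    // given key is plugin-specific):
    //
    //   std::map<std::string, std::string> config = {{"PERF_COUNT", "YES"}};
    //   ResponseDesc resp;
    //   StatusCode sc = plugin->SetConfig(config, &resp);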
    /**
     * @deprecated Use IInferencePlugin::QueryNetwork(const ICNNNetwork&, const std::map<std::string, std::string> &, QueryNetworkResult&) const
     * @brief Queries the plugin whether it supports the specified network
     * @param network Network object to query
     * @param res Reference to the query network result
     */
    INFERENCE_ENGINE_DEPRECATED
    virtual void QueryNetwork(const ICNNNetwork& network, QueryNetworkResult& res) const noexcept {
        (void)network;
        res.rc = InferenceEngine::NOT_IMPLEMENTED;
    }

    /**
     * @brief Queries the plugin whether it supports the specified network with the specified configuration
     * @param network Network object to query
     * @param config Map of pairs: (config parameter name, config parameter value)
     * @param res Reference to the query network result
     */
    virtual void QueryNetwork(const ICNNNetwork& network,
                              const std::map<std::string, std::string> & config, QueryNetworkResult& res) const noexcept {
        (void)network;
        (void)config;
        res.rc = InferenceEngine::NOT_IMPLEMENTED;
    }
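
    // Usage sketch (illustrative; assumes `network` is a parsed ICNNNetwork):
    //
    //   QueryNetworkResult res;
    //   plugin->QueryNetwork(network, {}, res);
    //   if (res.rc == OK) {
    //       // res.supportedLayersMap lists the layers this plugin can run
    //   }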
};

/**
 * @brief Creates the default instance of the interface (per plugin)
 * @param plugin Pointer to the plugin
 * @param resp Pointer to the response message that holds a description of an error if any occurred
 * @return Status code of the operation. OK if succeeded
 */
INFERENCE_PLUGIN_API(StatusCode) CreatePluginEngine(IInferencePlugin *&plugin, ResponseDesc *resp) noexcept;
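
// Usage sketch (illustrative): obtaining and releasing the plugin instance.
// IInferencePlugin derives from details::IRelease, so it is destroyed through
// Release() rather than delete.
//
//   IInferencePlugin *plugin = nullptr;
//   ResponseDesc resp;
//   if (CreatePluginEngine(plugin, &resp) == OK) {
//       // ... use the plugin ...
//       plugin->Release();
//   }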
} // namespace InferenceEngine