ie_plugin.hpp
// Copyright (C) 2018-2019 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

/**
 * @brief A header file for the main Inference Engine API
 * @file ie_plugin.hpp
 */
#pragma once

#include <ie_icnn_network.hpp>
#include <ie_iextension.h>
#include "ie_api.h"
#include "details/ie_no_copy.hpp"
#include "ie_error.hpp"
#include "ie_version.hpp"
#include "ie_iexecutable_network.hpp"
#include <string>
#include <vector>
#include <memory>
#include <map>
#include <set>

#if defined(_WIN32)
    #ifdef IMPLEMENT_INFERENCE_ENGINE_PLUGIN
        #define INFERENCE_PLUGIN_API(type) extern "C" __declspec(dllexport) type
    #else
        #define INFERENCE_PLUGIN_API(type) extern "C" type
    #endif
#elif (__GNUC__ >= 4)
    #ifdef IMPLEMENT_INFERENCE_ENGINE_PLUGIN
        #define INFERENCE_PLUGIN_API(type) extern "C" __attribute__((visibility("default"))) type
    #else
        #define INFERENCE_PLUGIN_API(type) extern "C" type
    #endif
#else
    #define INFERENCE_PLUGIN_API(type) extern "C" type
#endif

namespace InferenceEngine {

/**
 * @brief Response structure encapsulating information about the supported layers
 */
struct QueryNetworkResult {
    std::set<std::string> supportedLayers;
    StatusCode rc;
    ResponseDesc resp;
};

/**
 * @brief This class is the main plugin interface
 */
class IInferencePlugin : public details::IRelease {
public:
    /**
     * @brief Returns plugin version information
     * @param versionInfo Pointer to version info. Set by the plugin
     */
    virtual void GetVersion(const Version *&versionInfo) noexcept = 0;

    /**
     * @brief Sets a logging callback
     * Logging is used to track what is going on inside the plugin
     * @param listener Logging sink
     */
    virtual void SetLogCallback(IErrorListener &listener) noexcept = 0;

    /**
     * @deprecated Use LoadNetwork with four parameters (executable network, CNN network, config, response)
     * @brief Loads a pre-built network with weights into the engine. On success the plugin is
     * ready to infer
     * @param network Network object acquired from CNNNetReader
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return Status code of the operation. OK if succeeded
     */
    virtual StatusCode LoadNetwork(ICNNNetwork &network, ResponseDesc *resp) noexcept = 0;

    /**
     * @brief Creates an executable network from a network object. Users can create as many networks as they
     * need and use them simultaneously (up to the limitations of the hardware resources)
     * @param ret Reference to a shared ptr of the returned network interface
     * @param network Network object acquired from CNNNetReader
     * @param config Map of pairs: (config parameter name, config parameter value) relevant only for this load operation
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return Status code of the operation. OK if succeeded
     */
    virtual StatusCode
    LoadNetwork(IExecutableNetwork::Ptr &ret, ICNNNetwork &network, const std::map<std::string, std::string> &config,
                ResponseDesc *resp) noexcept = 0;

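    // A minimal usage sketch (illustrative, not part of this header): "plugin" is
    // assumed to come from CreatePluginEngine and "network" from CNNNetReader; an
    // empty config map keeps the plugin defaults.
    //
    //     IExecutableNetwork::Ptr exec;
    //     ResponseDesc resp;
    //     StatusCode sc = plugin->LoadNetwork(exec, network, {}, &resp);
    //     if (sc != OK) std::cerr << resp.msg << std::endl;
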
    /**
     * @brief Creates an executable network from a previously exported network
     * @param ret Reference to a shared ptr of the returned network interface
     * @param modelFileName Path to the location of the exported file
     * @param config Map of pairs: (config parameter name, config parameter value) relevant only for this load operation
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return Status code of the operation. OK if succeeded
     */
    virtual StatusCode
    ImportNetwork(IExecutableNetwork::Ptr &ret, const std::string &modelFileName,
                  const std::map<std::string, std::string> &config, ResponseDesc *resp) noexcept = 0;

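    // Sketch of the import path (illustrative; the file name is hypothetical and
    // must have been produced by the matching plugin's export mechanism):
    //
    //     IExecutableNetwork::Ptr imported;
    //     StatusCode sc = plugin->ImportNetwork(imported, "exported_model.blob", {}, &resp);
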
    /**
     * @deprecated Use the Infer() overload that works with multiple inputs and outputs
     * @brief Infers one or more images.
     * Input and output dimensions depend on the topology.
     * As an example, for classification topologies use a 4D Blob as input (batch, channels, width,
     * height) and get a 1D blob as output (scoring probability vector). To infer a batch,
     * use a 4D Blob as input and get a 2D blob as output. In both cases the method
     * allocates the resulting blob
     * @param input Any TBlob<> object that contains the data to infer. The type of TBlob must match the network input precision and size.
     * @param result Related TBlob<> object that contains the result of the inference action, typically this is a float blob.
     *        The blob does not need to be allocated or initialized; the engine allocates the relevant data.
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return Status code of the operation. OK if succeeded
     */
    virtual StatusCode Infer(const Blob &input, Blob &result, ResponseDesc *resp) noexcept = 0;

    /**
     * @deprecated Load an IExecutableNetwork and create an IInferRequest instead
     * @brief Infers tensors. Input and output dimensions depend on the topology.
     * As an example, for classification topologies use a 4D Blob as input (batch, channels, width,
     * height) and get a 1D blob as output (scoring probability vector). To infer a batch,
     * use a 4D Blob as input and get a 2D blob as output. In both cases the method
     * allocates the resulting blob
     * @param input Map of input blobs accessed by input names
     * @param result Map of output blobs accessed by output names
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return Status code of the operation. OK if succeeded
     */
    virtual StatusCode Infer(const BlobMap &input, BlobMap &result, ResponseDesc *resp) noexcept = 0;

    /**
     * @deprecated Use IInferRequest to get performance measures
     * @brief Queries performance measures per layer to identify the most time-consuming layers.
     * Note: not all plugins provide meaningful data
     * @param perfMap Map of layer names to profiling information for each layer
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return Status code of the operation. OK if succeeded
     */
    virtual StatusCode GetPerformanceCounts(std::map<std::string, InferenceEngineProfileInfo> &perfMap,
                                            ResponseDesc *resp) const noexcept = 0;

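    // Sketch of reading the counters (illustrative; realTime_uSec is a field of
    // InferenceEngineProfileInfo declared in ie_common.h):
    //
    //     std::map<std::string, InferenceEngineProfileInfo> perfMap;
    //     if (plugin->GetPerformanceCounts(perfMap, &resp) == OK)
    //         for (const auto &kv : perfMap)
    //             std::cout << kv.first << ": " << kv.second.realTime_uSec << " us\n";
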
    /**
     * @brief Registers an extension within the plugin
     * @param extension Pointer to an already loaded extension
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return Status code of the operation. OK if succeeded
     */
    virtual StatusCode AddExtension(InferenceEngine::IExtensionPtr extension,
                                    InferenceEngine::ResponseDesc *resp) noexcept = 0;

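    // Sketch (assumption: InferenceEngine::Extension from ie_extension.h can load
    // a custom-layer library; the library name below is hypothetical):
    //
    //     IExtensionPtr ext = std::make_shared<Extension>("libmy_custom_layers.so");
    //     plugin->AddExtension(ext, &resp);
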
    /**
     * @brief Sets a configuration for the plugin; acceptable keys can be found in ie_plugin_config.hpp
     * @param config Map of pairs: (config parameter name, config parameter value)
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return Status code of the operation. OK if succeeded
     */
    virtual StatusCode SetConfig(const std::map<std::string, std::string> &config, ResponseDesc *resp) noexcept = 0;

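    // Sketch (illustrative; "PERF_COUNT"/"YES" is one of the key/value pairs
    // defined in ie_plugin_config.hpp, enabling performance counters):
    //
    //     std::map<std::string, std::string> config = {{"PERF_COUNT", "YES"}};
    //     plugin->SetConfig(config, &resp);
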
    /**
     * @deprecated Use the version with the config parameter
     * @brief Queries a plugin whether it supports the specified network
     * @param network Network object to query
     * @param res Reference to the result structure; filled with the supported layers and a status code
     */
    virtual void QueryNetwork(const ICNNNetwork& /*network*/, QueryNetworkResult& res) const noexcept {
        res.rc = InferenceEngine::NOT_IMPLEMENTED;
    }

    /**
     * @brief Queries a plugin whether it supports the specified network with the specified configuration
     * @param network Network object to query
     * @param config Map of pairs: (config parameter name, config parameter value)
     * @param res Reference to the result structure; filled with the supported layers and a status code
     */
    virtual void QueryNetwork(const ICNNNetwork& /*network*/,
                              const std::map<std::string, std::string> &/*config*/, QueryNetworkResult& res) const noexcept {
        res.rc = InferenceEngine::NOT_IMPLEMENTED;
    }
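
    // Sketch of checking device support before loading (illustrative):
    //
    //     QueryNetworkResult qr;
    //     plugin->QueryNetwork(network, {}, qr);
    //     if (qr.rc == OK)
    //         std::cout << qr.supportedLayers.size() << " layers supported\n";
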
};

/**
 * @brief Creates the default instance of the interface (per plugin)
 * @param plugin Pointer to the plugin
 * @param resp Pointer to the response message that holds a description of an error if any occurred
 * @return Status code of the operation. OK if succeeded
 */
INFERENCE_PLUGIN_API(StatusCode) CreatePluginEngine(IInferencePlugin *&plugin, ResponseDesc *resp) noexcept;
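
// Plugin-side sketch of the exported factory (illustrative; MyPluginImpl is a
// hypothetical IInferencePlugin implementation):
//
//     INFERENCE_PLUGIN_API(StatusCode) CreatePluginEngine(IInferencePlugin *&plugin,
//                                                         ResponseDesc *resp) noexcept {
//         try {
//             plugin = new MyPluginImpl();
//             return OK;
//         } catch (...) {
//             return GENERAL_ERROR;
//         }
//     }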
} // namespace InferenceEngine