ie_iextension.h
1 // Copyright (C) 2018 Intel Corporation
2 // SPDX-License-Identifier: Apache-2.0
3 //
4 
5 /**
6  * @brief This is a header file for the Inference Engine Extension Interface
7  * @file ie_iextension.h
8  */
9 #pragma once
10 
11 #include "ie_api.h"
12 #include "ie_device.hpp"
13 #include "ie_layers.h"
14 #include "ie_error.hpp"
15 #include "ie_version.hpp"
16 #include <vector>
17 #include <string>
18 #include <memory>
19 #include <map>
20 
21 #include "details/ie_no_copy.hpp"
22 
23 
24 #if defined(_WIN32) && defined(IMPLEMENT_INFERENCE_EXTENSION_API)
25 #define INFERENCE_EXTENSION_API(TYPE) extern "C" __declspec(dllexport) TYPE
26 #else
27 #define INFERENCE_EXTENSION_API(TYPE) INFERENCE_ENGINE_API(TYPE)
28 #endif
29 
30 
31 namespace InferenceEngine {
32 
33 /**
34  * @struct DataConfig
35  * @brief This structure describes data configuration
36  */
37 struct DataConfig {
38  /**
39  * @brief Format of memory descriptor
40  */
41  TensorDesc desc;
42  /**
43  * @brief Index of in-place memory. If -1, the memory cannot be used in-place
44  */
45  int inPlace = -1;
46  /**
47  * @brief Flag indicating constant memory. If all of the layer's memory is constant, it can be computed at the load stage.
48  */
49  bool constant = false;
50 };
51 
52 /**
53  * @struct LayerConfig
54  * @brief This structure describes Layer configuration
55  */
56 struct LayerConfig {
57  /**
58  * @brief Whether dynamic batch is supported. If false, dynamic batch is not supported
59  */
60  bool dynBatchSupport = false;
61  /**
62  * @brief Vector of input data configs
63  */
64  std::vector<DataConfig> inConfs;
65  /**
66  * @brief Vector of output data configs
67  */
68  std::vector<DataConfig> outConfs;
69 };
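// --- Illustrative sketch (editor's example, not part of ie_iextension.h) ---
// A hypothetical helper that builds a single LayerConfig from already-known
// tensor descriptors of a layer's inputs and outputs. The name makeDefaultConfig
// and the one-configuration-per-layer policy are assumptions of this sketch.
inline LayerConfig makeDefaultConfig(const std::vector<TensorDesc>& inDescs,
                                     const std::vector<TensorDesc>& outDescs) {
    LayerConfig config;
    config.dynBatchSupport = false;        // this sketch does not support dynamic batch
    for (const auto& desc : inDescs) {
        DataConfig dataConf;
        dataConf.desc = desc;              // memory format expected on this input
        dataConf.inPlace = -1;             // input memory is not reused in-place
        dataConf.constant = false;         // input is not known at load time
        config.inConfs.push_back(dataConf);
    }
    for (const auto& desc : outDescs) {
        DataConfig dataConf;
        dataConf.desc = desc;              // memory format produced on this output
        config.outConfs.push_back(dataConf);
    }
    return config;
}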
70 
71 /**
72  * @brief This class provides an interface for custom layer implementations
73  */
74 class ILayerImpl {
75 public:
76  using Ptr = std::shared_ptr<ILayerImpl>;
77 
78  /**
79  * @brief Destructor
80  */
81  virtual ~ILayerImpl() = default;
82 
83  /**
84  * @brief Gets all supported configurations for the current layer
85  * @param conf Vector with supported configurations
86  * @param resp Response descriptor
87  * @return Status code
88  */
89  virtual StatusCode getSupportedConfigurations(std::vector<LayerConfig>& conf, ResponseDesc* resp) noexcept = 0;
90 
91  /**
92  * @brief Initializes the implementation
93  * @param config Selected supported configuration
94  * @param resp Response descriptor
95  * @return Status code
96  */
97  virtual StatusCode init(LayerConfig& config, ResponseDesc* resp) noexcept = 0;
98 };
99 
100 /**
101  * @brief This class provides an interface for implementations with custom execution code
102  */
103 class ILayerExecImpl : public ILayerImpl {
104 public:
105  /**
106  * @brief Executes the layer using the given input and output memory blobs
107  * @param inputs Vector of blobs with input memory
108  * @param outputs Vector of blobs with output memory
109  * @param resp Response descriptor
110  * @return Status code
111  */
112  virtual StatusCode execute(std::vector<Blob::Ptr>& inputs,
113  std::vector<Blob::Ptr>& outputs, ResponseDesc* resp) noexcept = 0;
114 };
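// --- Illustrative sketch (editor's example, not part of ie_iextension.h) ---
// A hypothetical FP32, ReLU-like layer implementation. It reports one supported
// configuration built with the makeDefaultConfig() helper sketched above. The
// class name and the single FP32 input/output pair are assumptions of this
// example; blob access uses the Blob::buffer()/size() API.
class ReluLikeImpl : public ILayerExecImpl {
public:
    ReluLikeImpl(const TensorDesc& inDesc, const TensorDesc& outDesc)
        : inDesc_(inDesc), outDesc_(outDesc) {}

    StatusCode getSupportedConfigurations(std::vector<LayerConfig>& conf,
                                          ResponseDesc* /*resp*/) noexcept override {
        conf.push_back(makeDefaultConfig({inDesc_}, {outDesc_}));
        return OK;
    }

    StatusCode init(LayerConfig& /*config*/, ResponseDesc* /*resp*/) noexcept override {
        // A real implementation would validate the configuration selected by the plugin.
        return OK;
    }

    StatusCode execute(std::vector<Blob::Ptr>& inputs, std::vector<Blob::Ptr>& outputs,
                       ResponseDesc* /*resp*/) noexcept override {
        if (inputs.empty() || outputs.empty()) return GENERAL_ERROR;
        const float* src = inputs[0]->buffer().as<float*>();
        float* dst = outputs[0]->buffer().as<float*>();
        if (src == nullptr || dst == nullptr) return GENERAL_ERROR;
        for (size_t i = 0; i < outputs[0]->size(); ++i)
            dst[i] = src[i] > 0.0f ? src[i] : 0.0f;   // element-wise ReLU
        return OK;
    }

private:
    TensorDesc inDesc_;
    TensorDesc outDesc_;
};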
115 
116 /**
117  * @brief This class provides an interface for layer implementation factories
118  */
119 class ILayerImplFactory {
120 public:
121  using Ptr = std::shared_ptr<ILayerImplFactory>;
122  using ImplCreator = std::function<ILayerImpl*()>;
123 
124  /**
125  * @brief Destructor
126  */
127  virtual ~ILayerImplFactory() = default;
128 
129  /**
130  * @deprecated
131  * @brief Computes output shapes from the given input shapes.
132  * @param inShapes Shapes of all inputs of this layer
133  * @param outShapes Shapes of all outputs of this layer, generated from the given inputs
134  * @param resp Response descriptor
135  * @return Status code
136  */
137  virtual StatusCode getShapes(const std::vector<TensorDesc>& /*inShapes*/, std::vector<TensorDesc>& /*outShapes*/,
138  ResponseDesc* /*resp*/) noexcept {
139  return NOT_IMPLEMENTED;
140  }
141 
142  /**
143  * @brief Gets all possible implementations for the given CNN layer
144  * @param impls The vector of implementations, ordered by priority
145  * @param resp Response descriptor
146  * @return Status code
147  */
148  virtual StatusCode getImplementations(std::vector<ILayerImpl::Ptr>& impls, ResponseDesc* resp) noexcept = 0;
149 };
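// --- Illustrative sketch (editor's example, not part of ie_iextension.h) ---
// A hypothetical factory producing the ReluLikeImpl sketched above. It assumes
// the layer has one input and one output and that their Data objects expose
// getTensorDesc(), as in Inference Engine releases of the same generation.
class ReluLikeFactory : public ILayerImplFactory {
public:
    explicit ReluLikeFactory(const CNNLayer* layer) : layer_(layer) {}

    StatusCode getImplementations(std::vector<ILayerImpl::Ptr>& impls,
                                  ResponseDesc* /*resp*/) noexcept override {
        if (layer_ == nullptr || layer_->insData.empty() || layer_->outData.empty())
            return GENERAL_ERROR;
        auto inData = layer_->insData[0].lock();
        if (!inData) return GENERAL_ERROR;
        impls.push_back(std::make_shared<ReluLikeImpl>(inData->getTensorDesc(),
                                                       layer_->outData[0]->getTensorDesc()));
        return OK;
    }

private:
    const CNNLayer* layer_;   // not owned; assumed to outlive the factory
};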
150 
151 /**
152  * @class IShapeInferImpl
153  * @brief This class provides an interface for custom shape inference implementations
154  */
155 class IShapeInferImpl {
156 public:
157  using Ptr = std::shared_ptr<IShapeInferImpl>;
158 
159  virtual ~IShapeInferImpl() = default;
160 
161  /**
162  * @brief Checks that the reshape can be applied, that the parameters and shapes are valid, and computes the output shapes
163  */
164  virtual StatusCode inferShapes(const std::vector<SizeVector>& inShapes,
165  const std::map<std::string, std::string>& params,
166  const std::map<std::string, Blob::Ptr>& blobs,
167  std::vector<SizeVector>& outShapes,
168  ResponseDesc* resp) noexcept = 0;
169 };
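// --- Illustrative sketch (editor's example, not part of ie_iextension.h) ---
// A hypothetical shape inference implementation for an element-wise layer:
// every output keeps the shape of the corresponding input, so the parameter
// and blob maps are not needed here.
class EltwiseLikeShapeInfer : public IShapeInferImpl {
public:
    StatusCode inferShapes(const std::vector<SizeVector>& inShapes,
                           const std::map<std::string, std::string>& /*params*/,
                           const std::map<std::string, Blob::Ptr>& /*blobs*/,
                           std::vector<SizeVector>& outShapes,
                           ResponseDesc* /*resp*/) noexcept override {
        if (inShapes.empty()) return GENERAL_ERROR;
        outShapes = inShapes;   // output shapes mirror the input shapes
        return OK;
    }
};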
170 
171 /**
172  * @class IShapeInferExtension
173  * @brief This class is the reader extension interface that provides shape propagation implementations
174  */
175 class IShapeInferExtension : public InferenceEngine::details::IRelease {
176 public:
177  /**
178  * @brief Sets logging callback.
179  * Logging is used to track what is going on inside the extension.
180  * @param listener Logging sink
181  */
182  virtual void SetLogCallback(InferenceEngine::IErrorListener& listener) noexcept = 0;
183 
184  /**
185  * @brief Gets extension version information and stores it in versionInfo
186  * @param versionInfo Pointer to version info; set by the plugin
187  */
188  virtual void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept = 0;
189 
190  /**
191  * @brief Cleans up resources
192  */
193  virtual void Unload() noexcept = 0;
194 
195  /**
196  * @brief Fills the passed array with the types of layers whose shape inference implementations are provided by this extension
197  * @param types Array to store the layer types
198  * @param size Size of the layer types array
199  * @param resp Response descriptor
200  * @return Status code
201  */
202  virtual StatusCode getShapeInferTypes(char**& types, unsigned int& size, ResponseDesc* resp) noexcept = 0;
203 
204  /**
205  * @brief Gets the shape propagation implementation for the layer with the given type name
206  * @param impl Shape inference implementation for the requested layer type
207  * @param resp Response descriptor
208  * @return Status code
209  */
210  virtual StatusCode getShapeInferImpl(IShapeInferImpl::Ptr& impl,
211  const char* type,
212  ResponseDesc* resp) noexcept = 0;
213 };
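// --- Illustrative sketch (editor's example, not part of ie_iextension.h) ---
// One way an extension might fill the char**&/size out-parameters used by
// getShapeInferTypes() and getPrimitiveTypes(). The header does not specify
// who releases this memory; this sketch assumes the caller frees it, and the
// helper name copyTypeNames is an invention of the example.
inline void copyTypeNames(const std::vector<std::string>& names,
                          char**& types, unsigned int& size) {
    size = static_cast<unsigned int>(names.size());
    types = new char*[size];
    for (unsigned int i = 0; i < size; ++i) {
        types[i] = new char[names[i].size() + 1];
        for (std::string::size_type c = 0; c < names[i].size(); ++c)
            types[i][c] = names[i][c];           // copy the type name characters
        types[i][names[i].size()] = '\0';        // NUL-terminate each entry
    }
}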
214 
215 /**
216  * @brief This class is the main extension interface
217  */
218 class IExtension : public IShapeInferExtension {
219 public:
220  virtual StatusCode getFactoryFor(ILayerImplFactory*& factory, const CNNLayer* cnnLayer,
221  ResponseDesc* resp) noexcept = 0;
222 
223  /**
224  * @brief Fills the passed array with the types of layers whose kernel implementations are provided by this extension
225  * @param types Array to store the layer types
226  * @param size Size of the layer types array
227  * @param resp Response descriptor
228  * @return Status code
229  */
230  virtual StatusCode getPrimitiveTypes(char**& types, unsigned int& size, ResponseDesc* resp) noexcept = 0;
231 
232  StatusCode getShapeInferTypes(char**&, unsigned int&, ResponseDesc*) noexcept override {
233  return NOT_IMPLEMENTED;
234  }
235 
236  StatusCode getShapeInferImpl(IShapeInferImpl::Ptr&, const char*, ResponseDesc*) noexcept override {
237  return NOT_IMPLEMENTED;
238  }
239 };
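// --- Illustrative sketch (editor's example, not part of ie_iextension.h) ---
// A hypothetical extension that registers the "ReLULike" layer type and serves
// the ReluLikeFactory sketched above; copyTypeNames() is the helper from the
// previous sketch. The Version field layout is assumed from ie_version.hpp.
class SampleExtension : public IExtension {
public:
    void SetLogCallback(IErrorListener& /*listener*/) noexcept override {}
    void Unload() noexcept override {}
    void Release() noexcept override { delete this; }

    void GetVersion(const Version*& versionInfo) const noexcept override {
        static const Version kVersion = {{1, 0}, "sample-build", "sample extension"};
        versionInfo = &kVersion;
    }

    StatusCode getPrimitiveTypes(char**& types, unsigned int& size,
                                 ResponseDesc* /*resp*/) noexcept override {
        copyTypeNames({"ReLULike"}, types, size);
        return OK;
    }

    StatusCode getFactoryFor(ILayerImplFactory*& factory, const CNNLayer* cnnLayer,
                             ResponseDesc* /*resp*/) noexcept override {
        if (cnnLayer == nullptr || cnnLayer->type != "ReLULike") return NOT_FOUND;
        factory = new ReluLikeFactory(cnnLayer);
        return OK;
    }
    // getShapeInferTypes()/getShapeInferImpl() keep the NOT_IMPLEMENTED defaults above.
};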
240 
241 using IExtensionPtr = std::shared_ptr<IExtension>;
242 using IShapeInferExtensionPtr = std::shared_ptr<IShapeInferExtension>;
243 
244 /**
245  * @brief Creates the default instance of the extension
246  * @param ext Extension interface
247  * @param resp Response descriptor
248  * @return Status code
249  */
250 INFERENCE_EXTENSION_API(StatusCode) CreateExtension(IExtension*& ext, ResponseDesc* resp) noexcept;
251 
252 /**
253  * @brief Creates the default instance of the shape infer extension
254  * @param ext Shape Infer Extension interface
255  * @param resp Response descriptor
256  * @return Status code
257  */
258 INFERENCE_EXTENSION_API(StatusCode) CreateShapeInferExtension(IShapeInferExtension*& ext, ResponseDesc* resp) noexcept;
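// --- Illustrative sketch (editor's example, not part of ie_iextension.h) ---
// How an extension library might define the exported factory declared above,
// handing out the hypothetical SampleExtension from the previous sketch.
INFERENCE_EXTENSION_API(StatusCode) CreateExtension(IExtension*& ext, ResponseDesc* /*resp*/) noexcept {
    try {
        ext = new SampleExtension();
        return OK;
    } catch (...) {
        return GENERAL_ERROR;
    }
}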
259 
260 
261 } // namespace InferenceEngine