ie_infer_request.hpp
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

/**
 * @brief A header file that provides wrapper classes for infer requests and callbacks.
 *
 * @file ie_infer_request.hpp
 */
#pragma once

#include <map>
#include <memory>
#include <string>

#include "details/ie_exception_conversion.hpp"
#include "ie_iinfer_request.hpp"
#include "ie_plugin_ptr.hpp"

namespace InferenceEngine {

namespace details {

class ICompletionCallbackWrapper {
public:
    virtual ~ICompletionCallbackWrapper() = default;

    virtual void call(InferenceEngine::IInferRequest::Ptr request, InferenceEngine::StatusCode code) const noexcept = 0;
};

template <class T>
class CompletionCallbackWrapper : public ICompletionCallbackWrapper {
    T lambda;

public:
    explicit CompletionCallbackWrapper(const T& lambda): lambda(lambda) {}

    void call(InferenceEngine::IInferRequest::Ptr /*request*/, InferenceEngine::StatusCode /*code*/) const noexcept override {
        lambda();
    }
};

template <>
class CompletionCallbackWrapper<IInferRequest::CompletionCallback> : public ICompletionCallbackWrapper {
    IInferRequest::CompletionCallback callBack;

public:
    explicit CompletionCallbackWrapper(const IInferRequest::CompletionCallback& callBack): callBack(callBack) {}

    void call(InferenceEngine::IInferRequest::Ptr request, InferenceEngine::StatusCode code) const noexcept override {
        callBack(request, code);
    }
};

}  // namespace details

/**
 * @copybrief IInferRequest
 *
 * Wraps IInferRequest.
 * It can throw exceptions safely for the application, where they can be properly handled.
 */
class InferRequest {
    IInferRequest::Ptr actual;
    InferenceEnginePluginPtr plg;
    std::shared_ptr<details::ICompletionCallbackWrapper> callback;

    static void callWrapper(InferenceEngine::IInferRequest::Ptr request, InferenceEngine::StatusCode code) {
        details::ICompletionCallbackWrapper* pWrapper = nullptr;
        ResponseDesc dsc;
        request->GetUserData(reinterpret_cast<void**>(&pWrapper), &dsc);
        pWrapper->call(request, code);
    }

public:
    /**
     * @brief Default constructor
     */
    InferRequest() = default;

    /**
     * @brief Destructor
     */
    ~InferRequest() {
        actual = nullptr;
    }

    /**
     * @brief Sets input/output data to infer
     *
     * @note Memory allocation does not happen
     * @param name Name of input or output blob.
     * @param data Reference to input or output blob. The type of a blob must match the network input precision and size.
     */
    void SetBlob(const std::string& name, const Blob::Ptr& data) {
        CALL_STATUS_FNC(SetBlob, name.c_str(), data);
    }

    /**
     * @copybrief IInferRequest::GetBlob
     *
     * Wraps IInferRequest::GetBlob
     * @param name A name of Blob to get
     * @return A shared pointer to a Blob with a name @p name. If a blob is not found, an exception is thrown.
     */
    Blob::Ptr GetBlob(const std::string& name) {
        Blob::Ptr data;
        CALL_STATUS_FNC(GetBlob, name.c_str(), data);
        std::string error = "Internal error: blob with name `" + name + "` is not allocated!";
        auto blobPtr = data.get();
        if (blobPtr == nullptr) THROW_IE_EXCEPTION << error;
        if (blobPtr->buffer() == nullptr) THROW_IE_EXCEPTION << error;
        return data;
    }

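    /* Illustrative usage sketch, not part of the original header. It shows SetBlob()/GetBlob() on a
     * request assumed to be obtained from ExecutableNetwork::CreateInferRequest(); the blob names
     * "data" and "prob" and the `inputBlob` object are assumptions for the example.
     *
     *     request.SetBlob("data", inputBlob);           // inputBlob must match the input precision and size
     *     request.Infer();                              // synchronous inference (see Infer() below)
     *     Blob::Ptr output = request.GetBlob("prob");   // throws if the blob is unknown or not allocated
     */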
    /**
     * @brief Sets a blob with pre-processing information
     * @note Returns an error if the specified blob is an output blob
     * @param name Name of input blob.
     * @param data A reference to input. The type of Blob must correspond to the network input precision and size.
     * @param info Pre-processing info for the blob.
     */
    void SetBlob(const std::string& name, const Blob::Ptr& data, const PreProcessInfo& info) {
        CALL_STATUS_FNC(SetBlob, name.c_str(), data, info);
    }

    /**
     * @brief Gets pre-processing info for input data
     * @param name Name of input blob.
     * @return A constant reference to the pre-processing info of the blob with the given name
     */
    const PreProcessInfo& GetPreProcess(const std::string& name) const {
        const PreProcessInfo* info = nullptr;
        CALL_STATUS_FNC(GetPreProcess, name.c_str(), &info);
        return *info;
    }

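    /* Illustrative usage sketch, not part of the original header. It attaches pre-processing to an
     * input blob; the blob name "data" and the `request`/`inputBlob` objects are assumptions, and
     * RESIZE_BILINEAR is one of the InferenceEngine::ResizeAlgorithm values.
     *
     *     PreProcessInfo info;
     *     info.setResizeAlgorithm(RESIZE_BILINEAR);     // resize the blob to the network input size
     *     request.SetBlob("data", inputBlob, info);
     *     const PreProcessInfo& current = request.GetPreProcess("data");
     */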
    /**
     * @copybrief IInferRequest::Infer
     * @note Blocks all methods of InferRequest while the request is ongoing (running or waiting in the queue)
     *
     * Wraps IInferRequest::Infer
     */
    void Infer() {
        CALL_STATUS_FNC_NO_ARGS(Infer);
    }

    /**
     * @copybrief IInferRequest::GetPerformanceCounts
     *
     * Wraps IInferRequest::GetPerformanceCounts
     * @return Map of layer names to profiling information for that layer
     */
    std::map<std::string, InferenceEngineProfileInfo> GetPerformanceCounts() const {
        std::map<std::string, InferenceEngineProfileInfo> perfMap;
        CALL_STATUS_FNC(GetPerformanceCounts, perfMap);
        return perfMap;
    }

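    /* Illustrative usage sketch, not part of the original header. It runs a synchronous inference and
     * prints per-layer profiling data; `request` is an assumed, already configured InferRequest, and
     * profiling must be enabled in the plugin configuration for the counters to be filled.
     *
     *     request.Infer();
     *     for (const auto& layer : request.GetPerformanceCounts()) {
     *         std::cout << layer.first << ": " << layer.second.realTime_uSec << " us" << std::endl;
     *     }
     */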
    /**
     * @brief Sets input data to infer
     *
     * @note Memory allocation doesn't happen
     * @param inputs A reference to a map of input blobs accessed by input names.
     * The type of Blob must correspond to the network input precision and size.
     */
    void SetInput(const BlobMap& inputs) {
        for (auto&& input : inputs) {
            CALL_STATUS_FNC(SetBlob, input.first.c_str(), input.second);
        }
    }

    /**
     * @brief Sets data that will contain result of the inference
     *
     * @note Memory allocation doesn't happen
     * @param results A reference to a map of result blobs accessed by output names.
     * The type of Blob must correspond to the network output precision and size.
     */
    void SetOutput(const BlobMap& results) {
        for (auto&& result : results) {
            CALL_STATUS_FNC(SetBlob, result.first.c_str(), result.second);
        }
    }

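    /* Illustrative usage sketch, not part of the original header. It sets several blobs at once
     * through BlobMap; the blob names and the `inputBlob`/`outputBlob` objects are assumptions.
     *
     *     BlobMap inputs = {{"data", inputBlob}};
     *     BlobMap outputs = {{"prob", outputBlob}};
     *     request.SetInput(inputs);
     *     request.SetOutput(outputs);
     */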
    /**
     * @brief Sets a new batch size when dynamic batching is enabled in the executable network that created this request.
     *
     * @param batch New batch size to be used by all the following inference calls for this request.
     */
    void SetBatch(const int batch) {
        CALL_STATUS_FNC(SetBatch, batch);
    }

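    /* Illustrative usage sketch, not part of the original header. SetBatch() only takes effect when
     * dynamic batching was enabled for the executable network that created the request (a plugin
     * configuration assumption not shown here); the `request` object is also assumed.
     *
     *     request.SetBatch(1);   // subsequent inferences for this request process a single element
     *     request.Infer();
     */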
    /**
     * @brief Constructs InferRequest from the initialized std::shared_ptr
     * @param request Initialized shared pointer to the IInferRequest interface
     * @param plg Plugin to use. This is required to ensure that InferRequest can work properly even if the plugin object is destroyed.
     */
    explicit InferRequest(IInferRequest::Ptr request, InferenceEnginePluginPtr plg = {}): actual(request), plg(plg) {
        // plg can be null, but actual must not be
        if (actual == nullptr) {
            THROW_IE_EXCEPTION << "InferRequest wrapper was not initialized.";
        }
    }

    /**
     * @brief Starts inference of specified input(s) in asynchronous mode
     *
     * @note It returns immediately. Inference also starts immediately.
     */
    void StartAsync() {
        CALL_STATUS_FNC_NO_ARGS(StartAsync);
    }

    /**
     * @copybrief IInferRequest::Wait
     *
     * Wraps IInferRequest::Wait
     * @param millis_timeout Maximum duration in milliseconds to block for
     * @note There are special cases when millis_timeout equals one of the values of the WaitMode enum:
     * * STATUS_ONLY - immediately returns the inference status (IInferRequest::RequestStatus). It does not block or
     *   interrupt the current thread
     * * RESULT_READY - waits until the inference result becomes available
     * @return A status code of the operation
     */
    StatusCode Wait(int64_t millis_timeout) {
        ResponseDesc resp;
        if (actual == nullptr) {
            THROW_IE_EXCEPTION << "InferRequest wrapper was not initialized.";
        }
        auto res = actual->Wait(millis_timeout, &resp);
        if (res != OK && res != RESULT_NOT_READY && res != INFER_NOT_STARTED) {
            InferenceEngine::details::extract_exception(res, resp.msg);
        }
        return res;
    }

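    /* Illustrative usage sketch, not part of the original header. It runs the assumed, already
     * configured `request` asynchronously and blocks until the result is ready.
     *
     *     request.StartAsync();
     *     StatusCode status = request.Wait(IInferRequest::WaitMode::RESULT_READY);
     *     if (status == StatusCode::OK) {
     *         // results are ready; GetBlob() can now be used to read outputs
     *     }
     */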
    /**
     * @copybrief IInferRequest::SetCompletionCallback
     *
     * Wraps IInferRequest::SetCompletionCallback
     *
     * @param callbackToSet Lambda callback object which will be called on processing finish.
     */
    template <class T>
    void SetCompletionCallback(const T& callbackToSet) {
        callback.reset(new details::CompletionCallbackWrapper<T>(callbackToSet));
        CALL_STATUS_FNC(SetUserData, callback.get());
        actual->SetCompletionCallback(callWrapper);
    }

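    /* Illustrative usage sketch, not part of the original header. It attaches a no-argument lambda
     * (one of the forms wrapped by CompletionCallbackWrapper above) that is invoked when the
     * asynchronous request finishes; the `request` object is assumed.
     *
     *     request.SetCompletionCallback([&]() {
     *         std::cout << "inference finished" << std::endl;
     *     });
     *     request.StartAsync();
     */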
    /**
     * @brief IInferRequest pointer to be used directly in CreateInferRequest functions
     * @return A shared pointer to the underlying IInferRequest interface
     */
    operator IInferRequest::Ptr&() {
        return actual;
    }

    /**
     * @brief Checks if the current InferRequest object is not initialized
     * @return true if the current InferRequest object is not initialized, false otherwise
     */
    bool operator!() const noexcept {
        return !actual;
    }

    /**
     * @brief Checks if the current InferRequest object is initialized
     * @return true if the current InferRequest object is initialized, false otherwise
     */
    explicit operator bool() const noexcept {
        return !!actual;
    }

    /**
     * @brief A smart pointer to the InferRequest object
     */
    using Ptr = std::shared_ptr<InferRequest>;
};

namespace details {

template <>
class CompletionCallbackWrapper<std::function<void(InferRequest, StatusCode)>> : public ICompletionCallbackWrapper {
    std::function<void(InferRequest, StatusCode)> lambda;

public:
    explicit CompletionCallbackWrapper(const std::function<void(InferRequest, InferenceEngine::StatusCode)>& lambda)
        : lambda(lambda) {}

    void call(InferenceEngine::IInferRequest::Ptr request, InferenceEngine::StatusCode code) const noexcept override {
        lambda(InferRequest(request), code);
    }
};

}  // namespace details
}  // namespace InferenceEngine