ie_c_api.h
1 // Copyright (C) 2018-2020 Intel Corporation
2 // SPDX-License-Identifier: Apache-2.0
3 //
4 
5 /**
6  * @file ie_c_api.h
7  * C API of the Inference Engine bridge enables the use of the OpenVINO Inference Engine
8  * library and all of its plugins in native applications without requiring the C++ API.
9  * The API covers a significant part of the C++ API and includes the ability to read a
10  * model from disk, modify input and output information to match their runtime
11  * representation such as data types or memory layout, load an in-memory model to
12  * Inference Engine on different devices including heterogeneous and multi-device
13  * modes, manage the memory where input and output data is allocated, and manage
14  * the inference flow.
15 **/
16 
17 #ifndef IE_C_API_H
18 #define IE_C_API_H
19 
20 #include <stdint.h>
21 #include <stdio.h>
22 
23 #ifdef __cplusplus
24  #define INFERENCE_ENGINE_C_API_EXTERN extern "C"
25 #else
26  #define INFERENCE_ENGINE_C_API_EXTERN
27 #endif
28 
29 #if defined(__GNUC__) && (__GNUC__ < 4)
30  #define INFERENCE_ENGINE_C_API(...) INFERENCE_ENGINE_C_API_EXTERN __VA_ARGS__
31 #else
32  #if defined(_WIN32)
33  #ifdef inference_engine_c_api_EXPORTS
34  #define INFERENCE_ENGINE_C_API(...) INFERENCE_ENGINE_C_API_EXTERN __declspec(dllexport) __VA_ARGS__ __cdecl
35  #else
36  #define INFERENCE_ENGINE_C_API(...) INFERENCE_ENGINE_C_API_EXTERN __declspec(dllimport) __VA_ARGS__ __cdecl
37  #endif
38  #else
39  #define INFERENCE_ENGINE_C_API(...) INFERENCE_ENGINE_C_API_EXTERN __attribute__((visibility("default"))) __VA_ARGS__
40  #endif
41 #endif
42 
43 typedef struct ie_core ie_core_t;
44 typedef struct ie_network ie_network_t;
45 typedef struct ie_executable ie_executable_network_t;
46 typedef struct ie_infer_request ie_infer_request_t;
47 typedef struct ie_blob ie_blob_t;
48 
49 /**
50  * @struct ie_core_version
51  * @brief Represents version information that describes devices and the inference engine runtime library
52  */
53 typedef struct ie_core_version {
54  size_t major;
55  size_t minor;
56  const char *device_name;
57  const char *build_number;
58  const char *description;
59 }ie_core_version_t;
60 
61 /**
62  * @struct ie_core_versions
63  * @brief Represents all versions information that describes all devices and the inference engine runtime library
64  */
65 typedef struct ie_core_versions {
66  ie_core_version_t *versions;
67  size_t num_vers;
68 }ie_core_versions_t;
69 
70 /**
71  * @struct ie_config
72  * @brief Represents configuration information that describes devices
73  */
74 typedef struct ie_config {
75  const char *name;
76  const char *value;
77  struct ie_config *next;
78 }ie_config_t;
79 
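Because ie_config entries are chained through the next pointer, a device configuration with several key/value pairs can be built as a small linked list. A minimal sketch; the key names below are illustrative examples, not defined by this header:

    // Hypothetical configuration chain: the second entry is linked from the first.
    ie_config_t config_streams = {"CPU_THROUGHPUT_STREAMS", "4", NULL};
    ie_config_t config = {"PERF_COUNT", "YES", &config_streams};
    // &config can then be passed to ie_core_set_config() or ie_core_load_network().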
80 /**
81  * @struct ie_param
82  * @brief metric and config parameters.
83  */
84 typedef struct ie_param {
85  union {
86  char *params;
87  unsigned int number;
88  unsigned int range_for_async_infer_request[3];
89  unsigned int range_for_streams[2];
90  };
91 }ie_param_t;
92 
93 /**
94  * @struct ie_param_config
95  * @brief Represents configuration parameter information
96  */
97 typedef struct ie_param_config {
98  char *name;
99  ie_param_t *param;
100 }ie_param_config_t;
101 
102 /**
103  * @struct desc
104  * @brief Represents detailed information for an error
105  */
106 typedef struct desc {
107  char msg[256];
108 }desc_t;
109 
110 /**
111  * @struct dimensions
112  * @brief Represents dimensions for input or output data
113  */
114 typedef struct dimensions {
115  size_t ranks;
116  size_t dims[8];
117 }dimensions_t;
118 
119 /**
120  * @enum layout_e
121  * @brief Layouts that the inference engine supports
122  */
123 typedef enum {
124  ANY = 0, // "any" layout
125 
126  // I/O data layouts
127  NCHW = 1,
128  NHWC = 2,
129  NCDHW = 3,
130  NDHWC = 4,
131 
132  // weight layouts
133  OIHW = 64,
134 
135  // Scalar
136  SCALAR = 95,
137 
138  // bias layouts
139  C = 96,
140 
141  // Single image layout (for mean image)
142  CHW = 128,
143 
144  // 2D
145  HW = 192,
146  NC = 193,
147  CN = 194,
148 
149  BLOCKED = 200,
150 }layout_e;
151 
152 /**
153  * @enum precision_e
154  * @brief Precisions that the inference engine supports
155  */
156 typedef enum {
157  UNSPECIFIED = 255, /**< Unspecified value. Used by default */
158  MIXED = 0, /**< Mixed value. Can be received from network. Not applicable for tensors */
159  FP32 = 10, /**< 32bit floating point value */
160  FP16 = 11, /**< 16bit floating point value */
161  Q78 = 20, /**< 16bit specific signed fixed point precision */
162  I16 = 30, /**< 16bit signed integer value */
163  U8 = 40, /**< 8bit unsigned integer value */
164  I8 = 50, /**< 8bit signed integer value */
165  U16 = 60, /**< 16bit unsigned integer value */
166  I32 = 70, /**< 32bit signed integer value */
167  I64 = 72, /**< 64bit signed integer value */
168  BIN = 71, /**< 1bit integer value */
169  CUSTOM = 80 /**< custom precision has its own name and size of elements */
170 }precision_e;
171 
172 /**
173  * @struct tensor_desc
174  * @brief Represents detailed information for a tensor
175  */
176 typedef struct tensor_desc {
177  layout_e layout;
178  dimensions_t dims;
179  precision_e precision;
180 }tensor_desc_t;
181 
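As a sketch, a descriptor for a 1x3x224x224 FP32 tensor in NCHW layout could be filled like this (the shape is only an example):

    tensor_desc_t tensor;
    tensor.layout = NCHW;
    tensor.precision = FP32;
    tensor.dims.ranks = 4;
    tensor.dims.dims[0] = 1;    // batch
    tensor.dims.dims[1] = 3;    // channels
    tensor.dims.dims[2] = 224;  // height
    tensor.dims.dims[3] = 224;  // width
    // The descriptor can be passed to ie_blob_make_memory() to allocate a matching blob.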
182 /**
183  * @enum colorformat_e
184  * @brief Extra information about input color format for preprocessing
185  */
186 typedef enum {
187  RAW = 0u, ///< Plain blob (default), no extra color processing required
188  RGB, ///< RGB color format
189  BGR, ///< BGR color format, default in DLDT
190  RGBX, ///< RGBX color format with X ignored during inference
191  BGRX, ///< BGRX color format with X ignored during inference
192  NV12, ///< NV12 color format represented as compound Y+UV blob
193 }colorformat_e;
194 
195 /**
196  * @enum resize_alg_e
197  * @brief Represents the list of supported resize algorithms.
198  */
199 typedef enum {
200  NO_RESIZE = 0,
201  RESIZE_BILINEAR,
202  RESIZE_AREA
203 }resize_alg_e;
204 
205 /**
206  * @enum IEStatusCode
207  * @brief This enum contains codes for all possible return values of the interface functions
208  */
209 typedef enum {
210  OK = 0,
211  GENERAL_ERROR = -1,
212  NOT_IMPLEMENTED = -2,
213  NETWORK_NOT_LOADED = -3,
214  PARAMETER_MISMATCH = -4,
215  NOT_FOUND = -5,
216  OUT_OF_BOUNDS = -6,
217  /*
218  * @brief An exception not derived from std::exception was thrown
219  */
220  UNEXPECTED = -7,
221  REQUEST_BUSY = -8,
222  RESULT_NOT_READY = -9,
223  NOT_ALLOCATED = -10,
224  INFER_NOT_STARTED = -11,
225  NETWORK_NOT_READ = -12
226 }IEStatusCode;
227 
228 /**
229  * @struct roi_t
230  * @brief This structure describes roi data.
231  */
232 typedef struct roi {
233  size_t id; // ID of a roi
234  size_t posX; // W upper left coordinate of roi
235  size_t posY; // H upper left coordinate of roi
236  size_t sizeX; // W size of roi
237  size_t sizeY; // H size of roi
238 }roi_t;
239 
240 /**
241  * @struct input_shape
242  * @brief Represents shape for input data
243  */
244 typedef struct input_shape {
245  char *name;
246  dimensions_t shape;
247 }input_shape_t;
248 
249 /**
250  * @struct input_shapes
251  * @brief Represents shapes for all input data
252  */
253 typedef struct input_shapes {
254  input_shape_t *shapes;
255  size_t shape_num;
256 }input_shapes_t;
257 
258 /**
259  * @struct ie_blob_buffer
260  * @brief Represents copied data from the given blob.
261  */
262 typedef struct ie_blob_buffer {
263  union {
264  void *buffer; // buffer can be written
265  const void *cbuffer; // cbuffer is read-only
266  };
267 }ie_blob_buffer_t;
268 
269 /**
270  * @struct ie_complete_call_back
271  * @brief Completion callback definition about the function and args
272  */
273 typedef struct ie_complete_call_back {
274  void (*completeCallBackFunc)(void *args);
275  void *args;
276 }ie_complete_call_back_t;
277 
278 /**
279  * @brief Returns the version of the API that is exported.
280  * @return Version string of the API.
281  */
282 INFERENCE_ENGINE_C_API(const char *) ie_c_api_version(void);
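For example, the exported version string can be printed directly (stdio.h is already included above):

    // Print the version of the C API the application is linked against.
    printf("Inference Engine C API version: %s\n", ie_c_api_version());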
283 
284 
285 // Core
286 
287 /**
288  * @defgroup Core Core
289  * Set of functions dedicated to working with registered plugins and loading
290  * network to the registered devices.
291  * @{
292  */
293 
294 /**
295  * @brief Constructs Inference Engine Core instance using XML configuration file with devices description.
296  * See RegisterPlugins for more details. Use the ie_core_free() method to free memory.
297  * @ingroup Core
298  * @param xml_config_file A path to .xml file with devices to load from. If XML configuration file is not specified,
299  * then default Inference Engine devices are loaded from the default plugin.xml file.
300  * @param core A pointer to the newly created ie_core_t.
301  * @return Status code of the operation: OK(0) for success.
302  */
303 INFERENCE_ENGINE_C_API(IEStatusCode) ie_core_create(const char *xml_config_file, ie_core_t **core);
304 
305 /**
306  * @brief Releases memory occupied by core.
307  * @ingroup Core
308  * @param core A pointer to the core to free memory.
309  * @return Status code of the operation: OK(0) for success.
310  */
311 INFERENCE_ENGINE_C_API(IEStatusCode) ie_core_free(ie_core_t **core);
312 
313 /**
314  * @brief Gets version information of the device specified. Use the ie_core_versions_free() method to free memory.
315  * @ingroup Core
316  * @param core A pointer to ie_core_t instance.
317  * @param device_name Name to identify the device.
318  * @param versions A pointer to versions corresponding to device_name.
319  * @return Status code of the operation: OK(0) for success.
320  */
321 INFERENCE_ENGINE_C_API(IEStatusCode) ie_core_get_versions(const ie_core_t *core, const char *device_name, ie_core_versions_t *versions);
322 
323 /**
324  * @brief Releases memory occupied by ie_core_versions.
325  * @ingroup Core
326  * @param vers A pointer to the ie_core_versions to free memory.
327  * @return Status code of the operation: OK(0) for success.
328  */
329 INFERENCE_ENGINE_C_API(IEStatusCode) ie_core_versions_free(ie_core_versions_t *vers);
330 
331 /**
332  * @brief Reads the model from the .xml and .bin files of the IR. Use the ie_network_free() method to free memory.
333  * @ingroup Core
334  * @param core A pointer to ie_core_t instance.
335  * @param xml .xml file's path of the IR.
336  * @param weights_file .bin file's path of the IR. If the path is empty, the reader tries to find a .bin file with the same name as the .xml file;
337  * if such a .bin file is not found, the IR is loaded without weights.
338  * @param network A pointer to the newly created network.
339  * @return Status code of the operation: OK(0) for success.
340  */
341 INFERENCE_ENGINE_C_API(IEStatusCode) ie_core_read_network(ie_core_t *core, const char *xml, const char *weights_file, ie_network_t **network);
342 
343 /**
344  * @brief Creates an executable network from a network object. Users can create as many networks as they need and use
345  * them simultaneously (up to the limitation of the hardware resources). Use the ie_exec_network_free() method to free memory.
346  * @ingroup Core
347  * @param core A pointer to ie_core_t instance.
348  * @param network A pointer to ie_network instance.
349  * @param device_name Name of device to load network to.
350  * @param config Device configuration.
351  * @param exe_network A pointer to the newly created executable network.
352  * @return Status code of the operation: OK(0) for success.
353  */
354 INFERENCE_ENGINE_C_API(IEStatusCode) ie_core_load_network(ie_core_t *core, const ie_network_t *network, const char *device_name, \
355  const ie_config_t *config, ie_executable_network_t **exe_network);
356 
357 /**
358  * @brief Sets configuration for device.
359  * @ingroup Core
360  * @param core A pointer to ie_core_t instance.
361  * @param ie_core_config Device configuration.
362  * @param device_name An optional name of a device. If the device name is not specified,
363  * the config is set for all the registered devices.
364  * @return Status code of the operation: OK(0) for success.
365  */
366 INFERENCE_ENGINE_C_API(IEStatusCode) ie_core_set_config(ie_core_t *core, const ie_config_t *ie_core_config, const char *device_name);
367 
368 /**
369  * @brief Registers a new device and a plugin which implement this device inside Inference Engine.
370  * @ingroup Core
371  * @param core A pointer to ie_core_t instance.
372  * @param plugin_name A name of a plugin. Depending on a platform, plugin_name is wrapped with
373  * a shared library suffix and a prefix to identify a full name of the library.
374  * @param device_name A device name to register plugin for. If not specified, the method registers
375  * a plugin with the default name.
376  * @return Status code of the operation: OK(0) for success.
377  */
378 INFERENCE_ENGINE_C_API(IEStatusCode) ie_core_register_plugin(ie_core_t *core, const char *plugin_name, const char *device_name);
379 
380 /**
381  * @brief Registers plugins specified in an ".xml" configuration file.
382  * @ingroup Core
383  * @param core A pointer to ie_core_t instance.
384  * @param xml_config_file A full path to ".xml" file containing plugins configuration.
385  * @return Status code of the operation: OK(0) for success.
386  */
387 INFERENCE_ENGINE_C_API(IEStatusCode) ie_core_register_plugins(ie_core_t *core, const char *xml_config_file);
388 
389 /**
390  * @brief Unregisters a plugin with a specified device name.
391  * @ingroup Core
392  * @param core A pointer to ie_core_t instance.
393  * @param device_name A device name of the device to unregister.
394  * @return Status code of the operation: OK(0) for success.
395  */
396 INFERENCE_ENGINE_C_API(IEStatusCode) ie_core_unregister_plugin(ie_core_t *core, const char *device_name);
397 
398 /**
399  * @brief Loads extension library to the device with a specified device name.
400  * @ingroup Core
401  * @param core A pointer to ie_core_t instance.
402  * @param extension_path Path to the extensions library file to load to a device.
403  * @param device_name A device name of a device to load the extensions to.
404  * @return Status code of the operation: OK(0) for success.
405  */
406 INFERENCE_ENGINE_C_API(IEStatusCode) ie_core_add_extension(ie_core_t *core, const char *extension_path, const char *device_name);
407 
408 /**
409  * @brief Gets a general runtime metric for dedicated hardware. The method is needed to request common device properties
410  * which are executable network agnostic, such as device name, temperature, and other device-specific values.
411  * @ingroup Core
412  * @param core A pointer to ie_core_t instance.
413  * @param device_name A name of a device to get a metric value.
414  * @param metric_name A metric name to request.
415  * @param param_result A metric value corresponding to the metric_name.
416  * @return Status code of the operation: OK(0) for success.
417  */
418 INFERENCE_ENGINE_C_API(IEStatusCode) ie_core_get_metric(const ie_core_t *core, const char *device_name, const char *metric_name, ie_param_t *param_result);
419 
420 /**
421  * @brief Gets configuration dedicated to device behaviour. The method is intended to extract information
422  * which can be set via the SetConfig method.
423  * @ingroup Core
424  * @param core A pointer to ie_core_t instance.
425  * @param device_name A name of a device to get a configuration value.
426  * @param config_name Name of a configuration.
427  * @param param_result A configuration value corresponding to the config_name.
428  * @return Status code of the operation: OK(0) for success.
429  */
430 INFERENCE_ENGINE_C_API(IEStatusCode) ie_core_get_config(const ie_core_t *core, const char *device_name, const char *config_name, ie_param_t *param_result);
431 
432 /** @} */ // end of Core
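A sketch of the typical Core flow, assuming an IR named model.xml/model.bin and the "CPU" device (both names are illustrative):

    ie_core_t *core = NULL;
    ie_network_t *network = NULL;
    ie_executable_network_t *exe_network = NULL;
    ie_config_t config = {NULL, NULL, NULL};  // empty device configuration

    // An empty path makes the core fall back to the default plugin.xml.
    IEStatusCode status = ie_core_create("", &core);
    if (status == OK)
        status = ie_core_read_network(core, "model.xml", "model.bin", &network);
    if (status == OK)
        status = ie_core_load_network(core, network, "CPU", &config, &exe_network);

    // ... create infer requests from exe_network and run inference ...

    ie_exec_network_free(&exe_network);
    ie_network_free(&network);
    ie_core_free(&core);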
433 
434 // ExecutableNetwork
435 
436 /**
437  * @defgroup ExecutableNetwork ExecutableNetwork
438  * Set of functions for working with neural networks that have been loaded to a device.
439  * @{
440  */
441 
442 /**
443  * @brief Releases memory occupied by ExecutableNetwork.
444  * @ingroup ExecutableNetwork
445  * @param ie_exec_network A pointer to the ExecutableNetwork to free memory.
446  * @return Status code of the operation: OK(0) for success.
447  */
448 INFERENCE_ENGINE_C_API(IEStatusCode) ie_exec_network_free(ie_executable_network_t **ie_exec_network);
449 
450 /**
451  * @brief Creates an inference request instance used to infer the network. The created request has allocated input
452  * and output blobs (that can be changed later). Use the ie_infer_request_free() method to free memory.
453  * @ingroup ExecutableNetwork
454  * @param ie_exec_network A pointer to ie_executable_network_t instance.
455  * @param request A pointer to the newly created ie_infer_request_t instance.
456  * @return Status code of the operation: OK(0) for success.
457  */
458 INFERENCE_ENGINE_C_API(IEStatusCode) ie_exec_network_create_infer_request(ie_executable_network_t *ie_exec_network, ie_infer_request_t **request);
459 
460 /**
461  * @brief Gets a general runtime metric for an executable network, such as the network name, the actual device ID on which the executable network is running,
462  * or other properties which cannot be changed dynamically.
463  * @ingroup ExecutableNetwork
464  * @param ie_exec_network A pointer to ie_executable_network_t instance.
465  * @param metric_name A metric name to request.
466  * @param param_result A metric value corresponding to the metric_name.
467  * @return Status code of the operation: OK(0) for success.
468  */
469 INFERENCE_ENGINE_C_API(IEStatusCode) ie_exec_network_get_metric(const ie_executable_network_t *ie_exec_network, \
470  const char *metric_name, ie_param_t *param_result);
471 
472 /**
473  * @brief Sets configuration for the current executable network. Currently, the method can only be used
474  * when the network runs on the Multi device, and the only configurable parameter is "MULTI_DEVICE_PRIORITIES".
475  * @ingroup ExecutableNetwork
476  * @param ie_exec_network A pointer to ie_executable_network_t instance.
477  * @param param_config A pointer to device configuration.
478  * @return Status code of the operation: OK(0) for success.
479  */
480 INFERENCE_ENGINE_C_API(IEStatusCode) ie_exec_network_set_config(ie_executable_network_t *ie_exec_network, const ie_config_t *param_config);
481 
482 /**
483  * @brief Gets configuration for the current executable network. The method is responsible for
484  * extracting information which affects executable network execution.
485  * @ingroup ExecutableNetwork
486  * @param ie_exec_network A pointer to ie_executable_network_t instance.
487  * @param metric_config A configuration parameter name to request.
488  * @param param_result A configuration value corresponding to a configuration parameter name.
489  * @return Status code of the operation: OK(0) for success.
490  */
491 INFERENCE_ENGINE_C_API(IEStatusCode) ie_exec_network_get_config(const ie_executable_network_t *ie_exec_network, \
492  const char *metric_config, ie_param_t *param_result);
493 
494 /** @} */ // end of ExecutableNetwork
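A sketch of creating an inference request from a previously loaded exe_network and querying a metric; the "NETWORK_NAME" key is a commonly used example, not defined by this header:

    ie_infer_request_t *request = NULL;
    IEStatusCode status = ie_exec_network_create_infer_request(exe_network, &request);

    ie_param_t param;
    if (status == OK &&
        ie_exec_network_get_metric(exe_network, "NETWORK_NAME", &param) == OK) {
        // String-valued metrics are returned through param.params.
    }

    // ... use the request, then release it ...
    ie_infer_request_free(&request);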
495 
496 // InferRequest
497 
498 /**
499  * @defgroup InferRequest InferRequest
500  * Set of functions responsible for dedicated inference for certain
501  * ExecutableNetwork.
502  * @{
503  */
504 
505 /**
506  * @brief Releases memory occupied by ie_infer_request_t instance.
507  * @ingroup InferRequest
508  * @param infer_request A pointer to the ie_infer_request_t to free memory.
509  * @return Status code of the operation: OK(0) for success.
510  */
511 INFERENCE_ENGINE_C_API(IEStatusCode) ie_infer_request_free(ie_infer_request_t **infer_request);
512 
513 /**
514  * @brief Gets input/output data for inference
515  * @ingroup InferRequest
516  * @param infer_request A pointer to ie_infer_request_t instance.
517  * @param name Name of input or output blob.
518  * @param blob A pointer to input or output blob. The type of Blob must match the network input precision and size.
519  * @return Status code of the operation: OK(0) for success.
520  */
521 INFERENCE_ENGINE_C_API(IEStatusCode) ie_infer_request_get_blob(ie_infer_request_t *infer_request, const char *name, ie_blob_t **blob);
522 
523 /**
524  * @brief Sets input/output data for inference.
525  * @ingroup InferRequest
526  * @param infer_request A pointer to ie_infer_request_t instance.
527  * @param name Name of input or output blob.
528  * @param blob Reference to input or output blob. The type of a blob must match the network input precision and size.
529  * @return Status code of the operation: OK(0) for success.
530  */
531 INFERENCE_ENGINE_C_API(IEStatusCode) ie_infer_request_set_blob(ie_infer_request_t *infer_request, const char *name, const ie_blob_t *blob);
532 
533 /**
534  * @brief Starts synchronous inference of the infer request and fills outputs.
535  * @ingroup InferRequest
536  * @param infer_request A pointer to ie_infer_request_t instance.
537  * @return Status code of the operation: OK(0) for success.
538  */
539 INFERENCE_ENGINE_C_API(IEStatusCode) ie_infer_request_infer(ie_infer_request_t *infer_request);
540 
541 /**
542  * @brief Starts asynchronous inference of the infer request and fills outputs.
543  * @ingroup InferRequest
544  * @param infer_request A pointer to ie_infer_request_t instance.
545  * @return Status code of the operation: OK(0) for success.
546  */
547 INFERENCE_ENGINE_C_API(IEStatusCode) ie_infer_request_infer_async(ie_infer_request_t *infer_request);
548 
549 /**
550  * @brief Sets a callback function that will be called on success or failure of an asynchronous request.
551  * @ingroup InferRequest
552  * @param infer_request A pointer to ie_infer_request_t instance.
553  * @param callback A function to be called.
554  * @return Status code of the operation: OK(0) for success.
555  */
556 INFERENCE_ENGINE_C_API(IEStatusCode) ie_infer_set_completion_callback(ie_infer_request_t *infer_request, ie_complete_call_back_t *callback);
557 
558 /**
559  * @brief Waits for the result to become available. Blocks until specified timeout elapses or the result becomes available, whichever comes first.
560  * @ingroup InferRequest
561  * @param infer_request A pointer to ie_infer_request_t instance.
562  * @param timeout Maximum duration in milliseconds to block for.
563  * @note There are special cases when timeout equals a value of the WaitMode enum:
564  * * 0 - Immediately returns the inference status. It does not block or interrupt execution.
565  * * -1 - waits until inference result becomes available
566  * @return Status code of the operation: OK(0) for success.
567  */
568 INFERENCE_ENGINE_C_API(IEStatusCode) ie_infer_request_wait(ie_infer_request_t *infer_request, const int64_t timeout);
569 
570 /**
571  * @brief Sets new batch size for certain infer request when dynamic batching is enabled in executable network that created this request.
572  * @ingroup InferRequest
573  * @param infer_request A pointer to ie_infer_request_t instance.
574  * @param size New batch size to be used by all the following inference calls for this request.
575  * @return Status code of the operation: OK(0) for success.
576  */
577 INFERENCE_ENGINE_C_API(IEStatusCode) ie_infer_request_set_batch(ie_infer_request_t *infer_request, const size_t size);
578 
579 /** @} */ // end of InferRequest
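A sketch of the synchronous and asynchronous flows for a request created with ie_exec_network_create_infer_request(); the blob names "input" and "output" are illustrative and must match the network:

    ie_blob_t *input_blob = NULL;
    ie_blob_t *output_blob = NULL;
    IEStatusCode status = ie_infer_request_get_blob(request, "input", &input_blob);

    ie_blob_buffer_t buffer;
    if (status == OK && ie_blob_get_buffer(input_blob, &buffer) == OK) {
        // ... write input data through buffer.buffer ...
    }

    // Synchronous inference: blocks until outputs are ready.
    if (status == OK)
        status = ie_infer_request_infer(request);
    if (status == OK)
        status = ie_infer_request_get_blob(request, "output", &output_blob);

    // Asynchronous inference: start the request and wait with an infinite timeout (-1).
    if (ie_infer_request_infer_async(request) == OK)
        ie_infer_request_wait(request, -1);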
580 
581 // Network
582 
583 /**
584  * @defgroup Network Network
585  * Set of functions managing a network that has been read from the IR before loading
586  * it to the device.
587  * @{
588  */
589 
590 /**
591  * @brief When the network is loaded into the Inference Engine, it is not required anymore and should be released.
592  * @ingroup Network
593  * @param network The pointer to the instance of the ie_network_t to free.
594  * @return Status code of the operation: OK(0) for success.
595  */
596 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_free(ie_network_t **network);
597 
598 /**
599  * @brief Gets number of inputs for the network.
600  * @ingroup Network
601  * @param network A pointer to the instance of the ie_network_t to get number of input information.
602  * @param size_result A number of the instance's input information.
603  * @return Status code of the operation: OK(0) for success.
604  */
605 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_get_inputs_number(const ie_network_t *network, size_t *size_result);
606 
607 /**
608  * @brief Gets name corresponding to the "number". Use the ie_network_name_free() method to free memory.
609  * @ingroup Network
610  * @param network A pointer to the instance of the ie_network_t to get input information.
611  * @param number An id of input information.
612  * @param name Input name corresponding to the number.
613  * @return Status code of the operation: OK(0) for success.
614  */
615 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_get_input_name(const ie_network_t *network, size_t number, char **name);
616 
617 /**
618  * @brief Gets a precision of the input data provided by the user.
619  * @ingroup Network
620  * @param network A pointer to ie_network_t instance.
621  * @param input_name Name of input data.
622  * @param prec_result A pointer to the precision used for input blob creation.
623  * @return Status code of the operation: OK(0) for success.
624  */
625 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_get_input_precision(const ie_network_t *network, const char *input_name, precision_e *prec_result);
626 
627 /**
628  * @brief Changes the precision of the input data provided by the user.
629  * This function should be called before loading the network to the device.
630  * @ingroup Network
631  * @param network A pointer to ie_network_t instance.
632  * @param input_name Name of input data.
633  * @param p A new precision of the input data to set (e.g. precision_e.FP16).
634  * @return Status code of the operation: OK(0) for success.
635  */
636 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_set_input_precision(ie_network_t *network, const char *input_name, const precision_e p);
637 
638 /**
639  * @brief Gets a layout of the input data.
640  * @ingroup Network
641  * @param network A pointer to ie_network_t instance.
642  * @param input_name Name of input data.
643  * @param layout_result A pointer to the layout used for input blob creation.
644  * @return Status code of the operation: OK(0) for success.
645  */
646 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_get_input_layout(const ie_network_t *network, const char *input_name, layout_e *layout_result);
647 
648 /**
649  * @brief Changes the layout of the input data named "input_name".
650  * This function should be called before loading the network to the device.
651  * @ingroup Network
652  * @param network A pointer to ie_network_t instance.
653  * @param input_name Name of input data.
654  * @param l A new layout of the input data to set.
655  * @return Status code of the operation: OK(0) for success.
656  */
657 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_set_input_layout(ie_network_t *network, const char *input_name, const layout_e l);
658 
659 /**
660  * @brief Gets dimensions/shape of the input data with reversed order.
661  * @ingroup Network
662  * @param network A pointer to ie_network_t instance.
663  * @param input_name Name of input data.
664  * @param dims_result A pointer to the dimensions used for input blob creation.
665  * @return Status code of the operation: OK(0) for success.
666  */
667 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_get_input_dims(const ie_network_t *network, const char *input_name, dimensions_t *dims_result);
668 
669 /**
670  * @brief Gets pre-configured resize algorithm.
671  * @ingroup Network
672  * @param network A pointer to ie_network_t instance.
673  * @param input_name Name of input data.
674  * @param resize_alg_result The pointer to the resize algorithm used for input blob creation.
675  * @return Status code of the operation: OK(0) for success.
676  */
677 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_get_input_resize_algorithm(const ie_network_t *network, const char *input_name, \
678  resize_alg_e *resize_alg_result);
679 
680 /**
681  * @brief Sets resize algorithm to be used during pre-processing
682  * @ingroup Network
683  * @param network A pointer to ie_network_t instance.
684  * @param input_name Name of input data.
685  * @param resize_algo Resize algorithm.
686  * @return Status code of the operation: OK(0) for success.
687  */
688 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_set_input_resize_algorithm(ie_network_t *network, const char *input_name, const resize_alg_e resize_algo);
689 
690 /**
691  * @brief Gets color format of the input data.
692  * @ingroup Network
693  * @param network A pointer to ie_network_t instance.
694  * @param input_name Name of input data.
695  * @param colformat_result The pointer to the color format used for input blob creation.
696  * @return Status code of the operation: OK(0) for success.
697  */
698 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_get_color_format(const ie_network_t *network, const char *input_name, colorformat_e *colformat_result);
699 
700 /**
701  * @brief Changes the color format of the input data.
702  * @ingroup Network
703  * @param network A pointer to ie_network_t instance.
704  * @param input_name Name of input data.
705  * @param color_format Color format of the input data.
706  * @return Status code of the operation: OK(0) for success.
707  */
708 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_set_color_format(ie_network_t *network, const char *input_name, const colorformat_e color_format);
709 
710 /**
711  * @brief Helper method to collect all input shapes with input names of the corresponding input data.
712  * Use the ie_network_input_shapes_free() method to free memory.
713  * @ingroup Network
714  * @param network A pointer to the instance of the ie_network_t to get input shapes.
715  * @param shapes A pointer to the input_shapes.
716  * @return Status code of the operation: OK(0) for success.
717  */
718 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_get_input_shapes(ie_network_t *network, input_shapes_t *shapes);
719 
720 /**
721  * @brief Run shape inference with new input shapes for the network.
722  * @ingroup Network
723  * @param network A pointer to the instance of the ie_network_t to reshape.
724  * @param shapes New input shapes to set for the network.
725  * @return Status code of the operation: OK(0) for success.
726  */
727 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_reshape(ie_network_t *network, const input_shapes_t shapes);
728 
729 /**
730  * @brief Gets number of output for the network.
731  * @ingroup Network
732  * @param network A pointer to the instance of the ie_network_t to get number of output information.
733  * @param size_result A number of the network's output information.
734  * @return Status code of the operation: OK(0) for success.
735  */
736 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_get_outputs_number(const ie_network_t *network, size_t *size_result);
737 
738 /**
739  * @brief Gets name corresponding to the "number". Use the ie_network_name_free() method to free memory.
740  * @ingroup Network
741  * @param network A pointer to the instance of the ie_network_t to get output information.
742  * @param number An id of output information.
743  * @param name Output name corresponding to the number.
744  * @return Status code of the operation: OK(0) for success.
745  */
746 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_get_output_name(const ie_network_t *network, const size_t number, char **name);
747 
748 /**
749  * @brief Gets a precision of the output data named "output_name".
750  * @ingroup Network
751  * @param network A pointer to ie_network_t instance.
752  * @param output_name Name of output data.
753  * @param prec_result A pointer to the precision used for output blob creation.
754  * @return Status code of the operation: OK(0) for success.
755  */
756 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_get_output_precision(const ie_network_t *network, const char *output_name, precision_e *prec_result);
757 
758 /**
759  * @brief Changes the precision of the output data named "output_name".
760  * @ingroup Network
761  * @param network A pointer to ie_network_t instance.
762  * @param output_name Name of output data.
763  * @param p A new precision of the output data to set (e.g. precision_e.FP16).
764  * @return Status code of the operation: OK(0) for success.
765  */
766 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_set_output_precision(ie_network_t *network, const char *output_name, const precision_e p);
767 
768 /**
769  * @brief Gets a layout of the output data.
770  * @ingroup Network
771  * @param network A pointer to ie_network_t instance.
772  * @param output_name Name of output data.
773  * @param layout_result A pointer to the layout used for output blob creation.
774  * @return Status code of the operation: OK(0) for success.
775  */
776 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_get_output_layout(const ie_network_t *network, const char *output_name, layout_e *layout_result);
777 
778 /**
779  * @brief Changes the layout of the output data named "output_name".
780  * @ingroup Network
781  * @param network A pointer to ie_network_t instance.
782  * @param output_name Name of output data.
783  * @param l A new layout of the output data to set.
784  * @return Status code of the operation: OK(0) for success.
785  */
786 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_set_output_layout(ie_network_t *network, const char *output_name, const layout_e l);
787 
788 /**
789  * @brief Gets dimensions/shape of the output data with reversed order.
790  * @ingroup Network
791  * @param network A pointer to ie_network_t instance.
792  * @param output_name Name of output data.
793  * @param dims_result A pointer to the dimensions used for output blob creation.
794  * @return Status code of the operation: OK(0) for success.
795  */
796 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_get_output_dims(const ie_network_t *network, const char *output_name, dimensions_t *dims_result);
797 
798 /**
799  * @brief Releases memory occupied by input_shapes.
800  * @ingroup Network
801  * @param inputShapes A pointer to the input_shapes to free memory.
802  * @return Status code of the operation: OK(0) for success.
803  */
804 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_input_shapes_free(input_shapes_t *inputShapes);
805 
806 /**
807  * @brief Releases memory occupied by input_name or output_name.
808  * @ingroup Network
809  * @param name A pointer to the input_name or output_name to free memory.
810  * @return Status code of the operation: OK(0) for success.
811  */
812 INFERENCE_ENGINE_C_API(IEStatusCode) ie_network_name_free(char **name);
813 
814 /** @} */ // end of Network
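A sketch of inspecting and adjusting the first input of a network obtained from ie_core_read_network() before it is loaded to a device; U8/NHWC and the bilinear resize are example choices:

    size_t input_count = 0;
    char *input_name = NULL;
    IEStatusCode status = ie_network_get_inputs_number(network, &input_count);
    if (status == OK && input_count > 0)
        status = ie_network_get_input_name(network, 0, &input_name);

    if (status == OK) {
        // Example preprocessing setup: 8-bit NHWC input resized by the plugin.
        ie_network_set_input_precision(network, input_name, U8);
        ie_network_set_input_layout(network, input_name, NHWC);
        ie_network_set_input_resize_algorithm(network, input_name, RESIZE_BILINEAR);
        ie_network_name_free(&input_name);
    }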
815 
816 // Blob
817 
818 /**
819  * @defgroup Blob Blob
820  * Set of functions allowing to access memory from infer requests or to make new
821  * memory objects to be passed to InferRequests.
822  * @{
823  */
824 
825 /**
826  * @brief Creates a blob with the specified dimensions and layout, and allocates the memory.
827  * @ingroup Blob
828  * @param tensorDesc Tensor descriptor for Blob creation.
829  * @param blob A pointer to the newly created blob.
830  * @return Status code of the operation: OK(0) for success.
831  */
832 INFERENCE_ENGINE_C_API(IEStatusCode) ie_blob_make_memory(const tensor_desc_t *tensorDesc, ie_blob_t **blob);
833 
834 /**
835  * @brief Creates a blob with the given tensor descriptor from the pointer to the pre-allocated memory.
836  * @ingroup Blob
837  * @param tensorDesc Tensor descriptor for Blob creation.
838  * @param ptr Pointer to the pre-allocated memory.
839  * @param size Length of the pre-allocated array.
840  * @param blob A pointer to the newly created blob.
841  * @return Status code of the operation: OK(0) for success.
842  */
843 INFERENCE_ENGINE_C_API(IEStatusCode) ie_blob_make_memory_from_preallocated(const tensor_desc_t *tensorDesc, void *ptr, size_t size, ie_blob_t **blob);
844 
845 /**
846  * @brief Creates a blob describing given roi_t instance based on the given blob with pre-allocated memory.
847  * @ingroup Blob
848  * @param inputBlob original blob with pre-allocated memory.
849  * @param roi A roi_t instance inside the original blob.
850  * @param blob A pointer to the newly created blob.
851  * @return Status code of the operation: OK(0) for success.
852  */
853 INFERENCE_ENGINE_C_API(IEStatusCode) ie_blob_make_memory_with_roi(const ie_blob_t *inputBlob, const roi_t *roi, ie_blob_t **blob);
854 
855 /**
856  * @brief Gets the total number of elements, which is a product of all the dimensions.
857  * @ingroup Blob
858  * @param blob A pointer to the blob.
859  * @param size_result The total number of elements.
860  * @return Status code of the operation: OK(0) for success.
861  */
862 INFERENCE_ENGINE_C_API(IEStatusCode) ie_blob_size(ie_blob_t *blob, int *size_result);
863 
864 /**
865  * @brief Gets the size of the current Blob in bytes.
866  * @ingroup Blob
867  * @param blob A pointer to the blob.
868  * @param bsize_result The size of the current blob in bytes.
869  * @return Status code of the operation: OK(0) for success.
870  */
871 INFERENCE_ENGINE_C_API(IEStatusCode) ie_blob_byte_size(ie_blob_t *blob, int *bsize_result);
872 
873 /**
874  * @brief Releases previously allocated data
875  * @ingroup Blob
876  * @param blob A pointer to the blob to free memory.
877  * @return Status code of the operation: OK(0) for success.
878  */
879 INFERENCE_ENGINE_C_API(IEStatusCode) ie_blob_deallocate(ie_blob_t **blob);
880 
881 /**
882  * @brief Gets access to the allocated memory.
883  * @ingroup Blob
884  * @param blob A pointer to the blob.
885  * @param blob_buffer A pointer to the copied data from the given blob.
886  * @return Status code of the operation: OK(0) for success.
887  */
888 INFERENCE_ENGINE_C_API(IEStatusCode) ie_blob_get_buffer(const ie_blob_t *blob, ie_blob_buffer_t *blob_buffer);
889 
890 /**
891  * @brief Gets read-only access to the allocated memory.
892  * @ingroup Blob
893  * @param blob A pointer to the blob.
894  * @param blob_cbuffer A pointer to the copied data from the given blob; the data is read-only.
895  * @return Status code of the operation: OK(0) for success
896  */
897 INFERENCE_ENGINE_C_API(IEStatusCode) ie_blob_get_cbuffer(const ie_blob_t *blob, ie_blob_buffer_t *blob_cbuffer);
898 
899 /**
900  * @brief Gets dimensions of blob's tensor.
901  * @ingroup Blob
902  * @param blob A pointer to the blob.
903  * @param dims_result A pointer to the dimensions of blob's tensor.
904  * @return Status code of the operation: OK(0) for success.
905  */
906 INFERENCE_ENGINE_C_API(IEStatusCode) ie_blob_get_dims(const ie_blob_t *blob, dimensions_t *dims_result);
907 
908 /**
909  * @brief Gets layout of blob's tensor.
910  * @ingroup Blob
911  * @param blob A pointer to the blob.
912  * @param layout_result A pointer to the layout of blob's tensor.
913  * @return Status code of the operation: OK(0) for success.
914  */
915 INFERENCE_ENGINE_C_API(IEStatusCode) ie_blob_get_layout(const ie_blob_t *blob, layout_e *layout_result);
916 
917 /**
918  * @brief Gets precision of blob's tensor.
919  * @ingroup Blob
920  * @param blob A pointer to the blob.
921  * @param prec_result A pointer to the precision of blob's tensor.
922  * @return Status code of the operation: OK(0) for success.
923  */
924 INFERENCE_ENGINE_C_API(IEStatusCode) ie_blob_get_precision(const ie_blob_t *blob, precision_e *prec_result);
925 
926 /** @} */ // end of Blob
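A sketch of allocating a blob from a tensor descriptor (filled as in the tensor_desc_t example above), writing into it through its writable buffer, and releasing it:

    ie_blob_t *blob = NULL;
    if (ie_blob_make_memory(&tensor, &blob) == OK) {
        int element_count = 0;
        ie_blob_size(blob, &element_count);

        ie_blob_buffer_t buffer;
        if (ie_blob_get_buffer(blob, &buffer) == OK) {
            float *data = (float *)buffer.buffer;  // FP32 in this sketch
            for (int i = 0; i < element_count; ++i)
                data[i] = 0.0f;  // ... replace with real input values ...
        }
        ie_blob_deallocate(&blob);
    }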
927 
928 #endif // IE_C_API_H