hddl_plugin_config.hpp
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

/**
 * @deprecated Use vpu/hddl_config.hpp instead.
 * @brief A header that defines advanced hardware-related properties for VPU plugins.
 * These properties should be used in the SetConfig() and LoadNetwork() methods of plugins.
 *
 * @file hddl_plugin_config.hpp
 */

#pragma once

#include "ie_api.h"
#include "ie_plugin_config.hpp"

//
// Options
//

/**
 * @def VPU_HDDL_CONFIG_KEY(name)
 * @brief Shortcut for defining VPU HDDL configuration key
 */
#define VPU_HDDL_CONFIG_KEY(name) InferenceEngine::VPUConfigParams::_CONFIG_KEY(VPU_HDDL_##name)
/**
 * @def VPU_HDDL_CONFIG_VALUE(name)
 * @brief Shortcut for defining VPU HDDL configuration value
 */
#define VPU_HDDL_CONFIG_VALUE(name) InferenceEngine::VPUConfigParams::VPU_HDDL_##name

#define DECLARE_VPU_HDDL_CONFIG_KEY(name) DECLARE_CONFIG_KEY(VPU_HDDL_##name)
#define DECLARE_VPU_HDDL_CONFIG_VALUE(name) DECLARE_CONFIG_VALUE(VPU_HDDL_##name)
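
// Illustrative note, assuming the _CONFIG_KEY()/DECLARE_CONFIG_KEY() conventions from ie_plugin_config.hpp:
// DECLARE_VPU_HDDL_CONFIG_KEY(GRAPH_TAG) declares a constant KEY_VPU_HDDL_GRAPH_TAG holding the string
// "VPU_HDDL_GRAPH_TAG", and VPU_HDDL_CONFIG_KEY(GRAPH_TAG) is the matching accessor, so a configuration
// entry can be written roughly as:
//
//     std::map<std::string, std::string> config = {
//         {VPU_HDDL_CONFIG_KEY(GRAPH_TAG), "tagA"}  // key resolves to "VPU_HDDL_GRAPH_TAG"
//     };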

//
// Metrics
//

/**
 * @def VPU_HDDL_METRIC(name)
 * @brief Shortcut for defining VPU HDDL metric
 */
#define VPU_HDDL_METRIC(name) METRIC_KEY(VPU_HDDL_##name)
#define DECLARE_VPU_HDDL_METRIC(name, ...) DECLARE_METRIC_KEY(VPU_HDDL_##name, __VA_ARGS__)

namespace InferenceEngine {

namespace Metrics {

/**
* @deprecated Use InferenceEngine::METRIC_HDDL_DEVICE_NUM instead
* @brief Metric to get an int with the number of devices; the string value is METRIC_VPU_HDDL_DEVICE_NUM
*/
INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::METRIC_HDDL_DEVICE_NUM instead")
DECLARE_VPU_HDDL_METRIC(DEVICE_NUM, int);

/**
* @deprecated Use InferenceEngine::METRIC_HDDL_DEVICE_NAME instead
* @brief Metric to get a std::vector<std::string> of device names; the string value is METRIC_VPU_HDDL_DEVICE_NAME
*/
INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::METRIC_HDDL_DEVICE_NAME instead")
DECLARE_VPU_HDDL_METRIC(DEVICE_NAME, std::vector<std::string>);

/**
* @deprecated Use InferenceEngine::METRIC_HDDL_DEVICE_THERMAL instead
* @brief Metric to get a std::vector<float> of device thermal values; the string value is METRIC_VPU_HDDL_DEVICE_THERMAL
*/
INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::METRIC_HDDL_DEVICE_THERMAL instead")
DECLARE_VPU_HDDL_METRIC(DEVICE_THERMAL, std::vector<float>);
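
// A minimal sketch of querying one of the metrics above through InferenceEngine::Core::GetMetric();
// the device name "HDDL" and the variable names are assumptions made for this example.
//
//     InferenceEngine::Core ie;
//     auto thermal = ie.GetMetric("HDDL", VPU_HDDL_METRIC(DEVICE_THERMAL)).as<std::vector<float>>();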

/**
* @deprecated Use InferenceEngine::METRIC_HDDL_DEVICE_ID instead
* @brief Metric to get a std::vector<unsigned int> of device ids; the string value is METRIC_VPU_HDDL_DEVICE_ID
*/
INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::METRIC_HDDL_DEVICE_ID instead")
DECLARE_VPU_HDDL_METRIC(DEVICE_ID, std::vector<unsigned int>);

/**
* @deprecated Use InferenceEngine::METRIC_HDDL_DEVICE_SUBCLASS instead
* @brief Metric to get a std::vector<int> of device subclasses; the string value is METRIC_VPU_HDDL_DEVICE_SUBCLASS
*/
INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::METRIC_HDDL_DEVICE_SUBCLASS instead")
DECLARE_VPU_HDDL_METRIC(DEVICE_SUBCLASS, std::vector<int>);

/**
* @deprecated Use InferenceEngine::METRIC_HDDL_DEVICE_MEMORY_TOTAL instead
* @brief Metric to get a std::vector<unsigned int> of device total memory; the string value is METRIC_VPU_HDDL_DEVICE_MEMORY_TOTAL
*/
INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::METRIC_HDDL_DEVICE_MEMORY_TOTAL instead")
DECLARE_VPU_HDDL_METRIC(DEVICE_MEMORY_TOTAL, std::vector<unsigned int>);

/**
* @deprecated Use InferenceEngine::METRIC_HDDL_DEVICE_MEMORY_USED instead
* @brief Metric to get a std::vector<unsigned int> of device used memory; the string value is METRIC_VPU_HDDL_DEVICE_MEMORY_USED
*/
INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::METRIC_HDDL_DEVICE_MEMORY_USED instead")
DECLARE_VPU_HDDL_METRIC(DEVICE_MEMORY_USED, std::vector<unsigned int>);

/**
* @deprecated Use InferenceEngine::METRIC_HDDL_DEVICE_UTILIZATION instead
* @brief Metric to get a std::vector<float> of device utilization; the string value is METRIC_VPU_HDDL_DEVICE_UTILIZATION
*/
INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::METRIC_HDDL_DEVICE_UTILIZATION instead")
DECLARE_VPU_HDDL_METRIC(DEVICE_UTILIZATION, std::vector<float>);

/**
* @deprecated Use InferenceEngine::METRIC_HDDL_STREAM_ID instead
* @brief Metric to get a std::vector<std::string> of stream ids; the string value is METRIC_VPU_HDDL_STREAM_ID
*/
INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::METRIC_HDDL_STREAM_ID instead")
DECLARE_VPU_HDDL_METRIC(STREAM_ID, std::vector<std::string>);

/**
* @deprecated Use InferenceEngine::METRIC_HDDL_DEVICE_TAG instead
* @brief Metric to get a std::vector<std::string> of device tags; the string value is METRIC_VPU_HDDL_DEVICE_TAG
*/
INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::METRIC_HDDL_DEVICE_TAG instead")
DECLARE_VPU_HDDL_METRIC(DEVICE_TAG, std::vector<std::string>);

/**
* @deprecated Use InferenceEngine::METRIC_HDDL_GROUP_ID instead
* @brief Metric to get a std::vector<int> of group ids; the string value is METRIC_VPU_HDDL_GROUP_ID
*/
INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::METRIC_HDDL_GROUP_ID instead")
DECLARE_VPU_HDDL_METRIC(GROUP_ID, std::vector<int>);

/**
* @deprecated Use InferenceEngine::METRIC_HDDL_DEVICE_GROUP_USING_NUM instead
* @brief Metric to get an int with the number of devices being used by a group; the string value is METRIC_VPU_HDDL_DEVICE_GROUP_USING_NUM
*/
INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::METRIC_HDDL_DEVICE_GROUP_USING_NUM instead")
DECLARE_VPU_HDDL_METRIC(DEVICE_GROUP_USING_NUM, int);

/**
* @deprecated Use InferenceEngine::METRIC_HDDL_DEVICE_TOTAL_NUM instead
* @brief Metric to get an int with the total number of devices; the string value is METRIC_VPU_HDDL_DEVICE_TOTAL_NUM
*/
INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::METRIC_HDDL_DEVICE_TOTAL_NUM instead")
DECLARE_VPU_HDDL_METRIC(DEVICE_TOTAL_NUM, int);

} // namespace Metrics

namespace VPUConfigParams {

/**
 * @deprecated Use InferenceEngine::HDDL_GRAPH_TAG instead
 * @brief [Only for HDDLPlugin]
 * Type: Arbitrary non-empty string. If empty (""), the option is treated as not set; default: "".
 * This option allows specifying the number of MYX devices used to run inference on a specific executable network.
 * Note: Only one network can be allocated to one device.
 * The number of devices for the tag is specified in the hddl_service.config file.
 * Example:
 * "service_settings":
 * {
 *     "graph_tag_map":
 *     {
 *         "tagA": 3
 *     }
 * }
 * This means that an executable network tagged with "tagA" will be executed on 3 devices.
 */
INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::HDDL_GRAPH_TAG instead")
DECLARE_VPU_HDDL_CONFIG_KEY(GRAPH_TAG);
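
// A minimal usage sketch (illustrative; `ie` is assumed to be an InferenceEngine::Core instance,
// `network` a CNNNetwork, and "tagA" must match a tag declared in hddl_service.config as shown above):
//
//     auto executableNetwork = ie.LoadNetwork(network, "HDDL",
//                                             {{VPU_HDDL_CONFIG_KEY(GRAPH_TAG), "tagA"}});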

/**
 * @deprecated Use InferenceEngine::HDDL_STREAM_ID instead
 * @brief [Only for HDDLPlugin]
 * Type: Arbitrary non-empty string. If empty (""), the option is treated as not set; default: "".
 * This config makes the executable networks be allocated on one specific device (instead of multiple devices),
 * and all inference through these executable networks will be done on that device.
 * Note: Only one network can be allocated to one device.
 * The number of devices that will be used for stream affinity must be specified in the hddl_service.config file.
 * Example:
 * "service_settings":
 * {
 *     "stream_device_number": 5
 * }
 * This means that 5 devices will be used for stream affinity.
 */
INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::HDDL_STREAM_ID instead")
DECLARE_VPU_HDDL_CONFIG_KEY(STREAM_ID);
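
// Illustrative sketch (variable names and the stream id value are assumptions): networks loaded
// with the same stream id are expected to be allocated to, and run on, the same device.
//
//     std::map<std::string, std::string> config = {{VPU_HDDL_CONFIG_KEY(STREAM_ID), "stream0"}};
//     auto netA = ie.LoadNetwork(networkA, "HDDL", config);
//     auto netB = ie.LoadNetwork(networkB, "HDDL", config);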

/**
 * @deprecated Use InferenceEngine::HDDL_DEVICE_TAG instead
 * @brief [Only for HDDLPlugin]
 * Type: Arbitrary non-empty string. If empty (""), the option is treated as not set; default: "".
 * This config allows the user to control devices flexibly. It assigns a "tag" to a certain device while
 * allocating a network to it. Afterwards, the user can allocate/deallocate networks to this device via this "tag".
 * Devices used for this use case are controlled by a so-called "Bypass Scheduler" in the HDDL backend, and the number
 * of such devices needs to be specified in the hddl_service.config file.
 * Example:
 * "service_settings":
 * {
 *     "bypass_device_number": 5
 * }
 * This means that 5 devices will be used by the Bypass Scheduler.
 */
INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::HDDL_DEVICE_TAG instead")
DECLARE_VPU_HDDL_CONFIG_KEY(DEVICE_TAG);

/**
 * @deprecated Use InferenceEngine::HDDL_BIND_DEVICE instead
 * @brief [Only for HDDLPlugin]
 * Type: "YES/NO", default is "NO".
 * This config is a sub-config of DEVICE_TAG and is only available when "DEVICE_TAG" is set. After the user loads a
 * network, they get a handle to that network.
 * If "YES", the allocated network is bound to the device (with the specified "DEVICE_TAG"), which means all subsequent
 * inference through this network handle will be executed on this device only.
 * If "NO", the allocated network is not bound to the device (with the specified "DEVICE_TAG"). If the same network
 * is also allocated on multiple other devices (also with BIND_DEVICE set to "NO"), then inference through any handle of
 * these networks may be executed on any of the devices that have the network loaded.
 */
INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::HDDL_BIND_DEVICE instead")
DECLARE_VPU_HDDL_CONFIG_KEY(BIND_DEVICE);

/**
 * @deprecated Use InferenceEngine::HDDL_RUNTIME_PRIORITY instead
 * @brief [Only for HDDLPlugin]
 * Type: A signed int wrapped in a string, default is "0".
 * This config is a sub-config of DEVICE_TAG and is only available when "DEVICE_TAG" is set and "BIND_DEVICE" is "NO".
 * When there are multiple devices running a certain network (the same network running on multiple devices in the Bypass
 * Scheduler), the device with a larger number has higher priority, and more inference tasks will be fed to it preferentially.
 */
INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::HDDL_RUNTIME_PRIORITY instead")
DECLARE_VPU_HDDL_CONFIG_KEY(RUNTIME_PRIORITY);
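
// Illustrative sketch combining DEVICE_TAG, BIND_DEVICE, and RUNTIME_PRIORITY for the Bypass Scheduler;
// the tag value, priority value, and variable names are assumptions made for this example.
//
//     std::map<std::string, std::string> config = {
//         {VPU_HDDL_CONFIG_KEY(DEVICE_TAG), "myTag"},
//         {VPU_HDDL_CONFIG_KEY(BIND_DEVICE), CONFIG_VALUE(NO)},
//         {VPU_HDDL_CONFIG_KEY(RUNTIME_PRIORITY), "2"},
//     };
//     auto executableNetwork = ie.LoadNetwork(network, "HDDL", config);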

/**
 * @deprecated Use InferenceEngine::HDDL_USE_SGAD instead
 * @brief [Only for HDDLPlugin]
 * Type: "YES/NO", default is "NO".
 * SGAD is short for "Single Graph All Device". With this scheduler, once the application allocates one network, all devices
 * (managed by the SGAD scheduler) will be loaded with this graph. The number of networks loaded to one device
 * can exceed one. Once the application deallocates one network from a device, all devices will unload that network.
 */
INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::HDDL_USE_SGAD instead")
DECLARE_VPU_HDDL_CONFIG_KEY(USE_SGAD);
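
// Illustrative sketch of enabling the SGAD scheduler for a network (`ie` and `network` are
// assumed as in the earlier sketches):
//
//     auto executableNetwork = ie.LoadNetwork(network, "HDDL",
//                                             {{VPU_HDDL_CONFIG_KEY(USE_SGAD), CONFIG_VALUE(YES)}});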

/**
 * @deprecated Use InferenceEngine::HDDL_GROUP_DEVICE instead
 * @brief [Only for HDDLPlugin]
 * Type: A signed int wrapped in a string, default is "0".
 * This config assigns a "group id" to a certain device when it has been reserved for a certain client; that client
 * can use the devices in this group by specifying the group id, while other clients cannot use them.
 * Each device has its own group id. Devices in one group share the same group id.
 */
INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::HDDL_GROUP_DEVICE instead")
DECLARE_VPU_HDDL_CONFIG_KEY(GROUP_DEVICE);

} // namespace VPUConfigParams

} // namespace InferenceEngine