Namespaces | Macros | Variables
ie_plugin_config.hpp File Reference

A header for advanced hardware-related properties for IE plugins, to be used in the SetConfig, LoadNetwork, and ImportNetwork methods of plugins. More...

#include <string>
#include <tuple>
#include <vector>

Go to the source code of this file.

Namespaces

 InferenceEngine::Metrics
 Metrics
 
 InferenceEngine::PluginConfigParams
 Generic plugin configuration.
 

Macros

#define METRIC_KEY(name)   InferenceEngine::Metrics::METRIC_##name
 shortcut for defining common Inference Engine metrics
 
#define EXEC_NETWORK_METRIC_KEY(name)   METRIC_KEY(name)
 shortcut for defining common Inference Engine ExecutableNetwork metrics
 
#define METRIC_VALUE(name)   InferenceEngine::Metrics::name
 shortcut for defining metric values
 
#define CONFIG_KEY(name)   InferenceEngine::PluginConfigParams::_CONFIG_KEY(name)
 shortcut for defining configuration keys
 
#define CONFIG_VALUE(name)   InferenceEngine::PluginConfigParams::name
 shortcut for defining configuration values
 

Variables

static constexpr auto InferenceEngine::Metrics::METRIC_AVAILABLE_DEVICES = "AVAILABLE_DEVICES"
 Metric to get a std::vector<std::string> of available device IDs. String value is "AVAILABLE_DEVICES".
 
static constexpr auto InferenceEngine::Metrics::METRIC_SUPPORTED_METRICS = "SUPPORTED_METRICS"
 Metric to get a std::vector<std::string> of supported metrics. String value is "SUPPORTED_METRICS". More...
 
static constexpr auto InferenceEngine::Metrics::METRIC_SUPPORTED_CONFIG_KEYS = "SUPPORTED_CONFIG_KEYS"
 Metric to get a std::vector<std::string> of supported config keys. String value is "SUPPORTED_CONFIG_KEYS". More...
 
static constexpr auto InferenceEngine::Metrics::METRIC_FULL_DEVICE_NAME = "FULL_DEVICE_NAME"
 Metric to get a std::string value representing a full device name. String value is "FULL_DEVICE_NAME".
 
static constexpr auto InferenceEngine::Metrics::METRIC_OPTIMIZATION_CAPABILITIES = "OPTIMIZATION_CAPABILITIES"
 Metric to get a std::vector<std::string> of optimization options per device. String value is "OPTIMIZATION_CAPABILITIES". More...
 
static constexpr auto InferenceEngine::Metrics::FP32 = "FP32"
 
static constexpr auto InferenceEngine::Metrics::BF16 = "BF16"
 
static constexpr auto InferenceEngine::Metrics::FP16 = "FP16"
 
static constexpr auto InferenceEngine::Metrics::INT8 = "INT8"
 
static constexpr auto InferenceEngine::Metrics::BIN = "BIN"
 
static constexpr auto InferenceEngine::Metrics::WINOGRAD = "WINOGRAD"
 
static constexpr auto InferenceEngine::Metrics::BATCHED_BLOB = "BATCHED_BLOB"
 
static constexpr auto InferenceEngine::Metrics::METRIC_RANGE_FOR_STREAMS = "RANGE_FOR_STREAMS"
 Metric to provide information about a range for streams on platforms where streams are supported. More...
 
static constexpr auto InferenceEngine::Metrics::METRIC_RANGE_FOR_ASYNC_INFER_REQUESTS = "RANGE_FOR_ASYNC_INFER_REQUESTS"
 Metric to provide a hint for a range for number of async infer requests. If device supports streams, the metric provides range for number of IRs per stream. More...
 
static constexpr auto InferenceEngine::Metrics::METRIC_NUMBER_OF_WAITING_INFER_REQUESTS = "NUMBER_OF_WAITING_INFER_REQUESTS"
 Metric to get an unsigned int value of the number of waiting infer requests. More...
 
static constexpr auto InferenceEngine::Metrics::METRIC_NUMBER_OF_EXEC_INFER_REQUESTS = "NUMBER_OF_EXEC_INFER_REQUESTS"
 Metric to get an unsigned int value of the number of infer requests in the execution stage. More...
 
static constexpr auto InferenceEngine::Metrics::METRIC_NETWORK_NAME = "NETWORK_NAME"
 Metric to get a name of network. String value is "NETWORK_NAME".
 
static constexpr auto InferenceEngine::Metrics::METRIC_DEVICE_THERMAL = "DEVICE_THERMAL"
 Metric to get a float of device thermal. String value is "DEVICE_THERMAL".
 
static constexpr auto InferenceEngine::Metrics::METRIC_OPTIMAL_NUMBER_OF_INFER_REQUESTS = "OPTIMAL_NUMBER_OF_INFER_REQUESTS"
 Metric to get an unsigned integer value of the optimal number of infer requests for an executable network.
 
static constexpr auto InferenceEngine::PluginConfigParams::YES = "YES"
 Generic boolean values.
 
static constexpr auto InferenceEngine::PluginConfigParams::NO = "NO"
 
static constexpr auto InferenceEngine::PluginConfigParams::KEY_CPU_THREADS_NUM = "CPU_THREADS_NUM"
 Limits the number of threads that are used by Inference Engine for inference on the CPU.
 
static constexpr auto InferenceEngine::PluginConfigParams::KEY_CPU_BIND_THREAD = "CPU_BIND_THREAD"
 The name for setting CPU affinity per thread option. More...
 
static constexpr auto InferenceEngine::PluginConfigParams::NUMA = "NUMA"
 
static constexpr auto InferenceEngine::PluginConfigParams::CPU_THROUGHPUT_NUMA = "CPU_THROUGHPUT_NUMA"
 Optimize CPU execution to maximize throughput. More...
 
static constexpr auto InferenceEngine::PluginConfigParams::CPU_THROUGHPUT_AUTO = "CPU_THROUGHPUT_AUTO"
 
static constexpr auto InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS = "CPU_THROUGHPUT_STREAMS"
 
static constexpr auto InferenceEngine::PluginConfigParams::GPU_THROUGHPUT_AUTO = "GPU_THROUGHPUT_AUTO"
 Optimize GPU plugin execution to maximize throughput. More...
 
static constexpr auto InferenceEngine::PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS = "GPU_THROUGHPUT_STREAMS"
 
static constexpr auto InferenceEngine::PluginConfigParams::KEY_PERF_COUNT = "PERF_COUNT"
 The name for setting performance counters option. More...
 
static constexpr auto InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_LIMIT = "DYN_BATCH_LIMIT"
 The key defines dynamic limit of batch processing. More...
 
static constexpr auto InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_ENABLED = "DYN_BATCH_ENABLED"
 
static constexpr auto InferenceEngine::PluginConfigParams::KEY_DUMP_QUANTIZED_GRAPH_AS_DOT = "DUMP_QUANTIZED_GRAPH_AS_DOT"
 
static constexpr auto InferenceEngine::PluginConfigParams::KEY_DUMP_QUANTIZED_GRAPH_AS_IR = "DUMP_QUANTIZED_GRAPH_AS_IR"
 
static constexpr auto InferenceEngine::PluginConfigParams::KEY_SINGLE_THREAD = "SINGLE_THREAD"
 The key controls threading inside Inference Engine. More...
 
static constexpr auto InferenceEngine::PluginConfigParams::KEY_CONFIG_FILE = "CONFIG_FILE"
 This key directs the plugin to load a configuration file. More...
 
static constexpr auto InferenceEngine::PluginConfigParams::KEY_DUMP_KERNELS = "DUMP_KERNELS"
 This key enables dumping of the kernels used by the plugin for custom layers. More...
 
static constexpr auto InferenceEngine::PluginConfigParams::KEY_TUNING_MODE = "TUNING_MODE"
 This key controls performance tuning done or used by the plugin. More...
 
static constexpr auto InferenceEngine::PluginConfigParams::TUNING_CREATE = "TUNING_CREATE"
 
static constexpr auto InferenceEngine::PluginConfigParams::TUNING_USE_EXISTING = "TUNING_USE_EXISTING"
 
static constexpr auto InferenceEngine::PluginConfigParams::TUNING_DISABLED = "TUNING_DISABLED"
 
static constexpr auto InferenceEngine::PluginConfigParams::TUNING_UPDATE = "TUNING_UPDATE"
 
static constexpr auto InferenceEngine::PluginConfigParams::TUNING_RETUNE = "TUNING_RETUNE"
 
static constexpr auto InferenceEngine::PluginConfigParams::KEY_TUNING_FILE = "TUNING_FILE"
 This key defines the tuning data filename to be created/used.
 
static constexpr auto InferenceEngine::PluginConfigParams::KEY_LOG_LEVEL = "LOG_LEVEL"
 The key for setting the desired log level. More...
 
static constexpr auto InferenceEngine::PluginConfigParams::LOG_NONE = "LOG_NONE"
 
static constexpr auto InferenceEngine::PluginConfigParams::LOG_ERROR = "LOG_ERROR"
 
static constexpr auto InferenceEngine::PluginConfigParams::LOG_WARNING = "LOG_WARNING"
 
static constexpr auto InferenceEngine::PluginConfigParams::LOG_INFO = "LOG_INFO"
 
static constexpr auto InferenceEngine::PluginConfigParams::LOG_DEBUG = "LOG_DEBUG"
 
static constexpr auto InferenceEngine::PluginConfigParams::LOG_TRACE = "LOG_TRACE"
 
static constexpr auto InferenceEngine::PluginConfigParams::KEY_DEVICE_ID = "DEVICE_ID"
 The key for setting the required device to execute on. Values: device IDs starting from "0" - first device, "1" - second device, etc.
 
static constexpr auto InferenceEngine::PluginConfigParams::KEY_EXCLUSIVE_ASYNC_REQUESTS = "EXCLUSIVE_ASYNC_REQUESTS"
 the key for enabling exclusive mode for async requests of different executable networks and the same plugin. More...
 
static constexpr auto InferenceEngine::PluginConfigParams::KEY_DUMP_EXEC_GRAPH_AS_DOT = "DUMP_EXEC_GRAPH_AS_DOT"
 This key enables dumping of the internal primitive graph. More...
 
static constexpr auto InferenceEngine::PluginConfigParams::KEY_ENFORCE_BF16 = "ENFORCE_BF16"
 The name for setting to execute in bfloat16 precision whenever it is possible. More...
 
static constexpr auto InferenceEngine::PluginConfigParams::KEY_CACHE_DIR = "CACHE_DIR"
 This key defines the directory which will be used to store any data cached by plugins. More...
 

Detailed Description

A header for advanced hardware-related properties for IE plugins, to be used in the SetConfig, LoadNetwork, and ImportNetwork methods of plugins.