ie_core.hpp
Go to the documentation of this file.
1 // Copyright (C) 2019 Intel Corporation
2 // SPDX-License-Identifier: Apache-2.0
3 //
4 
5 /**
6  * @brief This is a header file for the Inference Engine Core class C++ API
7  * @file ie_core.hpp
8  */
9 #pragma once
10 
11 #include <string>
12 #include <memory>
13 #include <map>
14 #include <vector>
15 
16 #include "cpp/ie_plugin_cpp.hpp"
17 #include "ie_extension.h"
18 
19 namespace InferenceEngine {
20 
21 /**
22  * @brief This class represents Inference Engine Core entity.
23  * It can throw exceptions safely for the application, where it is properly handled.
24  */
class INFERENCE_ENGINE_API_CLASS(Core) {
    // PIMPL: the Core internals live in Impl (defined in the .cpp), keeping this
    // public header free of implementation details.
    class Impl;
    std::shared_ptr<Impl> _impl;
public:
    /** @brief Constructs Inference Engine Core instance using XML configuration file with
     * plugins description. See RegisterPlugins for more details.
     * @param xmlConfigFile A path to .xml file with plugins to load from. If XML configuration file is not specified,
     * then default Inference Engine plugins are loaded from the default plugin.xml file.
     */
    explicit Core(const std::string & xmlConfigFile = std::string());

    /**
     * @brief Returns plugins version information
     * @param deviceName Device name to identify plugin
     * @return A map of device names to version information
     */
    std::map<std::string, Version> GetVersions(const std::string & deviceName) const;

    /**
     * @brief Sets logging callback
     * Logging is used to track what is going on inside the plugins, Inference Engine library
     * @param listener Logging sink
     */
    void SetLogCallback(IErrorListener &listener) const;

    /**
     * @brief Creates an executable network from a network object. Users can create as many networks as they need and use
     * them simultaneously (up to the limitation of the hardware resources)
     * @param network CNNNetwork object acquired from CNNNetReader
     * @param deviceName Name of device to load network to
     * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load operation
     * @return An executable network reference
     */
    ExecutableNetwork LoadNetwork(CNNNetwork network, const std::string & deviceName,
                                  const std::map<std::string, std::string> & config = std::map<std::string, std::string>());

    /**
     * @brief Registers extension for the specified plugin
     * @param extension Pointer to already loaded extension
     * @param deviceName Device name to identify plugin to add an extension in
     */
    void AddExtension(IExtensionPtr extension, const std::string & deviceName);

    /**
     * @brief Creates an executable network from a previously exported network
     * @param modelFileName Path to the location of the exported file
     * @param deviceName Name of device to load executable network on
     * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load operation
     * @return An executable network reference
     */
    ExecutableNetwork ImportNetwork(const std::string &modelFileName, const std::string & deviceName,
                                    const std::map<std::string, std::string> &config = std::map<std::string, std::string>());

    /**
     * @brief Query device if it supports specified network with specified configuration
     * @param network Network object to query
     * @param deviceName A name of a device to query
     * @param config Optional map of pairs: (config parameter name, config parameter value)
     * @return Query result: which layers are supported and a description of an error if any occurred
     */
    QueryNetworkResult QueryNetwork(const ICNNNetwork &network, const std::string & deviceName,
                                    const std::map<std::string, std::string> & config = std::map<std::string, std::string>()) const;

    /**
     * @brief Sets configuration for device, acceptable keys can be found in ie_plugin_config.hpp
     * @param config Map of pairs: (config parameter name, config parameter value)
     * @param deviceName An optional name of a device. If device name is not specified, the config is set for all the registered devices.
     */
    void SetConfig(const std::map<std::string, std::string> &config, const std::string & deviceName = std::string());

    /**
     * @brief Gets configuration dedicated to device behaviour. The method is targeted to extract information
     * which can be set via SetConfig method.
     * @param deviceName - A name of a device to get a configuration value.
     * @param name - config key name to request.
     * @return Value of config corresponding to config key.
     */
    Parameter GetConfig(const std::string & deviceName, const std::string & name) const;

    /**
     * @brief Gets general runtime metric for dedicated hardware. The method is needed to request common device properties
     * which are executable network agnostic. It can be device name, temperature, other devices-specific values.
     * @param deviceName - A name of a device to get a metric value.
     * @param name - metric name to request.
     * @return Metric value corresponding to metric key.
     */
    Parameter GetMetric(const std::string & deviceName, const std::string & name) const;

    /**
     * @brief Returns devices available for neural networks inference
     * @return A vector of devices. The devices are returned as { CPU, FPGA.0, FPGA.1, MYRIAD }
     * If there is more than one device of a specific type, they are enumerated with the .# suffix.
     */
    std::vector<std::string> GetAvailableDevices() const;

    /**
     * @brief Register new device and plugin which implement this device inside Inference Engine.
     * @param pluginName A name of plugin. Depending on platform pluginName is wrapped with shared library suffix and prefix to identify library full name
     * @param deviceName A device name to register plugin for. If device name is not specified, then it's taken from plugin
     * using InferenceEnginePluginPtr::GetName function
     */
    void RegisterPlugin(const std::string & pluginName, const std::string & deviceName);

    /**
     * @brief Removes plugin with specified name from Inference Engine
     * @param deviceName Device name identifying plugin to remove from Inference Engine
     */
    void UnregisterPlugin(const std::string & deviceName);

    /** @brief Registers plugin to Inference Engine Core instance using XML configuration file with
     * plugins description. XML file has the following structure:
     *
     * ```xml
     * <ie>
     *     <plugins>
     *         <plugin name="" location="">
     *             <extensions>
     *                 <extension location=""/>
     *             </extensions>
     *             <properties>
     *                 <property key="" value=""/>
     *             </properties>
     *         </plugin>
     *     </plugins>
     * </ie>
     * ```
     *
     * - `name` identifies name of device enabled by plugin
     * - `location` specifies absolute path to dynamic library with plugin. A path can also be relative to inference engine shared library.
     * It allows to have common config for different systems with different configurations.
     * - Properties are set to plugin via the `SetConfig` method.
     * - Extensions are set to plugin via the `AddExtension` method.
     * @param xmlConfigFile A path to .xml file with plugins to register.
     */
    void RegisterPlugins(const std::string & xmlConfigFile);
};
161 } // namespace InferenceEngine
This class represents Inference Engine Core entity. It can throw exceptions safely for the applicatio...
Definition: ie_core.hpp:25
Definition: ie_argmax_layer.hpp:11
This is the main interface to describe the NN topology.
Definition: ie_icnn_network.hpp:35
wrapper over IExecutableNetwork
Definition: ie_executable_network.hpp:28
This class contains all the information about the Neural Network and the related binary information...
Definition: ie_cnn_network.h:29
This class represents an object to work with different parameters.
Definition: ie_parameter.hpp:27
This class represents a custom error listener. Plugin consumers can provide it via InferenceEngine::S...
Definition: ie_error.hpp:16
This is a header file for the Inference Engine plugin C++ API.
Response structure encapsulating information about a supported layer.
Definition: ie_plugin.hpp:50
A header file that defines a wrapper class for handling extension instantiation and releasing resourc...