// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

/**
 * @brief A header file for ICore interface
 * @file ie_icore.hpp
 */

#pragma once
#include <array>
#include <istream>
#include <map>
#include <memory>
#include <string>
#include <vector>

#include <cpp/ie_cnn_network.h>
#include <ie_parameter.hpp>

#include "cpp_interfaces/interface/ie_iexecutable_network_internal.hpp"
#include "threading/ie_itask_executor.hpp"
22 namespace InferenceEngine {
23 
24 /**
25  * @interface ICore
26  * @brief Minimal ICore interface to allow plugin to get information from Core Inference Engine class.
27  * @ingroup ie_dev_api_plugin_api
28  */
29 class ICore {
30 public:
31  /**
32  * @brief Returns global to Inference Engine class task executor
33  * @return Reference to task executor
34  */
35  virtual std::shared_ptr<ITaskExecutor> GetTaskExecutor() const = 0;
36 
37  /**
38  * @brief Reads IR xml and bin (with the same name) files
39  * @param model string with IR
40  * @param weights shared pointer to constant blob with weights
41  * @return CNNNetwork
42  */
43  virtual CNNNetwork ReadNetwork(const std::string& model, const Blob::CPtr& weights) const = 0;
44 
45  /**
46  * @brief Reads IR xml and bin files
47  * @param modelPath path to IR file
48  * @param binPath path to bin file, if path is empty, will try to read bin file with the same name as xml and
49  * if bin file with the same name was not found, will load IR without weights.
50  * @return CNNNetwork
51  */
52  virtual CNNNetwork ReadNetwork(const std::string& modelPath, const std::string& binPath) const = 0;
53 
54  /**
55  * @brief Creates an executable network from a network object.
56  *
57  * Users can create as many networks as they need and use
58  * them simultaneously (up to the limitation of the hardware resources)
59  *
60  * @param network CNNNetwork object acquired from Core::ReadNetwork
61  * @param deviceName Name of device to load network to
62  * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
63  * operation
64  * @return An executable network reference
65  */
67  const std::string& deviceName,
68  const std::map<std::string, std::string>& config = {}) = 0;
69 
70  /**
71  * @brief Creates an executable network from a model file.
72  *
73  * Users can create as many networks as they need and use
74  * them simultaneously (up to the limitation of the hardware resources)
75  *
76  * @param modelPath Path to model
77  * @param deviceName Name of device to load network to
78  * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
79  * operation
80  * @return An executable network reference
81  */
82  virtual SoExecutableNetworkInternal LoadNetwork(const std::string& modelPath,
83  const std::string& deviceName,
84  const std::map<std::string, std::string>& config) = 0;
85 
86  /**
87  * @brief Creates an executable network from a previously exported network
88  * @param networkModel network model stream
89  * @param deviceName Name of device load executable network on
90  * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
91  * operation*
92  * @return An executable network reference
93  */
94  virtual SoExecutableNetworkInternal ImportNetwork(std::istream& networkModel,
95  const std::string& deviceName = {},
96  const std::map<std::string, std::string>& config = {}) = 0;
97 
98  /**
99  * @brief Query device if it supports specified network with specified configuration
100  *
101  * @param deviceName A name of a device to query
102  * @param network Network object to query
103  * @param config Optional map of pairs: (config parameter name, config parameter value)
104  * @return An object containing a map of pairs a layer name -> a device name supporting this layer.
105  */
106  virtual QueryNetworkResult QueryNetwork(const CNNNetwork& network, const std::string& deviceName,
107  const std::map<std::string, std::string>& config) const = 0;
108 
109  /**
110  * @brief Gets general runtime metric for dedicated hardware.
111  *
112  * The method is needed to request common device properties
113  * which are executable network agnostic. It can be device name, temperature, other devices-specific values.
114  *
115  * @param deviceName - A name of a device to get a metric value.
116  * @param name - metric name to request.
117  * @return Metric value corresponding to metric key.
118  */
119  virtual Parameter GetMetric(const std::string& deviceName, const std::string& name) const = 0;
120 
121  /**
122  * @brief Returns devices available for neural networks inference
123  *
124  * @return A vector of devices. The devices are returned as { CPU, FPGA.0, FPGA.1, MYRIAD }
125  * If there more than one device of specific type, they are enumerated with .# suffix.
126  */
127  virtual std::vector<std::string> GetAvailableDevices() const = 0;
128 
129  /**
130  * @brief Checks whether device supports Export & Import functionality of network
131  *
132  * @param deviceName - A name of a device to get a metric value.
133  * @return True if device has IMPORT_EXPORT_SUPPORT metric in SUPPORTED_METRICS and
134  * this metric returns 'true', False otherwise.
135  */
136  virtual bool DeviceSupportsImportExport(const std::string& deviceName) const = 0;
137 
138  /**
139  * @brief Default virtual destructor
140  */
141  virtual ~ICore() = default;
142 };
143 
144 /**
145  * @private
146  */
147 class INFERENCE_ENGINE_API_CLASS(DeviceIDParser) {
148  std::string deviceName;
149  std::string deviceID;
150 public:
151  explicit DeviceIDParser(const std::string& deviceNameWithID);
152 
153  std::string getDeviceID() const;
154  std::string getDeviceName() const;
155 
156  static std::vector<std::string> getHeteroDevices(std::string fallbackDevice);
157  static std::vector<std::string> getMultiDevices(std::string devicesList);
158 };
159 
160 } // namespace InferenceEngine
std::shared_ptr< const Blob > CPtr
Minimal ICore interface to allow plugin to get information from Core Inference Engine class.
Definition: ie_icore.hpp:29
virtual SoExecutableNetworkInternal ImportNetwork(std::istream &networkModel, const std::string &deviceName={}, const std::map< std::string, std::string > &config={})=0
Creates an executable network from a previously exported network.
virtual bool DeviceSupportsImportExport(const std::string &deviceName) const =0
Checks whether device supports Export & Import functionality of network.
virtual Parameter GetMetric(const std::string &deviceName, const std::string &name) const =0
Gets general runtime metric for dedicated hardware.
virtual SoExecutableNetworkInternal LoadNetwork(const CNNNetwork &network, const std::string &deviceName, const std::map< std::string, std::string > &config={})=0
Creates an executable network from a network object.
virtual std::vector< std::string > GetAvailableDevices() const =0
Returns devices available for neural networks inference.
virtual std::shared_ptr< ITaskExecutor > GetTaskExecutor() const =0
Returns global to Inference Engine class task executor.
virtual ~ICore()=default
Default virtual destructor.
virtual SoExecutableNetworkInternal LoadNetwork(const std::string &modelPath, const std::string &deviceName, const std::map< std::string, std::string > &config)=0
Creates an executable network from a model file.
virtual QueryNetworkResult QueryNetwork(const CNNNetwork &network, const std::string &deviceName, const std::map< std::string, std::string > &config) const =0
Query device if it supports specified network with specified configuration.
virtual CNNNetwork ReadNetwork(const std::string &model, const Blob::CPtr &weights) const =0
Reads IR xml and bin (with the same name) files.
virtual CNNNetwork ReadNetwork(const std::string &modelPath, const std::string &binPath) const =0
Reads IR xml and bin files.
A header file for Inference Engine Task Executor Interface.
Inference Engine Plugin API namespace.
details::SOPointer< IExecutableNetworkInternal > SoExecutableNetworkInternal
SOPointer to IExecutableNetworkInternal.
Definition: ie_iexecutable_network_internal.hpp:156