ie_iexecutable_network.hpp
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

/**
 * @brief a header file for IExecutableNetwork interface
 *
 * @file ie_iexecutable_network.hpp
 */
#pragma once

#include <map>
#include <memory>
#include <string>
#include <vector>

#include "ie_common.h"
#include "ie_icnn_network.hpp"
#include "ie_iinfer_request.hpp"
#include "ie_imemory_state.hpp"
#include "ie_input_info.hpp"
#include "ie_parameter.hpp"
#include "ie_remote_context.hpp"

namespace InferenceEngine {

/**
 * @brief A collection that contains string as key, and const Data smart pointer as value
 */
using ConstOutputsDataMap = std::map<std::string, CDataPtr>;

/**
 * @brief This is an interface of an executable network
 */
class IExecutableNetwork : public details::IRelease {
public:
    /**
     * @brief A smart pointer to the current IExecutableNetwork object
     */
    using Ptr = std::shared_ptr<IExecutableNetwork>;

    /**
     * @brief Gets the executable network output Data node information.
     *
     * The received info is stored in the given InferenceEngine::ConstOutputsDataMap object.
     * This method needs to be called to find output names, which are then used
     * when calling InferenceEngine::InferRequest::GetBlob or InferenceEngine::InferRequest::SetBlob
     *
     * @param out Reference to the InferenceEngine::ConstOutputsDataMap object
     * @param resp Optional: pointer to an already allocated object to contain information in case of failure
     * @return Status code of the operation: InferenceEngine::OK (0) for success
     */
    virtual StatusCode GetOutputsInfo(ConstOutputsDataMap& out, ResponseDesc* resp) const noexcept = 0;
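
    // A minimal usage sketch (illustrative, not part of the interface; `exeNetwork` is
    // assumed to be a valid IExecutableNetwork::Ptr obtained from a plugin):
    //
    //   InferenceEngine::ConstOutputsDataMap outputs;
    //   InferenceEngine::ResponseDesc resp;
    //   if (exeNetwork->GetOutputsInfo(outputs, &resp) != InferenceEngine::OK) {
    //       // resp.msg holds a description of the failure
    //   }
    //   // Each map key is an output name usable with InferRequest::GetBlob / SetBlob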

    /**
     * @brief Gets the executable network input Data node information.
     *
     * The received info is stored in the given InferenceEngine::ConstInputsDataMap object.
     * This method needs to be called to find out input names, which are then used
     * when calling InferenceEngine::InferRequest::SetBlob
     *
     * @param inputs Reference to InferenceEngine::ConstInputsDataMap object.
     * @param resp Optional: pointer to an already allocated object to contain information in case of failure
     * @return Status code of the operation: InferenceEngine::OK (0) for success
     */
    virtual StatusCode GetInputsInfo(ConstInputsDataMap& inputs, ResponseDesc* resp) const noexcept = 0;
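
    // A minimal usage sketch (illustrative; same `exeNetwork` assumption as above):
    //
    //   InferenceEngine::ConstInputsDataMap inputs;
    //   InferenceEngine::ResponseDesc resp;
    //   if (exeNetwork->GetInputsInfo(inputs, &resp) == InferenceEngine::OK) {
    //       // Each map key is an input name usable with InferRequest::SetBlob
    //   }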

    /**
     * @brief Creates an inference request object used to infer the network.
     *
     * The created request has allocated input and output blobs (that can be changed later).
     *
     * @param req Shared pointer to the created request object
     * @param resp Optional: pointer to an already allocated object to contain information in case of failure
     * @return Status code of the operation: InferenceEngine::OK (0) for success
     */
    virtual StatusCode CreateInferRequest(IInferRequest::Ptr& req, ResponseDesc* resp) noexcept = 0;
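
    // A minimal usage sketch (illustrative; error handling shortened):
    //
    //   InferenceEngine::IInferRequest::Ptr request;
    //   InferenceEngine::ResponseDesc resp;
    //   if (exeNetwork->CreateInferRequest(request, &resp) == InferenceEngine::OK) {
    //       // request comes with pre-allocated input/output blobs and can be used for inference
    //   }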

    /**
     * @brief Exports the current executable network.
     *
     * @see Core::ImportNetwork
     *
     * @param modelFileName Full path to the location of the exported file
     * @param resp Optional: pointer to an already allocated object to contain information in case of failure
     * @return Status code of the operation: InferenceEngine::OK (0) for success
     */
    virtual StatusCode Export(const std::string& modelFileName, ResponseDesc* resp) noexcept = 0;
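
    // A minimal usage sketch (illustrative; the file name is arbitrary):
    //
    //   InferenceEngine::ResponseDesc resp;
    //   exeNetwork->Export("exported_network.blob", &resp);
    //   // The resulting file can later be passed to Core::ImportNetwork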

    /**
     * @brief Exports the current executable network.
     *
     * @see Core::ImportNetwork
     *
     * @param networkModel Network model output stream
     * @param resp Optional: pointer to an already allocated object to contain information in case of failure
     * @return Status code of the operation: InferenceEngine::OK (0) for success
     */
    virtual StatusCode Export(std::ostream& networkModel, ResponseDesc* resp) noexcept = 0;
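
    // A minimal usage sketch (illustrative): exporting into an in-memory stream.
    //
    //   std::stringstream blob;  // requires <sstream>
    //   InferenceEngine::ResponseDesc resp;
    //   exeNetwork->Export(blob, &resp);
    //   // blob.str() now holds the serialized representation of the network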

    IE_SUPPRESS_DEPRECATED_START
    /**
     * @deprecated Use InferenceEngine::ExecutableNetwork::GetExecGraphInfo instead
     * @brief Get executable graph information from a device
     *
     * @param graphPtr network ptr to store executable graph information
     * @param resp Optional: pointer to an already allocated object to contain information in case of failure
     * @return Status code of the operation: InferenceEngine::OK (0) for success
     */
    // INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::ExecutableNetwork::GetExecGraphInfo instead")
    virtual StatusCode GetExecGraphInfo(ICNNNetwork::Ptr& graphPtr, ResponseDesc* resp) noexcept = 0;
    IE_SUPPRESS_DEPRECATED_END

    IE_SUPPRESS_DEPRECATED_START
    /**
     * @deprecated Use InferRequest::QueryState instead
     * @brief Gets state control interface for given executable network.
     *
     * State control is essential for recurrent networks
     *
     * @param pState reference to a pointer that receives internal states
     * @param idx requested index for receiving memory state
     * @param resp Optional: pointer to an already allocated object to contain information in case of failure
     * @return Status code of the operation: InferenceEngine::OK (0) for success, OUT_OF_BOUNDS (-6) if there is no
     * memory state for the given index
     */
    INFERENCE_ENGINE_DEPRECATED("Use InferRequest::QueryState instead")
    virtual StatusCode QueryState(IVariableState::Ptr& pState, size_t idx, ResponseDesc* resp) noexcept = 0;
    IE_SUPPRESS_DEPRECATED_END

    /**
     * @brief Sets configuration for current executable network
     *
     * @param config Map of pairs: (config parameter name, config parameter value)
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return code of the operation. InferenceEngine::OK if succeeded
     */
    virtual StatusCode SetConfig(const std::map<std::string, Parameter>& config, ResponseDesc* resp) noexcept = 0;
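
    // A minimal usage sketch (illustrative; "KEY" and "VALUE" are placeholders, real key
    // names are device-specific and listed under the SUPPORTED_CONFIG_KEYS metric):
    //
    //   InferenceEngine::ResponseDesc resp;
    //   std::map<std::string, InferenceEngine::Parameter> config = {{"KEY", "VALUE"}};
    //   exeNetwork->SetConfig(config, &resp);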

    /**
     * @brief Gets configuration for current executable network.
     *
     * The method is responsible for extracting information
     * that affects executable network execution. The list of supported configuration values can be extracted via
     * ExecutableNetwork::GetMetric with the SUPPORTED_CONFIG_KEYS key, but some of these keys cannot be changed
     * dynamically, e.g. DEVICE_ID cannot be changed if an executable network has already been compiled for a
     * particular device.
     *
     * @param name config key, can be found in ie_plugin_config.hpp
     * @param result value of config corresponding to config key
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return code of the operation. InferenceEngine::OK if succeeded
     */
    virtual StatusCode GetConfig(const std::string& name, Parameter& result, ResponseDesc* resp) const noexcept = 0;
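
    // A minimal usage sketch (illustrative; CONFIG_KEY comes from ie_plugin_config.hpp,
    // and Parameter::as<T>() is assumed for the conversion):
    //
    //   InferenceEngine::Parameter value;
    //   InferenceEngine::ResponseDesc resp;
    //   if (exeNetwork->GetConfig(CONFIG_KEY(DEVICE_ID), value, &resp) == InferenceEngine::OK) {
    //       std::string deviceId = value.as<std::string>();
    //   }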

    /**
     * @brief Gets general runtime metric for an executable network.
     *
     * It can be the network name, the actual device ID on
     * which the executable network is running, or other properties that cannot be changed dynamically.
     *
     * @param name metric name to request
     * @param result metric value corresponding to metric key
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return code of the operation. InferenceEngine::OK if succeeded
     */
    virtual StatusCode GetMetric(const std::string& name, Parameter& result, ResponseDesc* resp) const noexcept = 0;
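
    // A minimal usage sketch (illustrative; METRIC_KEY comes from ie_plugin_config.hpp):
    //
    //   InferenceEngine::Parameter metric;
    //   InferenceEngine::ResponseDesc resp;
    //   if (exeNetwork->GetMetric(METRIC_KEY(NETWORK_NAME), metric, &resp) == InferenceEngine::OK) {
    //       std::string networkName = metric.as<std::string>();
    //   }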

    /**
     * @brief Gets shared context used to create an executable network.
     *
     * @param pContext Reference to a pointer that will receive resulting shared context object ptr
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return code of the operation. InferenceEngine::OK if succeeded
     */
    virtual StatusCode GetContext(RemoteContext::Ptr& pContext, ResponseDesc* resp) const noexcept = 0;
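
    // A minimal usage sketch (illustrative; only meaningful when the network was compiled
    // with a remote context):
    //
    //   InferenceEngine::RemoteContext::Ptr context;
    //   InferenceEngine::ResponseDesc resp;
    //   exeNetwork->GetContext(context, &resp);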
};

} // namespace InferenceEngine