18 #include "cpp_interfaces/interface/ie_iexecutable_network_internal.hpp"
67 const std::string& deviceName,
68 const std::map<std::string, std::string>& config = {}) = 0;
83 const std::string& deviceName,
84 const std::map<std::string, std::string>& config) = 0;
95 const std::string& deviceName = {},
96 const std::map<std::string, std::string>& config = {}) = 0;
107 const std::map<std::string, std::string>& config)
const = 0;
147 class INFERENCE_ENGINE_API_CLASS(DeviceIDParser) {
148 std::string deviceName;
149 std::string deviceID;
151 explicit DeviceIDParser(
const std::string& deviceNameWithID);
153 std::string getDeviceID()
const;
154 std::string getDeviceName()
const;
156 static std::vector<std::string> getHeteroDevices(std::string fallbackDevice);
157 static std::vector<std::string> getMultiDevices(std::string devicesList);
std::shared_ptr< const Blob > CPtr
Minimal ICore interface that allows a plugin to get information from the Core Inference Engine class.
Definition: ie_icore.hpp:29
virtual SoExecutableNetworkInternal ImportNetwork(std::istream &networkModel, const std::string &deviceName={}, const std::map< std::string, std::string > &config={})=0
Creates an executable network from a previously exported network.
virtual bool DeviceSupportsImportExport(const std::string &deviceName) const =0
Checks whether device supports Export & Import functionality of network.
virtual Parameter GetMetric(const std::string &deviceName, const std::string &name) const =0
Gets general runtime metric for dedicated hardware.
virtual SoExecutableNetworkInternal LoadNetwork(const CNNNetwork &network, const std::string &deviceName, const std::map< std::string, std::string > &config={})=0
Creates an executable network from a network object.
virtual std::vector< std::string > GetAvailableDevices() const =0
Returns the devices available for neural network inference.
virtual std::shared_ptr< ITaskExecutor > GetTaskExecutor() const =0
Returns the task executor that is global to the Inference Engine class.
virtual ~ICore()=default
Default virtual destructor.
virtual SoExecutableNetworkInternal LoadNetwork(const std::string &modelPath, const std::string &deviceName, const std::map< std::string, std::string > &config)=0
Creates an executable network from a model file.
virtual QueryNetworkResult QueryNetwork(const CNNNetwork &network, const std::string &deviceName, const std::map< std::string, std::string > &config) const =0
Query device if it supports specified network with specified configuration.
virtual CNNNetwork ReadNetwork(const std::string &model, const Blob::CPtr &weights) const =0
Reads IR xml and bin files, where the bin file shares the same base name as the xml file.
virtual CNNNetwork ReadNetwork(const std::string &modelPath, const std::string &binPath) const =0
Reads IR xml and bin files.
A header file for Inference Engine Task Executor Interface.
Inference Engine Plugin API namespace.
details::SOPointer< IExecutableNetworkInternal > SoExecutableNetworkInternal
SOPointer to IExecutableNetworkInternal.
Definition: ie_iexecutable_network_internal.hpp:156