Deprecated List

File hddl_plugin_config.hpp

Use vpu/hddl_config.hpp instead.

Global InferenceEngine::Blob::element_size () const noexcept=0

Cast to MemoryBlob and use its API instead. The Blob class can represent a compound blob, which does not refer to a single contiguous region of memory.

Global InferenceEngine::Blob::product (const SizeVector &dims) noexcept

Cast to MemoryBlob and use its API instead.

Global InferenceEngine::CNNNetwork::CNNNetwork (std::shared_ptr< ICNNNetwork > network)

Don’t use this constructor. It will be removed soon

Global InferenceEngine::CNNNetwork::operator const ICNNNetwork & () const

InferenceEngine::ICNNNetwork interface is deprecated

Global InferenceEngine::CNNNetwork::operator ICNNNetwork & ()

InferenceEngine::ICNNNetwork interface is deprecated

Global InferenceEngine::CNNNetwork::operator ICNNNetwork::Ptr ()

InferenceEngine::ICNNNetwork interface is deprecated

Global InferenceEngine::Core::ImportNetwork (std::istream &networkModel)

Use Core::ImportNetwork with explicit device name

Global InferenceEngine::ExecutableNetwork::CreateInferRequestPtr ()

Use ExecutableNetwork::CreateInferRequest

Global InferenceEngine::ExecutableNetwork::ExecutableNetwork (std::shared_ptr< IExecutableNetwork > exec, std::shared_ptr< details::SharedObjectLoader > splg={})

This ctor will be removed in 2022.1

Global InferenceEngine::ExecutableNetwork::operator std::shared_ptr< IExecutableNetwork > ()

Will be removed. Use operator bool

Global InferenceEngine::ExecutableNetwork::QueryState ()

Use InferRequest::QueryState instead

Global InferenceEngine::ExecutableNetwork::reset (std::shared_ptr< IExecutableNetwork > newActual)

The method will be removed

Global InferenceEngine::fileNameToString (const file_name_t &str)

Use OS-native conversion utilities

Class InferenceEngine::ICNNNetwork

Use InferenceEngine::CNNNetwork wrapper instead

Global InferenceEngine::ICNNNetwork::addOutput (const std::string &layerName, size_t outputIndex=0, ResponseDesc *resp=nullptr) noexcept=0

Use InferenceEngine::CNNNetwork wrapper instead

Global InferenceEngine::ICNNNetwork::getBatchSize () const noexcept=0

Use InferenceEngine::CNNNetwork wrapper instead

Global InferenceEngine::ICNNNetwork::getFunction () const noexcept=0

Use InferenceEngine::CNNNetwork wrapper instead

Global InferenceEngine::ICNNNetwork::getFunction () noexcept=0

Use InferenceEngine::CNNNetwork wrapper instead

Global InferenceEngine::ICNNNetwork::getInput (const std::string &inputName) const noexcept=0

Use InferenceEngine::CNNNetwork wrapper instead

Global InferenceEngine::ICNNNetwork::getInputsInfo (InputsDataMap &inputs) const noexcept=0

Use InferenceEngine::CNNNetwork wrapper instead

Global InferenceEngine::ICNNNetwork::getName () const noexcept=0

Use InferenceEngine::CNNNetwork wrapper instead

Global InferenceEngine::ICNNNetwork::getOutputsInfo (OutputsDataMap &out) const noexcept=0

Use InferenceEngine::CNNNetwork wrapper instead

Global InferenceEngine::ICNNNetwork::getOVNameForTensor (std::string &ov_name, const std::string &orig_name, ResponseDesc *resp) const noexcept

Use InferenceEngine::CNNNetwork wrapper instead

Global InferenceEngine::ICNNNetwork::InputShapes

Use InferenceEngine::CNNNetwork wrapper instead

Global InferenceEngine::ICNNNetwork::layerCount () const noexcept=0

Use InferenceEngine::CNNNetwork wrapper instead

Global InferenceEngine::ICNNNetwork::Ptr

Use InferenceEngine::CNNNetwork wrapper instead

Global InferenceEngine::ICNNNetwork::reshape (const InputShapes &inputShapes, ResponseDesc *resp) noexcept

Use InferenceEngine::CNNNetwork wrapper instead

Global InferenceEngine::ICNNNetwork::serialize (const std::string &xmlPath, const std::string &binPath, ResponseDesc *resp) const noexcept=0

Use InferenceEngine::CNNNetwork wrapper instead

Global InferenceEngine::ICNNNetwork::setBatchSize (size_t size, ResponseDesc *responseDesc) noexcept=0

Use InferenceEngine::CNNNetwork wrapper instead

Global InferenceEngine::IExecutableNetwork::GetExecGraphInfo (ICNNNetwork::Ptr &graphPtr, ResponseDesc *resp) noexcept=0

Use InferenceEngine::ExecutableNetwork::GetExecGraphInfo instead

Global InferenceEngine::IExecutableNetwork::QueryState (IVariableState::Ptr &pState, size_t idx, ResponseDesc *resp) noexcept=0

Use InferRequest::QueryState instead

Global InferenceEngine::IExecutableNetworkInternal::Export (const std::string &modelFileName)

Use IExecutableNetworkInternal::Export(std::ostream& networkModel)

Global InferenceEngine::IExecutableNetworkInternal::QueryState ()

Need to implement GetVariablesInfo for ExecutableNetwork

Global InferenceEngine::IInferencePlugin::ImportNetwork (const std::string &modelFileName, const std::map< std::string, std::string > &config)

Use ImportNetwork(std::istream& networkModel, const std::map<std::string, std::string>& config)

Class InferenceEngine::IInferRequest

Use InferenceEngine::InferRequest C++ wrapper

Global InferenceEngine::InferRequest::InferRequest (IInferRequest::Ptr request, std::shared_ptr< details::SharedObjectLoader > splg={})

This ctor will be removed in 2022.1

Class InferenceEngine::IVariableState

Use InferenceEngine::VariableState C++ wrapper instead

Global InferenceEngine::IVariableStateInternal::GetLastState () const

Use IVariableStateInternal::GetState method instead

Global InferenceEngine::LowLatency (InferenceEngine::CNNNetwork &network)

This transformation will be removed in 2023.1.

Global InferenceEngine::Parameter::asVariant () const

Use ngraph::Variant directly

Global InferenceEngine::Parameter::operator std::shared_ptr< ngraph::Variant > () const

Use ngraph::Variant directly

Global InferenceEngine::Parameter::Parameter (std::shared_ptr< ngraph::Variant > &var)

Use ngraph::Variant directly

Global InferenceEngine::Parameter::Parameter (const std::shared_ptr< ngraph::Variant > &var)

Use ngraph::Variant directly

Global InferenceEngine::PluginConfigParams::KEY_DUMP_EXEC_GRAPH_AS_DOT

Use InferenceEngine::ExecutableNetwork::GetExecGraphInfo::serialize method

Global InferenceEngine::stringToFileName (const std::string &str)

Use OS-native conversion utilities

Global InferenceEngine::VariableState::GetLastState () const

Use IVariableState::GetState instead

Global InferenceEngine::VariableState::VariableState (std::shared_ptr< IVariableState > pState, std::shared_ptr< details::SharedObjectLoader > plg={})

This ctor will be removed in 2022.1

Class InferenceEngine::Version::ApiVersion

Use IE_VERSION_[MAJOR|MINOR|PATCH] definitions, buildNumber property

File myriad_plugin_config.hpp

Use vpu/myriad_config.hpp instead.

File vpu_plugin_config.hpp

Use vpu/myriad_config.hpp or vpu/hddl_config.hpp instead.