Deprecated List
Global InferenceEngine::Blob::Blob (Precision p)
Please use TensorDesc for Blob initialization
Global InferenceEngine::Blob::Blob (Precision p, Layout l)
Please use TensorDesc for Blob initialization
Global InferenceEngine::Blob::Blob (Precision p, const SizeVector &dims)
Please use TensorDesc for Blob initialization
Global InferenceEngine::Blob::Blob (Precision p, Layout l, const SizeVector &dims)
Please use TensorDesc for Blob initialization
Global InferenceEngine::Blob::dims () const noexcept
Please use TensorDesc for working with dimensions.
Global InferenceEngine::Blob::layout () const noexcept
Please use TensorDesc to get the current layout
Global InferenceEngine::Blob::precision () const noexcept
Please use TensorDesc to get the precision
Global InferenceEngine::Blob::Reshape (const SizeVector &dims, Layout layout=Layout::ANY) noexcept
It works with reversed dimensions. Please use TensorDesc::reshape().
Global InferenceEngine::Blob::Resize (const SizeVector &dims, Layout layout=Layout::ANY) noexcept
It works with reversed dimensions. Please create a new blob if you want to change a size.
Global InferenceEngine::Blob::type () const noexcept
Please use TensorDesc to get the precision
Global InferenceEngine::CNNLayer::GetParamsAsBool (const char *param, bool def) const
Use the GetParamAsBool function for this functionality
Global InferenceEngine::CNNNetwork::CNNNetwork (ICNNNetwork *actual)
Use the shared_ptr-based version of the CNNNetwork constructor
Global InferenceEngine::ConvertLayout (Layout sourceLayout, Layout destLayout, const T *sourceBuffer, T *destBuffer, SizeVector dims)
Please use TensorDescriptors for conversion
Global InferenceEngine::Data::creatorLayer
Deprecated. Please use getCreatorLayer()
Global InferenceEngine::Data::dims
Deprecated. Please use getDims()
Global InferenceEngine::Data::inputTo
Deprecated. Please use getInputTo()
Global InferenceEngine::Data::layout
Deprecated. Please use getFormat()
Global InferenceEngine::Data::name
Deprecated. Please use getName()
Global InferenceEngine::Data::precision
Deprecated. Please use getPrecision()
Global InferenceEngine::Data::setBatchSize (size_t batch_size)
Global InferenceEngine::Data::userObject
Deprecated. Please use getUserObject()
Global InferenceEngine::ICNNNetwork::setBatchSize (const size_t size) noexcept=0
Use setBatchSize with a ResponseDesc parameter to get the error message
Global InferenceEngine::IInferencePlugin::GetPerformanceCounts (std::map< std::string, InferenceEngineProfileInfo > &perfMap, ResponseDesc *resp) const noexcept=0
Use IInferRequest to get performance measures
Global InferenceEngine::IInferencePlugin::Infer (const Blob &input, Blob &result, ResponseDesc *resp) noexcept=0
Use the Infer() overload that works with multiple inputs and outputs
Global InferenceEngine::IInferencePlugin::Infer (const BlobMap &input, BlobMap &result, ResponseDesc *resp) noexcept=0
Load an IExecutableNetwork to create an IInferRequest.
Global InferenceEngine::IInferencePlugin::LoadNetwork (ICNNNetwork &network, ResponseDesc *resp) noexcept=0
Use LoadNetwork with four parameters (executable network, CNN network, config, response)
Global InferenceEngine::ILayerImplFactory::getShapes (const std::vector< TensorDesc > &, std::vector< TensorDesc > &, ResponseDesc *) noexcept
Global InferenceEngine::InferencePlugin::Infer (const BlobMap &input, BlobMap &result)
Load an IExecutableNetwork to create an IInferRequest.
Global InferenceEngine::InputInfo::getDims () const
Please use getTensorDesc for working with layouts and dimensions
Global InferenceEngine::InputInfo::getInputPrecision () const
It will be removed from the public API. Please use getPrecision()
Global InferenceEngine::InputInfo::setInputPrecision (Precision p)
It will be removed from the public API. Please use setPrecision()
Global InferenceEngine::IShapeInferImpl::inferShapes (const std::vector< SizeVector > &, const std::map< std::string, std::string > &, const std::map< std::string, Blob::Ptr > &, std::vector< SizeVector > &, ResponseDesc *) noexcept
Class InferenceEngine::LayoutOffsetCounter
Uses TensorDesc working with layouts
Global InferenceEngine::make_shared_blob (Precision p, const SizeVector &dims)
Use TensorDesc to create Blob::Ptr
Global InferenceEngine::make_shared_blob (Precision p, Layout l, SizeVector dims, const std::vector< TypeTo > &arg)
Use TensorDesc in order to create Blob::Ptr.
Global InferenceEngine::make_shared_blob (TBlob< TypeTo > &&arg)
Use TensorDesc in order to create Blob::Ptr.
Global InferenceEngine::make_shared_blob (Precision p, Layout l, const SizeVector &dims)
Use TensorDesc to create Blob::Ptr.
Global InferenceEngine::make_shared_blob (Precision p, Layout l=NCHW)
Use TensorDesc in order to create Blob::Ptr.
Global InferenceEngine::make_shared_blob (Precision p, Layout l, const std::vector< TypeTo > &arg)
Use TensorDesc in order to create Blob::Ptr.
Global InferenceEngine::make_shared_blob (Precision p, const TArg &arg)
Use TensorDesc in order to create Blob::Ptr
Global InferenceEngine::make_shared_blob (Precision p, Layout l, const TArg &arg)
Use TensorDesc to create Blob::Ptr
Global InferenceEngine::make_shared_blob (Precision p, const std::vector< TypeTo > &arg)
Use TensorDesc in order to create Blob::Ptr.
Global InferenceEngine::make_shared_blob (Precision p, const SizeVector &dims, TypeTo *ptr, size_t size=0)
Use TensorDesc in order to create Blob::Ptr.
Global InferenceEngine::make_shared_blob (Precision p, Layout l, const SizeVector &dims, TypeTo *ptr, size_t size=0)
Use TensorDesc in order to create Blob::Ptr.
Global InferenceEngine::NormLayer::_k
Global InferenceEngine::Precision::size () const
The size of the precision will be reported in bits in future releases
Global InferenceEngine::TBlob< T, typename >::set (const std::vector< T > &that)
Deprecated to avoid memcpy() calls.
Global InferenceEngine::TBlob< T, typename >::TBlob (Precision p, Layout l, const SizeVector &dims, T *ptr, size_t data_size=0)
Please use TensorDesc for Blob initialization.
Global InferenceEngine::TBlob< T, typename >::TBlob (Precision p, Layout l)
Please use TensorDesc for Blob initialization.
Global InferenceEngine::TBlob< T, typename >::TBlob (Precision p, Layout l, const SizeVector &dims, std::shared_ptr< IAllocator > alloc)
Please use TensorDesc for Blob initialization.
Global InferenceEngine::TBlob< T, typename >::TBlob (Precision p, Layout l, const SizeVector &dims)
Please use TensorDesc for Blob initialization.
Global InferenceEngine::VPUConfigParams::DECLARE_CONFIG_KEY (VPU_INPUT_NORM)
Global InferenceEngine::VPUConfigParams::DECLARE_CONFIG_KEY (VPU_INPUT_BIAS)