24.02
|
#include <Runtime.hpp>
|
Status | LoadNetwork (NetworkId &networkIdOut, IOptimizedNetworkPtr network) |
| Loads a complete network into the Runtime. More...
|
|
Status | LoadNetwork (NetworkId &networkIdOut, IOptimizedNetworkPtr network, std::string &errorMessage) |
| Loads a complete network into the IRuntime. More...
|
|
Status | LoadNetwork (NetworkId &networkIdOut, IOptimizedNetworkPtr network, std::string &errorMessage, const INetworkProperties &networkProperties) |
|
armnn::TensorInfo | GetInputTensorInfo (NetworkId networkId, LayerBindingId layerId) const |
|
armnn::TensorInfo | GetOutputTensorInfo (NetworkId networkId, LayerBindingId layerId) const |
|
std::vector< ImportedInputId > | ImportInputs (NetworkId networkId, const InputTensors &inputTensors, MemorySource forceImportMemorySource) |
|
std::vector< ImportedOutputId > | ImportOutputs (NetworkId networkId, const OutputTensors &outputTensors, MemorySource forceImportMemorySource) |
|
void | ClearImportedInputs (NetworkId networkId, const std::vector< ImportedInputId > inputIds) |
|
void | ClearImportedOutputs (NetworkId networkId, const std::vector< ImportedOutputId > outputIds) |
|
Status | EnqueueWorkload (NetworkId networkId, const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputIds={}, std::vector< ImportedOutputId > preImportedOutputIds={}) |
|
Status | Execute (IWorkingMemHandle &workingMemHandle, const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputs, std::vector< ImportedOutputId > preImportedOutputs) |
| This is an experimental function. More...
|
|
Status | UnloadNetwork (NetworkId networkId) |
| Unloads a network from the Runtime. More...
|
|
const IDeviceSpec & | GetDeviceSpec () const |
|
const std::shared_ptr< IProfiler > | GetProfiler (NetworkId networkId) const |
| Gets the profiler corresponding to the given network id. More...
|
|
std::unique_ptr< IWorkingMemHandle > | CreateWorkingMemHandle (NetworkId networkId) |
| Create a new unique WorkingMemHandle object. More...
|
|
void | RegisterDebugCallback (NetworkId networkId, const DebugCallbackFunction &func) |
| Registers a callback function to debug layers performing custom computations on intermediate tensors. More...
|
|
| RuntimeImpl (const IRuntime::CreationOptions &options) |
| Creates a runtime for workload execution. More...
|
|
| ~RuntimeImpl () |
|
void | ReportStructure (arm::pipe::IProfilingService &profilingService) override |
|
void | InitialiseProfilingService (arm::pipe::IProfilingService &profilingService) override |
|
Definition at line 30 of file Runtime.hpp.
◆ RuntimeImpl()
Creates a runtime for workload execution.
Definition at line 323 of file Runtime.cpp.
324 : m_NetworkIdCounter(0)
326 m_ProfilingService = arm::pipe::IProfilingService::CreateProfilingService(
327 arm::pipe::MAX_ARMNN_COUNTER,
329 arm::pipe::ARMNN_SOFTWARE_INFO,
330 arm::pipe::ARMNN_SOFTWARE_VERSION,
331 arm::pipe::ARMNN_HARDWARE_VERSION,
338 "It is not possible to enable timeline reporting without profiling being enabled");
340 #if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
352 auto backend = factoryFun();
358 customAllocatorMapIterator->second ==
nullptr)
360 #if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
365 throw armnn::Exception(
"Allocator associated with id " +
id.Get() +
" is null");
373 BackendCapability protectedContentCapability {
"ProtectedContentAllocation",
true};
380 <<
" is not registered as does not support protected content allocation.";
388 if (customAllocatorMapIterator->second->GetMemorySourceType()
391 if (!backend->UseCustomMemoryAllocator(customAllocatorMapIterator->second, err))
395 <<
" reported an error when entering protected mode. Backend won't be"
396 <<
" used. ErrorMsg: " << err;
401 m_AllocatorsAddedByThisRuntime.emplace(
id);
405 ARMNN_LOG(error) <<
"The CustomAllocator provided with the runtime options doesn't support "
406 "protected memory. Protected mode can't be activated. The backend "
408 <<
" is not going to be used. MemorySource must be MemorySource::DmaBufProtected";
414 ARMNN_LOG(error) <<
"Protected mode can't be activated for backend: "
416 <<
" no custom allocator was provided to the runtime options.";
426 if (!backend->UseCustomMemoryAllocator(customAllocatorMapIterator->second, err))
430 <<
" reported an error when trying to use the provided custom allocator."
431 " Backend won't be used."
432 <<
" ErrorMsg: " << err;
437 m_AllocatorsAddedByThisRuntime.emplace(
id);
450 id, customMemoryOptimizerStrategyMapIterator->second);
452 ARMNN_LOG(info) <<
"MemoryOptimizerStrategy "
453 << customMemoryOptimizerStrategyMapIterator->second->GetName()
454 <<
" set for the backend " <<
id <<
".";
460 std::string memoryOptimizerStrategyName =
"";
463 if (name ==
"MemoryOptimizerStrategy")
465 memoryOptimizerStrategyName = ParseStringBackendOption(value,
"");
468 if (memoryOptimizerStrategyName !=
"")
470 std::shared_ptr<IMemoryOptimizerStrategy> strategy =
475 ARMNN_LOG(warning) <<
"MemoryOptimizerStrategy: " << memoryOptimizerStrategyName
476 <<
" was not found.";
487 ARMNN_LOG(info) <<
"MemoryOptimizerStrategy: "
488 << memoryOptimizerStrategyName <<
" set for the backend " <<
id <<
".";
494 <<
" does not have multi-axis packing capability and cannot support"
495 <<
"MemoryOptimizerStrategy: " << memoryOptimizerStrategyName <<
".";
501 auto context = backend->CreateBackendContext(options);
507 m_BackendContexts.emplace(std::make_pair(
id, std::move(context)));
509 supportedBackends.emplace(
id);
511 unique_ptr<arm::pipe::IBackendProfiling> profilingIface =
512 arm::pipe::IBackendProfiling::CreateBackendProfiling(
514 *m_ProfilingService.get(),
518 auto profilingContext = backend->CreateBackendProfilingContext(options, profilingIface);
520 if (profilingContext)
523 m_ProfilingService->AddBackendProfilingContext(
id, profilingContext);
534 m_ProfilingService->ConfigureProfilingService(
539 m_ProfilingService->WaitForProfilingServiceActivation(3000);
544 ARMNN_LOG(info) <<
"Initialization time: " << std::setprecision(2)
References ARMNN_ASSERT, ARMNN_LOG, ARMNN_VERSION, armnn::BackendRegistryInstance(), DeviceSpec::ClearDynamicBackends(), arm::pipe::ConvertExternalProfilingOptions(), DynamicBackendUtils::DeregisterDynamicBackends(), armnn::DmaBufProtected, armnn::error, DeviceSpec::GetDynamicBackends(), BackendRegistry::GetFactory(), armnn::GetMemBlockStrategyTypeName(), armnn::GetMemoryOptimizerStrategy(), armnn::GetTimeNow(), armnn::HasMatchingCapability(), armnn::info, IRuntime::CreationOptions::m_BackendOptions, IRuntime::CreationOptions::m_CustomAllocatorMap, IRuntime::CreationOptions::m_DynamicBackendsPath, IRuntime::CreationOptions::ExternalProfilingOptions::m_EnableProfiling, IRuntime::CreationOptions::m_MemoryOptimizerStrategyMap, IRuntime::CreationOptions::m_ProfilingOptions, IRuntime::CreationOptions::m_ProtectedMode, IRuntime::CreationOptions::ExternalProfilingOptions::m_TimelineEnabled, armnn::ParseOptions(), BackendRegistry::RegisterAllocator(), BackendRegistry::RegisterMemoryOptimizerStrategy(), and armnn::warning.
◆ ~RuntimeImpl()
Definition at line 548 of file Runtime.cpp.
551 std::vector<int> networkIDs;
555 std::transform(m_LoadedNetworks.begin(), m_LoadedNetworks.end(),
556 std::back_inserter(networkIDs),
557 [](
const auto &pair) { return pair.first; });
559 catch (
const std::exception& e)
564 std::cerr <<
"WARNING: An error has occurred when getting the IDs of the networks to unload: " << e.what()
565 <<
"\nSome of the loaded networks may not be unloaded" << std::endl;
570 for (
auto networkID : networkIDs)
578 catch (
const std::exception& e)
583 std::cerr <<
"WARNING: An error has occurred when unloading network " << networkID <<
": " << e.what()
587 #if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
592 m_BackendContexts.clear();
598 for_each(m_AllocatorsAddedByThisRuntime.begin(), m_AllocatorsAddedByThisRuntime.end(),
599 [](
BackendId id) {BackendRegistryInstance().DeregisterAllocator(id);});
601 ARMNN_LOG(info) <<
"Shutdown time: " << std::setprecision(2)
References ARMNN_LOG, armnn::BackendRegistryInstance(), DeviceSpec::ClearDynamicBackends(), DynamicBackendUtils::DeregisterDynamicBackends(), DeviceSpec::GetDynamicBackends(), armnn::GetTimeDuration(), armnn::GetTimeNow(), armnn::info, BackendRegistry::SetProfilingService(), and RuntimeImpl::UnloadNetwork().
◆ ClearImportedInputs()
◆ ClearImportedOutputs()
◆ CreateWorkingMemHandle()
◆ EnqueueWorkload()
Definition at line 644 of file Runtime.cpp.
652 LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
656 ARMNN_LOG(error) <<
"A Network with an id of " << networkId <<
" does not exist.";
661 ARMNN_LOG(error) <<
"Network " << networkId <<
" is async enabled.";
668 static thread_local
NetworkId lastId = networkId;
669 if (lastId != networkId)
678 auto status = loadedNetwork->
EnqueueWorkload(inputTensors, outputTensors,
679 preImportedInputIds, preImportedOutputIds);
683 if (!preImportedInputIds.empty() || !preImportedOutputIds.empty())
686 for (
auto&& context : m_BackendContexts)
688 context.second->AfterEnqueueWorkload(networkId);
691 ARMNN_LOG(info) <<
"Execution time: " << std::setprecision(2)
References ARMNN_LOG, ARMNN_SCOPED_PROFILING_EVENT, LoadedNetwork::EnqueueWorkload(), armnn::error, armnn::Failure, LoadedNetwork::FreeWorkingMemory(), ProfilerManager::GetInstance(), LoadedNetwork::GetProfiler(), armnn::GetTimeDuration(), armnn::GetTimeNow(), armnn::info, LoadedNetwork::IsAsyncEnabled(), ProfilerManager::RegisterProfiler(), and armnn::Undefined.
◆ Execute()
This is an experimental function.
Evaluates a network using input in inputTensors and outputs filled into outputTensors. This function performs a thread safe execution of the network. Returns once execution is complete. Will block until this and any other thread using the same workingMem object completes.
Definition at line 696 of file Runtime.cpp.
704 NetworkId networkId = iWorkingMemHandle.GetNetworkId();
705 LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
709 ARMNN_LOG(error) <<
"A Network with an id of " << networkId <<
" does not exist.";
714 ARMNN_LOG(error) <<
"Attempting execute " << networkId <<
" when it is not async enabled.";
721 auto status = loadedNetwork->
Execute(inputTensors,
727 ARMNN_LOG(info) <<
"Execution time: " << std::setprecision(2)
References ARMNN_LOG, ARMNN_SCOPED_PROFILING_EVENT, armnn::error, LoadedNetwork::Execute(), armnn::Failure, ProfilerManager::GetInstance(), IWorkingMemHandle::GetNetworkId(), LoadedNetwork::GetProfiler(), armnn::GetTimeDuration(), armnn::GetTimeNow(), armnn::info, LoadedNetwork::IsAsyncEnabled(), ProfilerManager::RegisterProfiler(), and armnn::Undefined.
◆ GetDeviceSpec()
Definition at line 90 of file Runtime.hpp.
90 {
return m_DeviceSpec; }
◆ GetInputTensorInfo()
◆ GetOutputTensorInfo()
◆ GetProfiler()
Gets the profiler corresponding to the given network id.
- Parameters
-
networkId | The id of the network for which to get the profiler. |
- Returns
- A pointer to the requested profiler, or nullptr if not found.
Definition at line 290 of file Runtime.cpp.
292 auto it = m_LoadedNetworks.find(networkId);
293 if (it != m_LoadedNetworks.end())
295 auto& loadedNetwork = it->second;
296 return loadedNetwork->GetProfiler();
◆ ImportInputs()
◆ ImportOutputs()
◆ InitialiseProfilingService()
void InitialiseProfilingService |
( |
arm::pipe::IProfilingService & |
profilingService | ) |
|
|
override |
◆ LoadNetwork() [1/3]
Loads a complete network into the Runtime.
- Parameters
-
[out] | networkIdOut | - Unique identifier for the network is returned in this reference. |
[in] | network | - Complete network to load into the Runtime. The runtime takes ownership of the network once passed in. |
- Returns
- armnn::Status
Definition at line 166 of file Runtime.cpp.
168 std::string ignoredErrorMessage;
169 return LoadNetwork(networkIdOut, std::move(inNetwork), ignoredErrorMessage);
Referenced by RuntimeImpl::LoadNetwork().
◆ LoadNetwork() [2/3]
Loads a complete network into the IRuntime.
- Parameters
-
[out] | networkIdOut | Unique identifier for the network is returned in this reference. |
[in] | network | Complete network to load into the IRuntime. |
[out] | errorMessage | Error message if there were any errors. The runtime takes ownership of the network once passed in. |
- Returns
- armnn::Status
Definition at line 172 of file Runtime.cpp.
178 return LoadNetwork(networkIdOut, std::move(inNetwork), errorMessage, networkProperties);
References RuntimeImpl::LoadNetwork(), and armnn::Undefined.
◆ LoadNetwork() [3/3]
Definition at line 181 of file Runtime.cpp.
187 auto profiler = inNetwork->GetProfiler();
192 networkIdOut = GenerateNetworkId();
194 for (
auto&& context : m_BackendContexts)
196 context.second->BeforeLoadNetwork(networkIdOut);
200 std::unique_ptr<IOptimizedNetwork>(rawNetwork),
203 m_ProfilingService.get());
211 #if !defined(ARMNN_DISABLE_THREADS)
212 std::lock_guard<std::mutex> lockGuard(m_Mutex);
216 m_LoadedNetworks[networkIdOut] = std::move(loadedNetwork);
219 for (
auto&& context : m_BackendContexts)
221 context.second->AfterLoadNetwork(networkIdOut);
224 if (m_ProfilingService->IsProfilingEnabled())
226 m_ProfilingService->IncrementCounterValue(arm::pipe::NETWORK_LOADS);
References armnn::Failure, ProfilerManager::GetInstance(), LoadedNetwork::MakeLoadedNetwork(), ProfilerManager::RegisterProfiler(), and armnn::Success.
◆ RegisterDebugCallback()
Registers a callback function to debug layers performing custom computations on intermediate tensors.
- Parameters
-
networkId | The id of the network to register the callback. |
func | callback function to pass to the debug layer. |
Definition at line 766 of file Runtime.cpp.
768 LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
References LoadedNetwork::RegisterDebugCallback().
◆ ReportStructure()
void ReportStructure |
( |
arm::pipe::IProfilingService & |
profilingService | ) |
|
|
override |
Definition at line 302 of file Runtime.cpp.
304 if (profilingService.IsProfilingEnabled())
306 LoadedNetworks::iterator it = m_LoadedNetworks.begin();
307 while (it != m_LoadedNetworks.end())
309 auto& loadedNetwork = it->second;
310 loadedNetwork->SendNetworkStructure(profilingService);
◆ UnloadNetwork()
Unloads a network from the Runtime.
At the moment this only removes the network from the m_Impl->m_Network. This might need more work in the future to be AndroidNN compliant.
- Parameters
-
[in] | networkId | Unique identifier for the network to be unloaded. Generated in LoadNetwork(). |
- Returns
- armnn::Status
Definition at line 232 of file Runtime.cpp.
234 bool unloadOk =
true;
235 for (
auto&& context : m_BackendContexts)
237 unloadOk &= context.second->BeforeUnloadNetwork(networkId);
242 ARMNN_LOG(warning) <<
"RuntimeImpl::UnloadNetwork(): failed to unload "
243 "network with ID:" << networkId <<
" because BeforeUnloadNetwork failed";
247 std::unique_ptr<arm::pipe::TimelineUtilityMethods> timelineUtils =
248 arm::pipe::TimelineUtilityMethods::GetTimelineUtils(*m_ProfilingService.get());
250 #if !defined(ARMNN_DISABLE_THREADS)
251 std::lock_guard<std::mutex> lockGuard(m_Mutex);
257 auto search = m_LoadedNetworks.find(networkId);
258 if (search != m_LoadedNetworks.end())
260 arm::pipe::ProfilingGuid networkGuid = search->second->GetNetworkGuid();
261 timelineUtils->RecordEvent(networkGuid,
262 arm::pipe::LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS);
266 if (m_LoadedNetworks.erase(networkId) == 0)
268 ARMNN_LOG(warning) <<
"WARNING: RuntimeImpl::UnloadNetwork(): " << networkId <<
" not found!";
272 if (m_ProfilingService->IsProfilingEnabled())
274 m_ProfilingService->IncrementCounterValue(arm::pipe::NETWORK_UNLOADS);
278 for (
auto&& context : m_BackendContexts)
280 context.second->AfterUnloadNetwork(networkId);
286 ARMNN_LOG(debug) <<
"RuntimeImpl::UnloadNetwork(): Unloaded network with ID: " << networkId;
References ARMNN_LOG, armnn::debug, armnn::Failure, ProfilerManager::GetInstance(), ProfilerManager::RegisterProfiler(), armnn::Success, and armnn::warning.
Referenced by RuntimeImpl::~RuntimeImpl().
◆ GetProfilingService
arm::pipe::IProfilingService& GetProfilingService |
( |
RuntimeImpl * |
runtime | ) |
|
|
friend |
Definition at line 59 of file TestUtils.cpp.
61 return *(runtime->m_ProfilingService.get());
◆ RuntimeLoadedNetworksReserve
void RuntimeLoadedNetworksReserve |
( |
RuntimeImpl * |
runtime | ) |
|
|
friend |
The documentation for this struct was generated from the following files:
#define ARMNN_ASSERT(COND)
void AddSupportedBackends(const BackendIdSet &backendIds, bool isDynamic=false)
TensorInfo GetOutputTensorInfo(LayerBindingId layerId) const
void RegisterProfiler(IProfiler *profiler)
Status EnqueueWorkload(const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputIds={}, std::vector< ImportedOutputId > preImportedOutputIds={})
Single thread execution of the loaded network.
void RegisterDebugCallback(const DebugCallbackFunction &func)
const BackendIdSet & GetDynamicBackends() const
std::unordered_set< BackendId > BackendIdSet
ProfilingOptions ConvertExternalProfilingOptions(const armnn::IRuntime::CreationOptions::ExternalProfilingOptions &options)
std::string m_DynamicBackendsPath
Setting this value will override the paths set by the DYNAMIC_BACKEND_PATHS compiler directive Only a...
std::chrono::high_resolution_clock::time_point GetTimeNow()
BackendCapability
BackendCapability class.
void RegisterMemoryOptimizerStrategy(const BackendId &id, std::shared_ptr< IMemoryOptimizerStrategy > strategy)
Status LoadNetwork(NetworkId &networkIdOut, IOptimizedNetworkPtr network)
Loads a complete network into the Runtime.
std::vector< ImportedInputId > ImportInputs(const InputTensors &inputTensors, MemorySource forceImportMemorySource=MemorySource::Undefined)
bool m_TimelineEnabled
Indicates whether external timeline profiling is enabled or not.
#define ARMNN_LOG(severity)
Status UnloadNetwork(NetworkId networkId)
Unloads a network from the Runtime.
void ClearDynamicBackends()
void SetProfilingService(armnn::Optional< arm::pipe::IProfilingService & > profilingService)
std::vector< BackendOptions > m_BackendOptions
Pass backend specific options.
#define ARMNN_SCOPED_PROFILING_EVENT(backendId, name)
EmptyOptional is used to initialize the Optional class in case we want to have default value for an O...
std::unique_ptr< IMemoryOptimizerStrategy > GetMemoryOptimizerStrategy(const std::string &strategyName)
std::vector< ImportedOutputId > ImportOutputs(const OutputTensors &outputTensors, MemorySource forceImportMemorySource=MemorySource::Undefined)
BackendRegistry & BackendRegistryInstance()
Base class for all ArmNN exceptions so that users can filter to just those.
TensorInfo GetInputTensorInfo(LayerBindingId layerId) const
FactoryFunction GetFactory(const BackendId &id) const
void ParseOptions(const std::vector< BackendOptions > &options, BackendId backend, F f)
bool m_ProtectedMode
Setting this flag will allow the user to create the Runtime in protected mode.
static void DeregisterDynamicBackends(const BackendIdSet &dynamicBackends)
#define ARMNN_VERSION
ARMNN_VERSION: "X.Y.Z" where: X = Major version number Y = Minor version number Z = Patch version num...
void RegisterAllocator(const BackendId &id, std::shared_ptr< ICustomAllocator > alloc)
ExternalProfilingOptions m_ProfilingOptions
static std::unique_ptr< LoadedNetwork > MakeLoadedNetwork(std::unique_ptr< IOptimizedNetwork > net, std::string &errorMessage, const INetworkProperties &networkProperties, arm::pipe::IProfilingService *profilingService)
std::unique_ptr< IWorkingMemHandle > CreateWorkingMemHandle(NetworkId networkId)
Create a new unique WorkingMemHandle object.
static ProfilerManager & GetInstance()
Status Execute(const InputTensors &inputTensors, const OutputTensors &outputTensors, IWorkingMemHandle &workingMemHandle, std::vector< ImportedInputId > preImportedInputs={}, std::vector< ImportedOutputId > preImportedOutputs={})
Thread safe execution of the loaded network.
Very basic type safe variant.
const std::shared_ptr< IProfiler > & GetProfiler() const
void ClearImportedOutputs(const std::vector< ImportedOutputId > outputIds)
std::map< BackendId, std::shared_ptr< IMemoryOptimizerStrategy > > m_MemoryOptimizerStrategyMap
A map to define a custom memory optimizer strategy for specific backend Ids.
std::map< BackendId, std::shared_ptr< ICustomAllocator > > m_CustomAllocatorMap
A map to define a custom memory allocator for specific backend Ids.
void ClearImportedInputs(const std::vector< ImportedInputId > inputIds)
void InitialiseProfilingService(arm::pipe::IProfilingService &profilingService) override
Class for non-fatal exceptions raised while initialising a backend.
bool m_EnableProfiling
Indicates whether external profiling is enabled or not.
constexpr const char * GetMemBlockStrategyTypeName(MemBlockStrategyType memBlockStrategyType)
std::chrono::duration< double, std::milli > GetTimeDuration(std::chrono::high_resolution_clock::time_point start_time)
bool HasMatchingCapability(const BackendOptions::BackendOption &capability, const BackendCapabilities &capabilities)
Convenience function to check if a given capability matches a capability in a BackendCapabilities str...