24.08
#include <Runtime.hpp>

Status LoadNetwork (NetworkId &networkIdOut, IOptimizedNetworkPtr network)
    Loads a complete network into the Runtime. More...

Status LoadNetwork (NetworkId &networkIdOut, IOptimizedNetworkPtr network, std::string &errorMessage)
    Load a complete network into the IRuntime. More...

Status LoadNetwork (NetworkId &networkIdOut, IOptimizedNetworkPtr network, std::string &errorMessage, const INetworkProperties &networkProperties)

armnn::TensorInfo GetInputTensorInfo (NetworkId networkId, LayerBindingId layerId) const

armnn::TensorInfo GetOutputTensorInfo (NetworkId networkId, LayerBindingId layerId) const

std::vector< ImportedInputId > ImportInputs (NetworkId networkId, const InputTensors &inputTensors, MemorySource forceImportMemorySource)

std::vector< ImportedOutputId > ImportOutputs (NetworkId networkId, const OutputTensors &outputTensors, MemorySource forceImportMemorySource)

void ClearImportedInputs (NetworkId networkId, const std::vector< ImportedInputId > inputIds)

void ClearImportedOutputs (NetworkId networkId, const std::vector< ImportedOutputId > outputIds)

Status EnqueueWorkload (NetworkId networkId, const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputIds={}, std::vector< ImportedOutputId > preImportedOutputIds={})

Status Execute (IWorkingMemHandle &workingMemHandle, const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputs, std::vector< ImportedOutputId > preImportedOutputs)
    This is an experimental function. More...

Status UnloadNetwork (NetworkId networkId)
    Unloads a network from the Runtime. More...

const IDeviceSpec & GetDeviceSpec () const

const std::shared_ptr< IProfiler > GetProfiler (NetworkId networkId) const
    Gets the profiler corresponding to the given network id. More...

std::unique_ptr< IWorkingMemHandle > CreateWorkingMemHandle (NetworkId networkId)
    Create a new unique WorkingMemHandle object. More...

void RegisterDebugCallback (NetworkId networkId, const DebugCallbackFunction &func)
    Registers a callback function to debug layers performing custom computations on intermediate tensors. More...

RuntimeImpl (const IRuntime::CreationOptions &options)
    Creates a runtime for workload execution. More...

~RuntimeImpl ()

void ReportStructure (arm::pipe::IProfilingService &profilingService) override

void InitialiseProfilingService (arm::pipe::IProfilingService &profilingService) override
Definition at line 30 of file Runtime.hpp.
◆ RuntimeImpl()
Creates a runtime for workload execution.
Definition at line 323 of file Runtime.cpp.
In outline, the constructor initialises m_NetworkIdCounter to 0 and creates the profiling service through arm::pipe::IProfilingService::CreateProfilingService (passing MAX_ARMNN_COUNTER together with the ARMNN_SOFTWARE_INFO, ARMNN_SOFTWARE_VERSION and ARMNN_HARDWARE_VERSION strings). Requesting timeline reporting without profiling being enabled throws ("It is not possible to enable timeline reporting without profiling being enabled"). Unless ARMNN_DISABLE_DYNAMIC_BACKENDS is defined, dynamic backends are discovered and registered. For each supported backend the constructor then:

- creates the backend through its registered factory function;
- in protected mode, only registers backends that advertise the "ProtectedContentAllocation" capability and requires a non-null custom allocator whose memory source is MemorySource::DmaBufProtected; if the allocator is missing, unsuitable, or rejected by UseCustomMemoryAllocator, an error is logged and the backend is not used; allocators registered this way are remembered in m_AllocatorsAddedByThisRuntime;
- outside protected mode, applies any allocator from m_CustomAllocatorMap via UseCustomMemoryAllocator, again logging an error and skipping the backend if it is rejected;
- applies a memory optimizer strategy, either one supplied in m_MemoryOptimizerStrategyMap or one named through a "MemoryOptimizerStrategy" backend option and looked up with GetMemoryOptimizerStrategy, provided the backend has the multi-axis packing capability (a warning is logged if the named strategy is not found);
- creates a backend context (stored in m_BackendContexts) and, where the backend provides one, a backend profiling context that is registered with the profiling service.

Finally the profiling service is configured, WaitForProfilingServiceActivation(3000) is called, and the initialization time is logged.
References ARMNN_LOG, ARMNN_VERSION, armnn::BackendRegistryInstance(), DeviceSpec::ClearDynamicBackends(), arm::pipe::ConvertExternalProfilingOptions(), DynamicBackendUtils::DeregisterDynamicBackends(), armnn::DmaBufProtected, armnn::error, DeviceSpec::GetDynamicBackends(), BackendRegistry::GetFactory(), armnn::GetMemBlockStrategyTypeName(), armnn::GetMemoryOptimizerStrategy(), armnn::GetTimeNow(), armnn::HasMatchingCapability(), armnn::info, IRuntime::CreationOptions::m_BackendOptions, IRuntime::CreationOptions::m_CustomAllocatorMap, IRuntime::CreationOptions::m_DynamicBackendsPath, IRuntime::CreationOptions::ExternalProfilingOptions::m_EnableProfiling, IRuntime::CreationOptions::m_MemoryOptimizerStrategyMap, IRuntime::CreationOptions::m_ProfilingOptions, IRuntime::CreationOptions::m_ProtectedMode, IRuntime::CreationOptions::ExternalProfilingOptions::m_TimelineEnabled, armnn::ParseOptions(), BackendRegistry::RegisterAllocator(), BackendRegistry::RegisterMemoryOptimizerStrategy(), and armnn::warning.
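RuntimeImpl is normally constructed indirectly. A minimal sketch, assuming the public armnn::IRuntime::Create factory (which builds a RuntimeImpl from the same CreationOptions); the dynamic-backends path is a placeholder, and only options named in the References list above are set:

    #include <armnn/ArmNN.hpp>

    armnn::IRuntime::CreationOptions options;
    options.m_DynamicBackendsPath = "/path/to/dynamic/backends";   // placeholder path
    options.m_ProtectedMode = false;   // true additionally requires a DmaBufProtected custom allocator
    options.m_ProfilingOptions.m_EnableProfiling = true;
    options.m_ProfilingOptions.m_TimelineEnabled = true;           // only valid while profiling is enabled

    armnn::IRuntimePtr runtime = armnn::IRuntime::Create(options);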
◆ ~RuntimeImpl()
Definition at line 551 of file Runtime.cpp.
In outline, the destructor collects the ids of all networks still held in m_LoadedNetworks and unloads each of them, logging a warning rather than throwing if either step fails ("An error has occurred when getting the IDs of the networks to unload" / "An error has occurred when unloading network"). Unless ARMNN_DISABLE_DYNAMIC_BACKENDS is defined, dynamic backends are deregistered. The backend contexts are cleared, every allocator added by this runtime is deregistered through BackendRegistryInstance().DeregisterAllocator(), and the shutdown time is logged.
References ARMNN_LOG, armnn::BackendRegistryInstance(), DeviceSpec::ClearDynamicBackends(), DynamicBackendUtils::DeregisterDynamicBackends(), DeviceSpec::GetDynamicBackends(), armnn::GetTimeDuration(), armnn::GetTimeNow(), armnn::info, BackendRegistry::SetProfilingService(), and RuntimeImpl::UnloadNetwork().
◆ ClearImportedInputs()
◆ ClearImportedOutputs()
◆ CreateWorkingMemHandle()
◆ EnqueueWorkload()
Definition at line 647 of file Runtime.cpp.
In outline, EnqueueWorkload looks up the LoadedNetwork for networkId, logging an error and returning Failure if the id does not exist ("A Network with an id of ... does not exist.") or if the network was loaded with asynchronous execution enabled ("Network ... is async enabled."). If the calling thread last executed a different network (tracked through a thread-local id), that network's working memory is freed first. The call is then forwarded to LoadedNetwork::EnqueueWorkload together with any pre-imported input and output ids, every backend context is notified through AfterEnqueueWorkload, and the execution time is logged.
References ARMNN_LOG, ARMNN_SCOPED_PROFILING_EVENT, LoadedNetwork::EnqueueWorkload(), armnn::error, armnn::Failure, LoadedNetwork::FreeWorkingMemory(), ProfilerManager::GetInstance(), LoadedNetwork::GetProfiler(), armnn::GetTimeDuration(), armnn::GetTimeNow(), armnn::info, LoadedNetwork::IsAsyncEnabled(), ProfilerManager::RegisterProfiler(), and armnn::Undefined.
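A minimal sketch of a synchronous inference, assuming runtime and netId were obtained as in the LoadNetwork example below; the binding ids are placeholders that a real caller gets from its parser or its own network construction:

    armnn::LayerBindingId inputBindingId  = 0;   // placeholder binding ids
    armnn::LayerBindingId outputBindingId = 0;

    armnn::TensorInfo inputInfo  = runtime->GetInputTensorInfo(netId, inputBindingId);
    armnn::TensorInfo outputInfo = runtime->GetOutputTensorInfo(netId, outputBindingId);
    inputInfo.SetConstant(true);   // ConstTensor requires a constant TensorInfo

    std::vector<float> inputData(inputInfo.GetNumElements());
    std::vector<float> outputData(outputInfo.GetNumElements());

    armnn::InputTensors  inputTensors  {{ inputBindingId,  armnn::ConstTensor(inputInfo,  inputData.data()) }};
    armnn::OutputTensors outputTensors {{ outputBindingId, armnn::Tensor(outputInfo, outputData.data()) }};

    // Runs the network once; returns Failure if netId is unknown or the network
    // was loaded with asynchronous execution enabled (use Execute() instead).
    armnn::Status status = runtime->EnqueueWorkload(netId, inputTensors, outputTensors);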
◆ Execute()
This is an experimental function.
Evaluates a network using input in inputTensors and outputs filled into outputTensors. This function performs a thread safe execution of the network. Returns once execution is complete. Will block until this and any other thread using the same workingMem object completes.
Definition at line 699 of file Runtime.cpp.
In outline, Execute resolves the network id from the working memory handle, then looks up the LoadedNetwork, logging an error and returning Failure if the id does not exist ("A Network with an id of ... does not exist.") or if the network was not loaded with asynchronous execution enabled ("Attempting execute ... when it is not async enabled."). The call is forwarded to LoadedNetwork::Execute with the working memory handle and any pre-imported tensors, and the execution time is logged.
References ARMNN_LOG, ARMNN_SCOPED_PROFILING_EVENT, armnn::error, LoadedNetwork::Execute(), armnn::Failure, ProfilerManager::GetInstance(), IWorkingMemHandle::GetNetworkId(), LoadedNetwork::GetProfiler(), armnn::GetTimeDuration(), armnn::GetTimeNow(), armnn::info, LoadedNetwork::IsAsyncEnabled(), ProfilerManager::RegisterProfiler(), and armnn::Undefined.
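A minimal sketch of the asynchronous path, reusing the tensors from the EnqueueWorkload example above; it assumes the network was loaded through the LoadNetwork overload that takes INetworkProperties with asynchronous execution enabled:

    std::unique_ptr<armnn::IWorkingMemHandle> workingMemHandle =
        runtime->CreateWorkingMemHandle(netId);

    // Handles can be shared between threads, but calls that share a handle block
    // each other (see the description above); no tensors are pre-imported here.
    armnn::Status status = runtime->Execute(*workingMemHandle, inputTensors, outputTensors, {}, {});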
◆ GetDeviceSpec()
Definition at line 90 of file Runtime.hpp.
{ return m_DeviceSpec; }
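A short sketch of querying the device spec, assuming IDeviceSpec::GetSupportedBackends() for listing the registered backends:

    #include <iostream>

    const armnn::IDeviceSpec& deviceSpec = runtime->GetDeviceSpec();
    for (const armnn::BackendId& backendId : deviceSpec.GetSupportedBackends())
    {
        std::cout << backendId.Get() << std::endl;   // e.g. "CpuRef", "CpuAcc", "GpuAcc"
    }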
◆ GetInputTensorInfo()
◆ GetOutputTensorInfo()
◆ GetProfiler()
Gets the profiler corresponding to the given network id.
- Parameters
    networkId    The id of the network for which to get the profiler.
- Returns
    A pointer to the requested profiler, or nullptr if not found.
Definition at line 290 of file Runtime.cpp.
auto it = m_LoadedNetworks.find(networkId);
if (it != m_LoadedNetworks.end())
{
    auto& loadedNetwork = it->second;
    return loadedNetwork->GetProfiler();
}
return nullptr;
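A short sketch of using the returned profiler, assuming the IProfiler::EnableProfiling and IProfiler::Print members of the public profiler interface:

    std::shared_ptr<armnn::IProfiler> profiler = runtime->GetProfiler(netId);
    if (profiler)   // nullptr when netId is not a loaded network
    {
        profiler->EnableProfiling(true);   // collect events for subsequent workloads
        // ... run EnqueueWorkload / Execute ...
        profiler->Print(std::cout);        // write the collected profiling report
    }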
◆ ImportInputs()
◆ ImportOutputs()
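Neither import function carries a detailed description on this page. The sketch below shows the flow suggested by the signatures above: pre-import the tensors once, pass the returned ids to EnqueueWorkload, and clear them when done. MemorySource::Malloc, the reuse of inputTensors/outputTensors from the EnqueueWorkload example, and passing empty tensor containers for fully pre-imported runs are assumptions:

    std::vector<armnn::ImportedInputId> importedInputs =
        runtime->ImportInputs(netId, inputTensors, armnn::MemorySource::Malloc);
    std::vector<armnn::ImportedOutputId> importedOutputs =
        runtime->ImportOutputs(netId, outputTensors, armnn::MemorySource::Malloc);

    // Pre-imported ids stand in for the corresponding tensors on later runs.
    runtime->EnqueueWorkload(netId, {}, {}, importedInputs, importedOutputs);

    runtime->ClearImportedInputs(netId, importedInputs);
    runtime->ClearImportedOutputs(netId, importedOutputs);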
◆ InitialiseProfilingService()
void InitialiseProfilingService (arm::pipe::IProfilingService &profilingService)   override
◆ LoadNetwork() [1/3]
Loads a complete network into the Runtime.
- Parameters
    [out]  networkIdOut    Unique identifier for the network is returned in this reference.
    [in]   network         Complete network to load into the Runtime. The runtime takes ownership of the network once passed in.
- Returns
    armnn::Status
Definition at line 166 of file Runtime.cpp.
std::string ignoredErrorMessage;
return LoadNetwork(networkIdOut, std::move(inNetwork), ignoredErrorMessage);
Referenced by RuntimeImpl::LoadNetwork().
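A minimal sketch of loading an optimized network, assuming the usual workflow in which armnn::Optimize() has produced an IOptimizedNetworkPtr from a network graph (network and the backend choice are placeholders):

    armnn::IOptimizedNetworkPtr optNet = armnn::Optimize(*network,
                                                         { armnn::Compute::CpuRef },
                                                         runtime->GetDeviceSpec());

    armnn::NetworkId netId = 0;
    if (runtime->LoadNetwork(netId, std::move(optNet)) != armnn::Status::Success)
    {
        // Loading failed; the overload taking an errorMessage reports why.
    }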
◆ LoadNetwork() [2/3]
Load a complete network into the IRuntime.
- Parameters
    [out]  networkIdOut    Unique identifier for the network is returned in this reference.
    [in]   network         Complete network to load into the IRuntime. The runtime takes ownership of the network once passed in.
    [out]  errorMessage    Error message if there were any errors.
- Returns
    armnn::Status
Definition at line 172 of file Runtime.cpp.
return LoadNetwork(networkIdOut, std::move(inNetwork), errorMessage, networkProperties);
References RuntimeImpl::LoadNetwork(), and armnn::Undefined.
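The same load with an error message, reusing optNet from the sketch above:

    std::string errorMessage;
    armnn::NetworkId netId = 0;
    if (runtime->LoadNetwork(netId, std::move(optNet), errorMessage) != armnn::Status::Success)
    {
        std::cerr << "LoadNetwork failed: " << errorMessage << std::endl;
    }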
◆ LoadNetwork() [3/3]
Definition at line 181 of file Runtime.cpp.
In outline, this overload registers the network's profiler with the ProfilerManager, generates a new network id for networkIdOut, notifies every backend context through BeforeLoadNetwork, and builds a LoadedNetwork with LoadedNetwork::MakeLoadedNetwork (taking ownership of the IOptimizedNetwork and receiving the error message, the network properties and the profiling service). The loaded network is stored in m_LoadedNetworks under the mutex (when threads are enabled), the backend contexts are notified through AfterLoadNetwork, and the NETWORK_LOADS profiling counter is incremented when profiling is enabled.
References armnn::Failure, ProfilerManager::GetInstance(), LoadedNetwork::MakeLoadedNetwork(), ProfilerManager::RegisterProfiler(), and armnn::Success.
◆ RegisterDebugCallback()
Registers a callback function to debug layers performing custom computations on intermediate tensors.
- Parameters
    networkId    The id of the network to register the callback.
    func         Callback function to pass to the debug layer.
Definition at line 769 of file Runtime.cpp.
LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
loadedNetwork->RegisterDebugCallback(func);
References LoadedNetwork::RegisterDebugCallback().
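A sketch of registering a callback, assuming the DebugCallbackFunction signature (LayerGuid, output slot index, ITensorHandle*) and a network whose layers were optimized with debugging enabled:

    runtime->RegisterDebugCallback(netId,
        [](armnn::LayerGuid /*layerGuid*/, unsigned int slotIndex, armnn::ITensorHandle* tensorHandle)
        {
            std::cout << "debug: output slot " << slotIndex << " holds "
                      << tensorHandle->GetShape().GetNumElements() << " elements" << std::endl;
        });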
◆ ReportStructure()
void ReportStructure (arm::pipe::IProfilingService &profilingService)   override
Definition at line 302 of file Runtime.cpp.
if (profilingService.IsProfilingEnabled())
{
    LoadedNetworks::iterator it = m_LoadedNetworks.begin();
    while (it != m_LoadedNetworks.end())
    {
        auto& loadedNetwork = it->second;
        loadedNetwork->SendNetworkStructure(profilingService);
        ++it;
    }
}
◆ UnloadNetwork()
Unloads a network from the Runtime.
At the moment this only removes the network from the m_Impl->m_Network. This might need more work in the future to be AndroidNN compliant.
- Parameters
    [in]  networkId    Unique identifier for the network to be unloaded. Generated in LoadNetwork().
- Returns
    armnn::Status
Definition at line 232 of file Runtime.cpp.
In outline, UnloadNetwork first asks every backend context through BeforeUnloadNetwork whether the network may be unloaded; if any context refuses, a warning is logged ("failed to unload network ... because BeforeUnloadNetwork failed") and Failure is returned. Otherwise, under the mutex (when threads are enabled), an end-of-life timeline event is recorded for the network's profiling GUID when timeline profiling is enabled, and the entry is erased from m_LoadedNetworks; if the id is not found a warning is logged and Failure is returned. On success the NETWORK_UNLOADS profiling counter is incremented when profiling is enabled, every backend context is notified through AfterUnloadNetwork, a debug message confirms the unload ("Unloaded network with ID: ..."), and Success is returned.
References ARMNN_LOG, armnn::debug, armnn::Failure, ProfilerManager::GetInstance(), ProfilerManager::RegisterProfiler(), armnn::Success, and armnn::warning.
Referenced by RuntimeImpl::~RuntimeImpl().
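A short sketch; the id comes from the earlier LoadNetwork call:

    if (runtime->UnloadNetwork(netId) != armnn::Status::Success)
    {
        // The id was not known to the runtime, or a backend context refused the unload.
    }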
◆ GetProfilingService
arm::pipe::IProfilingService & GetProfilingService (RuntimeImpl *runtime)   friend
Definition at line 59 of file TestUtils.cpp.
return *(runtime->m_ProfilingService.get());
◆ RuntimeLoadedNetworksReserve
void RuntimeLoadedNetworksReserve (RuntimeImpl *runtime)   friend
The documentation for this struct was generated from the following files:
    Runtime.hpp
    Runtime.cpp