#if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
#include <backendsCommon/DynamicBackendUtils.hpp>
#endif
#include <client/include/backends/IBackendProfiling.hpp>
#include <common/include/LabelsAndEventClasses.hpp>

using namespace armnn;
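
// IRuntime is a thin pImpl facade: each of its public methods below simply
// forwards to the RuntimeImpl instance owned through pRuntimeImpl.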
Status IRuntime::LoadNetwork(NetworkId& networkIdOut, IOptimizedNetworkPtr network)
{
    return pRuntimeImpl->LoadNetwork(networkIdOut, std::move(network));
}

Status IRuntime::LoadNetwork(NetworkId& networkIdOut,
                             IOptimizedNetworkPtr network,
                             std::string& errorMessage)
{
    return pRuntimeImpl->LoadNetwork(networkIdOut, std::move(network), errorMessage);
}

Status IRuntime::LoadNetwork(NetworkId& networkIdOut,
                             IOptimizedNetworkPtr network,
                             std::string& errorMessage,
                             const INetworkProperties& networkProperties)
{
    return pRuntimeImpl->LoadNetwork(networkIdOut, std::move(network), errorMessage, networkProperties);
}

armnn::TensorInfo IRuntime::GetInputTensorInfo(NetworkId networkId, LayerBindingId layerId) const
{
    return pRuntimeImpl->GetInputTensorInfo(networkId, layerId);
}

armnn::TensorInfo IRuntime::GetOutputTensorInfo(NetworkId networkId, LayerBindingId layerId) const
{
    return pRuntimeImpl->GetOutputTensorInfo(networkId, layerId);
}

std::vector<ImportedInputId> IRuntime::ImportInputs(NetworkId networkId, const InputTensors& inputTensors,
                                                    MemorySource forceImportMemorySource)
{
    return pRuntimeImpl->ImportInputs(networkId, inputTensors, forceImportMemorySource);
}

std::vector<ImportedOutputId> IRuntime::ImportOutputs(NetworkId networkId, const OutputTensors& outputTensors,
                                                      MemorySource forceImportMemorySource)
{
    return pRuntimeImpl->ImportOutputs(networkId, outputTensors, forceImportMemorySource);
}

void IRuntime::ClearImportedInputs(NetworkId networkId, const std::vector<ImportedInputId> inputIds)
{
    return pRuntimeImpl->ClearImportedInputs(networkId, inputIds);
}

void IRuntime::ClearImportedOutputs(NetworkId networkId, const std::vector<ImportedOutputId> outputIds)
{
    return pRuntimeImpl->ClearImportedOutputs(networkId, outputIds);
}
Status IRuntime::EnqueueWorkload(NetworkId networkId,
                                 const InputTensors& inputTensors,
                                 const OutputTensors& outputTensors,
                                 std::vector<ImportedInputId> preImportedInputIds,
                                 std::vector<ImportedOutputId> preImportedOutputIds)
{
    return pRuntimeImpl->EnqueueWorkload(networkId, inputTensors, outputTensors,
                                         preImportedInputIds, preImportedOutputIds);
}

Status IRuntime::Execute(IWorkingMemHandle& workingMemHandle,
                         const InputTensors& inputTensors,
                         const OutputTensors& outputTensors,
                         std::vector<ImportedInputId> preImportedInputs,
                         std::vector<ImportedOutputId> preImportedOutputs)
{
    return pRuntimeImpl->Execute(workingMemHandle, inputTensors, outputTensors,
                                 preImportedInputs, preImportedOutputs);
}

void IRuntime::RegisterDebugCallback(NetworkId networkId, const DebugCallbackFunction& func)
{
    return pRuntimeImpl->RegisterDebugCallback(networkId, func);
}
int RuntimeImpl::GenerateNetworkId()
{
    return m_NetworkIdCounter++;
}
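
// The RuntimeImpl::LoadNetwork overloads below all funnel into the
// four-argument overload, which turns the optimized network into a
// LoadedNetwork and stores it under a freshly generated NetworkId.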
Status RuntimeImpl::LoadNetwork(NetworkId& networkIdOut, IOptimizedNetworkPtr inNetwork)
{
    std::string ignoredErrorMessage;
    return LoadNetwork(networkIdOut, std::move(inNetwork), ignoredErrorMessage);
}

Status RuntimeImpl::LoadNetwork(NetworkId& networkIdOut,
                                IOptimizedNetworkPtr inNetwork,
                                std::string& errorMessage)
{
    INetworkProperties networkProperties(false, MemorySource::Undefined, MemorySource::Undefined);
    return LoadNetwork(networkIdOut, std::move(inNetwork), errorMessage, networkProperties);
}
Status RuntimeImpl::LoadNetwork(NetworkId& networkIdOut,
                                IOptimizedNetworkPtr inNetwork,
                                std::string& errorMessage,
                                const INetworkProperties& networkProperties)
{
    // Register the network's profiler with the profiler manager.
    auto profiler = inNetwork->GetProfiler();
    ProfilerManager::GetInstance().RegisterProfiler(profiler.get());

    IOptimizedNetwork* rawNetwork = inNetwork.release();

    networkIdOut = GenerateNetworkId();

    for (auto&& context : m_BackendContexts)
    {
        context.second->BeforeLoadNetwork(networkIdOut);
    }

    std::unique_ptr<LoadedNetwork> loadedNetwork = LoadedNetwork::MakeLoadedNetwork(
        std::unique_ptr<IOptimizedNetwork>(rawNetwork),
        errorMessage,
        networkProperties,
        m_ProfilingService.get());

    if (!loadedNetwork)
    {
        return Status::Failure;
    }
    {
#if !defined(ARMNN_DISABLE_THREADS)
        std::lock_guard<std::mutex> lockGuard(m_Mutex);
#endif
        // Store the network under the new id.
        m_LoadedNetworks[networkIdOut] = std::move(loadedNetwork);
    }
    for (auto&& context : m_BackendContexts)
    {
        context.second->AfterLoadNetwork(networkIdOut);
    }
    if (m_ProfilingService->IsProfilingEnabled())
    {
        m_ProfilingService->IncrementCounterValue(arm::pipe::NETWORK_LOADS);
    }

    return Status::Success;
}
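
// UnloadNetwork first lets every backend context veto the unload via
// BeforeUnloadNetwork, then erases the LoadedNetwork under the runtime mutex,
// recording a timeline end-of-life event and bumping the unload counter when
// profiling is active.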
Status RuntimeImpl::UnloadNetwork(NetworkId networkId)
{
    bool unloadOk = true;
    for (auto&& context : m_BackendContexts)
    {
        unloadOk &= context.second->BeforeUnloadNetwork(networkId);
    }

    if (!unloadOk)
    {
        ARMNN_LOG(warning) << "RuntimeImpl::UnloadNetwork(): failed to unload "
                              "network with ID:" << networkId << " because BeforeUnloadNetwork failed";
        return Status::Failure;
    }
    std::unique_ptr<arm::pipe::TimelineUtilityMethods> timelineUtils =
        arm::pipe::TimelineUtilityMethods::GetTimelineUtils(*m_ProfilingService.get());
    {
#if !defined(ARMNN_DISABLE_THREADS)
        std::lock_guard<std::mutex> lockGuard(m_Mutex);
#endif
        // If timeline recording is on, mark the network's end of life.
        if (timelineUtils)
        {
            auto search = m_LoadedNetworks.find(networkId);
            if (search != m_LoadedNetworks.end())
            {
                arm::pipe::ProfilingGuid networkGuid = search->second->GetNetworkGuid();
                timelineUtils->RecordEvent(networkGuid,
                                           arm::pipe::LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS);
            }
        }
        if (m_LoadedNetworks.erase(networkId) == 0)
        {
            ARMNN_LOG(warning) << "WARNING: RuntimeImpl::UnloadNetwork(): " << networkId << " not found!";
            return Status::Failure;
        }
        if (m_ProfilingService->IsProfilingEnabled())
        {
            m_ProfilingService->IncrementCounterValue(arm::pipe::NETWORK_UNLOADS);
        }
    }
    for (auto&& context : m_BackendContexts)
    {
        context.second->AfterUnloadNetwork(networkId);
    }
    ARMNN_LOG(debug) << "RuntimeImpl::UnloadNetwork(): Unloaded network with ID: " << networkId;

    return Status::Success;
}
const std::shared_ptr<IProfiler> RuntimeImpl::GetProfiler(NetworkId networkId) const
{
    auto it = m_LoadedNetworks.find(networkId);
    if (it != m_LoadedNetworks.end())
    {
        auto& loadedNetwork = it->second;
        return loadedNetwork->GetProfiler();
    }

    return nullptr;
}
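
// ReportStructure walks every loaded network and sends its structure to the
// profiling service, but only when profiling is enabled.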
void RuntimeImpl::ReportStructure(arm::pipe::IProfilingService& profilingService)
{
    if (profilingService.IsProfilingEnabled())
    {
        LoadedNetworks::iterator it = m_LoadedNetworks.begin();
        while (it != m_LoadedNetworks.end())
        {
            auto& loadedNetwork = it->second;
            loadedNetwork->SendNetworkStructure(profilingService);
            // Advance to the next loaded network.
            it++;
        }
    }
}
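
// The constructor wires the runtime together: it creates the profiling
// service, optionally loads dynamic backends, and then walks the backend
// registry setting up a context, allocator and profiling hooks per backend.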
RuntimeImpl::RuntimeImpl(const IRuntime::CreationOptions& options)
    : m_NetworkIdCounter(0)
{
    m_ProfilingService = arm::pipe::IProfilingService::CreateProfilingService(
        arm::pipe::MAX_ARMNN_COUNTER,
        *this,
        arm::pipe::ARMNN_SOFTWARE_INFO,
        arm::pipe::ARMNN_SOFTWARE_VERSION,
        arm::pipe::ARMNN_HARDWARE_VERSION,
        *this);

    const auto startTime = armnn::GetTimeNow();

    if (options.m_ProfilingOptions.m_TimelineEnabled && !options.m_ProfilingOptions.m_EnableProfiling)
    {
        throw RuntimeException(
            "It is not possible to enable timeline reporting without profiling being enabled");
    }
#if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
    // Load any dynamic backends before querying the backend registry.
    LoadDynamicBackends(options.m_DynamicBackendsPath);
#endif
    BackendIdSet supportedBackends;
    for (const auto& id : BackendRegistryInstance().GetBackendIds())
    {
        // Store backend contexts for the supported backends.
        try
        {
            auto factoryFun = BackendRegistryInstance().GetFactory(id);
            auto backend = factoryFun();
            auto customAllocatorMapIterator = options.m_CustomAllocatorMap.find(id);
            if (customAllocatorMapIterator != options.m_CustomAllocatorMap.end() &&
                customAllocatorMapIterator->second == nullptr)
            {
#if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
                // Clean up the dynamic backends before throwing.
                DynamicBackendUtils::DeregisterDynamicBackends(m_DeviceSpec.GetDynamicBackends());
                m_DeviceSpec.ClearDynamicBackends();
#endif
                throw armnn::Exception("Allocator associated with id " + id.Get() + " is null");
            }
            // In protected mode only add backends that support protected content allocation.
            if (options.m_ProtectedMode)
            {
                using BackendCapability = BackendOptions::BackendOption;
                BackendCapability protectedContentCapability {"ProtectedContentAllocation", true};
                if (!HasCapability(protectedContentCapability, id))
                {
                    ARMNN_LOG(warning) << "Backend: " << id
                                       << " is not registered as does not support protected content allocation.";
                    continue;
                }
                // The user must provide a custom allocator that can allocate protected memory.
                if (customAllocatorMapIterator != options.m_CustomAllocatorMap.end())
                {
                    std::string err;
                    if (customAllocatorMapIterator->second->GetMemorySourceType()
                        == armnn::MemorySource::DmaBufProtected)
                    {
                        if (!backend->UseCustomMemoryAllocator(customAllocatorMapIterator->second, err))
                        {
                            ARMNN_LOG(error) << "The backend "
                                             << id
                                             << " reported an error when entering protected mode. Backend won't be"
                                             << " used. ErrorMsg: " << err;
                            continue;
                        }
                        // No errors, so register the custom allocator with the BackendRegistry.
                        BackendRegistryInstance().RegisterAllocator(id, customAllocatorMapIterator->second);
                        m_AllocatorsAddedByThisRuntime.emplace(id);
                    }
                    else
                    {
                        ARMNN_LOG(error) << "The CustomAllocator provided with the runtime options doesn't support "
                                            "protected memory. Protected mode can't be activated. The backend "
                                         << id
                                         << " is not going to be used. MemorySource must be MemorySource::DmaBufProtected";
                        continue;
                    }
                }
                else
                {
                    ARMNN_LOG(error) << "Protected mode can't be activated for backend: "
                                     << id
                                     << " no custom allocator was provided to the runtime options.";
                    continue;
                }
            }
            else
            {
                // Outside protected mode, use the custom allocator if one was provided.
                if (customAllocatorMapIterator != options.m_CustomAllocatorMap.end())
                {
                    std::string err;
                    if (!backend->UseCustomMemoryAllocator(customAllocatorMapIterator->second, err))
                    {
                        ARMNN_LOG(error) << "The backend "
                                         << id
                                         << " reported an error when trying to use the provided custom allocator."
                                            " Backend won't be used."
                                         << " ErrorMsg: " << err;
                        continue;
                    }
                    // No errors, so register the custom allocator with the BackendRegistry.
                    BackendRegistryInstance().RegisterAllocator(id, customAllocatorMapIterator->second);
                    m_AllocatorsAddedByThisRuntime.emplace(id);
                }
            }
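
            // A memory optimizer strategy can come either from the strategy map in the
            // creation options or, failing that, from a "MemoryOptimizerStrategy" backend
            // option that names one of the strategies in the library.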
            if (!options.m_MemoryOptimizerStrategyMap.empty())
            {
                auto customMemoryOptimizerStrategyMapIterator = options.m_MemoryOptimizerStrategyMap.find(id);
                if (customMemoryOptimizerStrategyMapIterator != options.m_MemoryOptimizerStrategyMap.end())
                {
                    BackendRegistryInstance().RegisterMemoryOptimizerStrategy(
                        id, customMemoryOptimizerStrategyMapIterator->second);

                    ARMNN_LOG(info) << "MemoryOptimizerStrategy "
                                    << customMemoryOptimizerStrategyMapIterator->second->GetName()
                                    << " set for the backend " << id << ".";
                }
            }
            else
            {
                // Otherwise check whether one of the existing strategies was requested by name.
                std::string memoryOptimizerStrategyName = "";
                ParseOptions(options.m_BackendOptions, id, [&](std::string name, const BackendOptions::Var& value)
                {
                    if (name == "MemoryOptimizerStrategy")
                    {
                        memoryOptimizerStrategyName = ParseStringBackendOption(value, "");
                    }
                });
                if (memoryOptimizerStrategyName != "")
                {
                    std::shared_ptr<IMemoryOptimizerStrategy> strategy =
                        GetMemoryOptimizerStrategy(memoryOptimizerStrategyName);

                    if (!strategy)
                    {
                        ARMNN_LOG(warning) << "MemoryOptimizerStrategy: " << memoryOptimizerStrategyName
                                           << " was not found.";
                    }
                    else
                    {
                        // Register the strategy only when the backend reports the multi-axis
                        // packing capability that it relies on.
                        if (HasCapability(BackendOptions::BackendOption{"MultiAxisPacking", true}, id))
                        {
                            BackendRegistryInstance().RegisterMemoryOptimizerStrategy(id, strategy);

                            ARMNN_LOG(info) << "MemoryOptimizerStrategy: "
                                            << memoryOptimizerStrategyName << " set for the backend " << id << ".";
                        }
                        else
                        {
                            ARMNN_LOG(warning) << "Backend "
                                               << id
                                               << " does not have multi-axis packing capability and cannot support"
                                               << "MemoryOptimizerStrategy: " << memoryOptimizerStrategyName << ".";
                        }
                    }
                }
            }
            auto context = backend->CreateBackendContext(options);

            // Backends may return nullptr if they don't need a backend-specific context.
            if (context)
            {
                m_BackendContexts.emplace(std::make_pair(id, std::move(context)));
            }
            supportedBackends.emplace(id);
            std::unique_ptr<arm::pipe::IBackendProfiling> profilingIface =
                arm::pipe::IBackendProfiling::CreateBackendProfiling(
                    arm::pipe::ConvertExternalProfilingOptions(options.m_ProfilingOptions),
                    *m_ProfilingService.get(),
                    id.Get());

            // Backends that don't support profiling return a null profiling context.
            auto profilingContext = backend->CreateBackendProfilingContext(options, profilingIface);
            if (profilingContext)
            {
                // Pass the context on to the profiling service.
                m_ProfilingService->AddBackendProfilingContext(id, profilingContext);
            }
        }
        catch (const BackendUnavailableException&)
        {
            // Ignore backends which are unavailable.
        }
    }

    // Pass the configuration on to the profiling service.
    m_ProfilingService->ConfigureProfilingService(
        arm::pipe::ConvertExternalProfilingOptions(options.m_ProfilingOptions));
    if (options.m_ProfilingOptions.m_EnableProfiling)
    {
        // Wait (up to 3 seconds) for the profiling service to activate.
        m_ProfilingService->WaitForProfilingServiceActivation(3000);
    }
    m_DeviceSpec.AddSupportedBackends(supportedBackends);
    ARMNN_LOG(info) << "Initialization time: " << std::setprecision(2)
                    << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms.";
}
RuntimeImpl::~RuntimeImpl()
{
    std::vector<int> networkIDs;
    try
    {
        std::transform(m_LoadedNetworks.begin(), m_LoadedNetworks.end(),
                       std::back_inserter(networkIDs),
                       [](const auto& pair) { return pair.first; });
    }
    catch (const std::exception& e)
    {
        // Report to stderr: the logging framework itself may throw here.
        std::cerr << "WARNING: An error has occurred when getting the IDs of the networks to unload: " << e.what()
                  << "\nSome of the loaded networks may not be unloaded" << std::endl;
    }
    for (auto networkID : networkIDs)
    {
        try
        {
            UnloadNetwork(networkID);
        }
        catch (const std::exception& e)
        {
            std::cerr << "WARNING: An error has occurred when unloading network " << networkID << ": " << e.what()
                      << std::endl;
        }
    }
#if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
    // Clear all dynamic backends.
    DynamicBackendUtils::DeregisterDynamicBackends(m_DeviceSpec.GetDynamicBackends());
    m_DeviceSpec.ClearDynamicBackends();
#endif
    m_BackendContexts.clear();
    // Remove the custom allocators that this runtime instance added.
    std::for_each(m_AllocatorsAddedByThisRuntime.begin(), m_AllocatorsAddedByThisRuntime.end(),
                  [](BackendId id) { BackendRegistryInstance().DeregisterAllocator(id); });
}
LoadedNetwork* RuntimeImpl::GetLoadedNetworkPtr(NetworkId networkId) const
{
#if !defined(ARMNN_DISABLE_THREADS)
    std::lock_guard<std::mutex> lockGuard(m_Mutex);
#endif
    return m_LoadedNetworks.at(networkId).get();
}
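
// The per-network entry points below resolve the LoadedNetwork once via
// GetLoadedNetworkPtr and then delegate the call to it.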
std::vector<ImportedInputId> RuntimeImpl::ImportInputs(NetworkId networkId, const InputTensors& inputTensors,
                                                       MemorySource forceImportMemorySource)
{
    return GetLoadedNetworkPtr(networkId)->ImportInputs(inputTensors, forceImportMemorySource);
}

std::vector<ImportedOutputId> RuntimeImpl::ImportOutputs(NetworkId networkId, const OutputTensors& outputTensors,
                                                         MemorySource forceImportMemorySource)
{
    return GetLoadedNetworkPtr(networkId)->ImportOutputs(outputTensors, forceImportMemorySource);
}
Status RuntimeImpl::EnqueueWorkload(NetworkId networkId,
                                    const InputTensors& inputTensors,
                                    const OutputTensors& outputTensors,
                                    std::vector<ImportedInputId> preImportedInputIds,
                                    std::vector<ImportedOutputId> preImportedOutputIds)
{
    const auto startTime = armnn::GetTimeNow();

    LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);

    if (!loadedNetwork)
    {
        ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";
        return Status::Failure;
    }
    if (loadedNetwork->IsAsyncEnabled())
    {
        ARMNN_LOG(error) << "Network " << networkId << " is async enabled.";
        return Status::Failure;
    }
    static thread_local NetworkId lastId = networkId;
    if (lastId != networkId)
    {
        RegisterDebugCallback(networkId, nullptr);
    }
    lastId = networkId;
    auto status = loadedNetwork->EnqueueWorkload(inputTensors, outputTensors,
                                                 preImportedInputIds, preImportedOutputIds);
    // Only fire the AfterEnqueueWorkload events if tensors were pre-imported.
    if (!preImportedInputIds.empty() || !preImportedOutputIds.empty())
    {
        for (auto&& context : m_BackendContexts)
        {
            context.second->AfterEnqueueWorkload(networkId);
        }
    }
    ARMNN_LOG(info) << "Execution time: " << std::setprecision(2)
                    << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms.";

    return status;
}
Status RuntimeImpl::Execute(IWorkingMemHandle& iWorkingMemHandle,
                            const InputTensors& inputTensors,
                            const OutputTensors& outputTensors,
                            std::vector<ImportedInputId> preImportedInputs,
                            std::vector<ImportedOutputId> preImportedOutputs)
{
    const auto startTime = armnn::GetTimeNow();

    NetworkId networkId = iWorkingMemHandle.GetNetworkId();
    LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);

    if (!loadedNetwork)
    {
        ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";
        return Status::Failure;
    }
    if (!loadedNetwork->IsAsyncEnabled())
    {
        ARMNN_LOG(error) << "Attempting execute " << networkId << " when it is not async enabled.";
        return Status::Failure;
    }
    auto status = loadedNetwork->Execute(inputTensors,
                                         outputTensors,
                                         iWorkingMemHandle,
                                         preImportedInputs,
                                         preImportedOutputs);
    ARMNN_LOG(info) << "Execution time: " << std::setprecision(2)
                    << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms.";

    return status;
}
std::unique_ptr<IWorkingMemHandle> RuntimeImpl::CreateWorkingMemHandle(NetworkId networkId)
{
    LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);

    if (!loadedNetwork)
    {
        ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";
        return nullptr;
    }
    if (!loadedNetwork->IsAsyncEnabled())
    {
        ARMNN_LOG(error) << "Network " << networkId << " is not async enabled.";
        return nullptr;
    }
    static thread_local NetworkId lastId = networkId;
    if (lastId != networkId)
    {
        RegisterDebugCallback(networkId, nullptr);
    }
    lastId = networkId;

    return loadedNetwork->CreateWorkingMemHandle(networkId);
}
void RuntimeImpl::RegisterDebugCallback(NetworkId networkId, const DebugCallbackFunction& func)
{
    LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
    loadedNetwork->RegisterDebugCallback(func);
}
#if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
void RuntimeImpl::LoadDynamicBackends(const std::string& overrideBackendPath)