#if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
// (dynamic backend includes elided)
#endif

#include <client/include/backends/IBackendProfiling.hpp>
#include <common/include/LabelsAndEventClasses.hpp>

using namespace armnn;
// IRuntime forwards its public API to the RuntimeImpl instance held behind the pRuntimeImpl
// pointer; the surrounding method signatures are elided in this excerpt.

    return pRuntimeImpl->LoadNetwork(networkIdOut, std::move(network));

                             std::string& errorMessage)
    return pRuntimeImpl->LoadNetwork(networkIdOut, std::move(network), errorMessage);

                             std::string& errorMessage,
    return pRuntimeImpl->LoadNetwork(networkIdOut, std::move(network), errorMessage, networkProperties);

    return pRuntimeImpl->GetInputTensorInfo(networkId, layerId);

    return pRuntimeImpl->GetOutputTensorInfo(networkId, layerId);

    return pRuntimeImpl->ImportInputs(networkId, inputTensors, forceImportMemorySource);

    return pRuntimeImpl->ImportOutputs(networkId, outputTensors, forceImportMemorySource);

    return pRuntimeImpl->ClearImportedInputs(networkId, inputIds);

    return pRuntimeImpl->ClearImportedOutputs(networkId, outputIds);

                                 std::vector<ImportedInputId> preImportedInputIds,
                                 std::vector<ImportedOutputId> preImportedOutputIds)
    return pRuntimeImpl->EnqueueWorkload(networkId, inputTensors, outputTensors,
                                         preImportedInputIds, preImportedOutputIds);

                         std::vector<ImportedInputId> preImportedInputs,
                         std::vector<ImportedOutputId> preImportedOutputs)

    return pRuntimeImpl->RegisterDebugCallback(networkId, func);
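// Illustrative usage sketch (not part of this file): a typical client flow through the IRuntime
// facade above. Only the runtime calls are taken from this file; building `network` with INetwork,
// the call to armnn::Optimize, and the inputTensors/outputTensors construction are assumptions
// made for the example.
//
//     armnn::IRuntime::CreationOptions options;
//     armnn::IRuntimePtr runtime = armnn::IRuntime::Create(options);
//
//     armnn::IOptimizedNetworkPtr optNet =
//         armnn::Optimize(*network, {armnn::Compute::CpuRef}, runtime->GetDeviceSpec());
//
//     armnn::NetworkId netId = 0;
//     std::string errorMessage;
//     if (runtime->LoadNetwork(netId, std::move(optNet), errorMessage) == armnn::Status::Success)
//     {
//         runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
//         runtime->UnloadNetwork(netId);
//     }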
int RuntimeImpl::GenerateNetworkId()
{
    return m_NetworkIdCounter++;
}
// The simpler RuntimeImpl::LoadNetwork overloads delegate to the full overload below:
    std::string ignoredErrorMessage;
    return LoadNetwork(networkIdOut, std::move(inNetwork), ignoredErrorMessage);

                                std::string& errorMessage)
    return LoadNetwork(networkIdOut, std::move(inNetwork), errorMessage, networkProperties);
                                std::string& errorMessage,
    auto profiler = inNetwork->GetProfiler();

    networkIdOut = GenerateNetworkId();

    for (auto&& context : m_BackendContexts)
    {
        context.second->BeforeLoadNetwork(networkIdOut);
    }

    // arguments to the elided LoadedNetwork factory call that produces 'loadedNetwork':
        std::unique_ptr<IOptimizedNetwork>(rawNetwork),
        m_ProfilingService.get());

#if !defined(ARMNN_DISABLE_THREADS)
        std::lock_guard<std::mutex> lockGuard(m_Mutex);
#endif
        m_LoadedNetworks[networkIdOut] = std::move(loadedNetwork);

    for (auto&& context : m_BackendContexts)
    {
        context.second->AfterLoadNetwork(networkIdOut);
    }

    if (m_ProfilingService->IsProfilingEnabled())
    {
        m_ProfilingService->IncrementCounterValue(arm::pipe::NETWORK_LOADS);
    }
    bool unloadOk = true;
    for (auto&& context : m_BackendContexts)
    {
        unloadOk &= context.second->BeforeUnloadNetwork(networkId);
    }

        // tail of the warning logged when a backend context rejects the unload:
                              "network with ID:" << networkId << " because BeforeUnloadNetwork failed";

    std::unique_ptr<arm::pipe::TimelineUtilityMethods> timelineUtils =
        arm::pipe::TimelineUtilityMethods::GetTimelineUtils(*m_ProfilingService.get());

#if !defined(ARMNN_DISABLE_THREADS)
        std::lock_guard<std::mutex> lockGuard(m_Mutex);
#endif
        auto search = m_LoadedNetworks.find(networkId);
        if (search != m_LoadedNetworks.end())
        {
            arm::pipe::ProfilingGuid networkGuid = search->second->GetNetworkGuid();
            timelineUtils->RecordEvent(networkGuid,
                                       arm::pipe::LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS);
        }

        if (m_LoadedNetworks.erase(networkId) == 0)
        {
            ARMNN_LOG(warning) << "WARNING: RuntimeImpl::UnloadNetwork(): " << networkId << " not found!";
        }

    if (m_ProfilingService->IsProfilingEnabled())
    {
        m_ProfilingService->IncrementCounterValue(arm::pipe::NETWORK_UNLOADS);
    }

    for (auto&& context : m_BackendContexts)
    {
        context.second->AfterUnloadNetwork(networkId);
    }

    ARMNN_LOG(debug) << "RuntimeImpl::UnloadNetwork(): Unloaded network with ID: " << networkId;
    auto it = m_LoadedNetworks.find(networkId);
    if (it != m_LoadedNetworks.end())
    {
        auto& loadedNetwork = it->second;
        return loadedNetwork->GetProfiler();
    }

    // Send the structure of every loaded network to the profiling service when profiling is on:
    if (profilingService.IsProfilingEnabled())
    {
        LoadedNetworks::iterator it = m_LoadedNetworks.begin();
        while (it != m_LoadedNetworks.end())
        {
            auto& loadedNetwork = it->second;
            loadedNetwork->SendNetworkStructure(profilingService);
// RuntimeImpl constructor (signature elided):
    : m_NetworkIdCounter(0)

    m_ProfilingService = arm::pipe::IProfilingService::CreateProfilingService(
        arm::pipe::MAX_ARMNN_COUNTER,
        arm::pipe::ARMNN_SOFTWARE_INFO,
        arm::pipe::ARMNN_SOFTWARE_VERSION,
        arm::pipe::ARMNN_HARDWARE_VERSION,

    // thrown if timeline reporting is requested while profiling is disabled:
        "It is not possible to enable timeline reporting without profiling being enabled");
#if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
    // (dynamic backend loading elided)
#endif

        // Each registered backend id is instantiated through its registered factory:
        auto backend = factoryFun();

        // A custom allocator mapped to this backend id must not be null:
            customAllocatorMapIterator->second == nullptr)
#if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
            // (dynamic backend cleanup elided)
#endif
            throw armnn::Exception("Allocator associated with id " + id.Get() + " is null");

        // In protected mode, only backends that support protected content allocation are registered:
            BackendCapability protectedContentCapability {"ProtectedContentAllocation", true};
                                   << " is not registered as it does not support protected content allocation.";

            // Protected mode also needs a custom allocator whose memory source is DmaBufProtected:
                if (customAllocatorMapIterator->second->GetMemorySourceType()
                    if (!backend->UseCustomMemoryAllocator(customAllocatorMapIterator->second, err))
                                         << " reported an error when entering protected mode. Backend won't be"
                                         << " used. ErrorMsg: " << err;
                    m_AllocatorsAddedByThisRuntime.emplace(id);

                    ARMNN_LOG(error) << "The CustomAllocator provided with the runtime options doesn't support "
                                        "protected memory. Protected mode can't be activated. The backend "
                                     << id
                                     << " is not going to be used. MemorySource must be MemorySource::DmaBufProtected";

                ARMNN_LOG(error) << "Protected mode can't be activated for backend: "
                                 << id
                                 << " no custom allocator was provided to the runtime options.";

        // Outside protected mode, a supplied custom allocator is still handed to the backend:
                if (!backend->UseCustomMemoryAllocator(customAllocatorMapIterator->second, err))
                                     << " reported an error when trying to use the provided custom allocator."
                                        " Backend won't be used."
                                     << " ErrorMsg: " << err;
                m_AllocatorsAddedByThisRuntime.emplace(id);
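// Illustrative sketch (not part of this file): supplying a custom allocator through the runtime
// options so the code above can register it for a backend. m_CustomAllocatorMap and m_ProtectedMode
// are assumed members of IRuntime::CreationOptions, and MyDmaBufAllocator is a hypothetical
// ICustomAllocator whose GetMemorySourceType() returns MemorySource::DmaBufProtected.
//
//     armnn::IRuntime::CreationOptions options;
//     options.m_ProtectedMode = true;
//     options.m_CustomAllocatorMap["GpuAcc"] = std::make_shared<MyDmaBufAllocator>();
//     armnn::IRuntimePtr runtime = armnn::IRuntime::Create(options);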
        // Register a memory optimizer strategy supplied directly through the runtime options:
            id, customMemoryOptimizerStrategyMapIterator->second);

                        << customMemoryOptimizerStrategyMapIterator->second->GetName()
                        << " set for the backend " << id << ".";
        std::string memoryOptimizerStrategyName = "";

            if (name == "MemoryOptimizerStrategy")
            {
                memoryOptimizerStrategyName = ParseStringBackendOption(value, "");
            }

        if (memoryOptimizerStrategyName != "")
        {
            std::shared_ptr<IMemoryOptimizerStrategy> strategy =

                ARMNN_LOG(warning) << "MemoryOptimizerStrategy: " << memoryOptimizerStrategyName
                                   << " was not found.";

                                << memoryOptimizerStrategyName << " set for the backend " << id << ".";

                                << " does not have multi-axis packing capability and cannot support"
                                << " MemoryOptimizerStrategy: " << memoryOptimizerStrategyName << ".";
        auto context = backend->CreateBackendContext(options);

        // Backends that provide a context are tracked so they can be notified of network lifecycle events:
        m_BackendContexts.emplace(std::make_pair(id, std::move(context)));

        supportedBackends.emplace(id);

        unique_ptr<arm::pipe::IBackendProfiling> profilingIface =
            arm::pipe::IBackendProfiling::CreateBackendProfiling(
                *m_ProfilingService.get(),

        // Backends may also provide a profiling context, which is registered with the profiling service:
        auto profilingContext = backend->CreateBackendProfilingContext(options, profilingIface);

        if (profilingContext)
        {
            m_ProfilingService->AddBackendProfilingContext(id, profilingContext);
        }

    m_ProfilingService->ConfigureProfilingService(

    if (options.m_ProfilingOptions.m_EnableProfiling)
        m_ProfilingService->WaitForProfilingServiceActivation(3000);

    m_DeviceSpec.AddSupportedBackends(supportedBackends);

    ARMNN_LOG(info) << "Initialization time: " << std::setprecision(2)
RuntimeImpl::~RuntimeImpl()
{
    std::vector<int> networkIDs;

        // Collect the ids of all still-loaded networks so they can be unloaded below:
        std::transform(m_LoadedNetworks.begin(), m_LoadedNetworks.end(),
                       std::back_inserter(networkIDs),
                       [](const auto& pair) { return pair.first; });

    catch (const std::exception& e)
        std::cerr << "WARNING: An error has occurred when getting the IDs of the networks to unload: " << e.what()
                  << "\nSome of the loaded networks may not be unloaded" << std::endl;

    for (auto networkID : networkIDs)

    catch (const std::exception& e)
        std::cerr << "WARNING: An error has occurred when unloading network " << networkID << ": " << e.what()

#if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
    // (dynamic backend deregistration elided)
#endif

    m_BackendContexts.clear();

    for_each(m_AllocatorsAddedByThisRuntime.begin(), m_AllocatorsAddedByThisRuntime.end(),
             [](BackendId id) { BackendRegistryInstance().DeregisterAllocator(id); });
#if !defined(ARMNN_DISABLE_THREADS)
    std::lock_guard<std::mutex> lockGuard(m_Mutex);
#endif
    return m_LoadedNetworks.at(networkId).get();

    return GetLoadedNetworkPtr(networkId)->ImportInputs(inputTensors, forceImportMemorySource);

    return GetLoadedNetworkPtr(networkId)->ImportOutputs(outputTensors, forceImportMemorySource);
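// Illustrative sketch (not part of this file): pre-importing input/output memory and then
// referring to it by id in EnqueueWorkload, followed by the cleanup calls. The MemorySource value
// and the empty InputTensors/OutputTensors arguments are assumptions made for the example.
//
//     std::vector<armnn::ImportedInputId> inIds =
//         runtime->ImportInputs(netId, inputTensors, armnn::MemorySource::Malloc);
//     std::vector<armnn::ImportedOutputId> outIds =
//         runtime->ImportOutputs(netId, outputTensors, armnn::MemorySource::Malloc);
//
//     runtime->EnqueueWorkload(netId, {}, {}, inIds, outIds);
//
//     runtime->ClearImportedInputs(netId, inIds);
//     runtime->ClearImportedOutputs(netId, outIds);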
                                   std::vector<ImportedInputId> preImportedInputIds,
                                   std::vector<ImportedOutputId> preImportedOutputIds)

    LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);

        ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";

        ARMNN_LOG(error) << "Network " << networkId << " is async enabled.";

    static thread_local NetworkId lastId = networkId;
    if (lastId != networkId)

    auto status = loadedNetwork->EnqueueWorkload(inputTensors, outputTensors,
                                                 preImportedInputIds, preImportedOutputIds);

    // The AfterEnqueueWorkload callbacks are only made when pre-imported tensors were used:
    if (!preImportedInputIds.empty() || !preImportedOutputIds.empty())
    {
        for (auto&& context : m_BackendContexts)
        {
            context.second->AfterEnqueueWorkload(networkId);
        }
    }

    ARMNN_LOG(info) << "Execution time: " << std::setprecision(2)
                             std::vector<ImportedInputId> preImportedInputs,
                             std::vector<ImportedOutputId> preImportedOutputs)

    LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);

        ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";

        ARMNN_LOG(error) << "Attempting to execute " << networkId << " when it is not async enabled.";

    auto status = loadedNetwork->Execute(inputTensors,

    ARMNN_LOG(info) << "Execution time: " << std::setprecision(2)
    LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);

        ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";

        ARMNN_LOG(error) << "Network " << networkId << " is not async enabled.";

    static thread_local NetworkId lastId = networkId;
    if (lastId != networkId)

    LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
#if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
void RuntimeImpl::LoadDynamicBackends(const std::string& overrideBackendPath)