ArmNN
 24.08
RuntimeImpl Struct Reference (final)

#include <Runtime.hpp>

Inheritance diagram for RuntimeImpl:
[legend]
Collaboration diagram for RuntimeImpl:
[legend]

Public Member Functions

Status LoadNetwork (NetworkId &networkIdOut, IOptimizedNetworkPtr network)
 Loads a complete network into the Runtime. More...
 
Status LoadNetwork (NetworkId &networkIdOut, IOptimizedNetworkPtr network, std::string &errorMessage)
 Load a complete network into the IRuntime. More...
 
Status LoadNetwork (NetworkId &networkIdOut, IOptimizedNetworkPtr network, std::string &errorMessage, const INetworkProperties &networkProperties)
 
armnn::TensorInfo GetInputTensorInfo (NetworkId networkId, LayerBindingId layerId) const
 
armnn::TensorInfo GetOutputTensorInfo (NetworkId networkId, LayerBindingId layerId) const
 
std::vector< ImportedInputIdImportInputs (NetworkId networkId, const InputTensors &inputTensors, MemorySource forceImportMemorySource)
 
std::vector< ImportedOutputId > ImportOutputs (NetworkId networkId, const OutputTensors &outputTensors, MemorySource forceImportMemorySource)
 
void ClearImportedInputs (NetworkId networkId, const std::vector< ImportedInputId > inputIds)
 
void ClearImportedOutputs (NetworkId networkId, const std::vector< ImportedOutputId > outputIds)
 
Status EnqueueWorkload (NetworkId networkId, const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputIds={}, std::vector< ImportedOutputId > preImportedOutputIds={})
 
Status Execute (IWorkingMemHandle &workingMemHandle, const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputs, std::vector< ImportedOutputId > preImportedOutputs)
 This is an experimental function. More...
 
Status UnloadNetwork (NetworkId networkId)
 Unloads a network from the Runtime. More...
 
const IDeviceSpec & GetDeviceSpec () const
 
const std::shared_ptr< IProfiler > GetProfiler (NetworkId networkId) const
 Gets the profiler corresponding to the given network id. More...
 
std::unique_ptr< IWorkingMemHandle > CreateWorkingMemHandle (NetworkId networkId)
 Create a new unique WorkingMemHandle object. More...
 
void RegisterDebugCallback (NetworkId networkId, const DebugCallbackFunction &func)
 Registers a callback function to debug layers performing custom computations on intermediate tensors. More...
 
 RuntimeImpl (const IRuntime::CreationOptions &options)
 Creates a runtime for workload execution. More...
 
 ~RuntimeImpl ()
 
void ReportStructure (arm::pipe::IProfilingService &profilingService) override
 
void InitialiseProfilingService (arm::pipe::IProfilingService &profilingService) override
 

Friends

void RuntimeLoadedNetworksReserve (RuntimeImpl *runtime)
 
arm::pipe::IProfilingService & GetProfilingService (RuntimeImpl *runtime)
 

Detailed Description

Definition at line 30 of file Runtime.hpp.

Constructor & Destructor Documentation

◆ RuntimeImpl()

RuntimeImpl ( const IRuntime::CreationOptions & options )

Creates a runtime for workload execution.

Definition at line 323 of file Runtime.cpp.

324  : m_NetworkIdCounter(0)
325 {
326  m_ProfilingService = arm::pipe::IProfilingService::CreateProfilingService(
327  arm::pipe::MAX_ARMNN_COUNTER,
328  *this,
329  arm::pipe::ARMNN_SOFTWARE_INFO,
330  arm::pipe::ARMNN_SOFTWARE_VERSION,
331  arm::pipe::ARMNN_HARDWARE_VERSION,
332  *this);
333  const auto start_time = armnn::GetTimeNow();
334  ARMNN_LOG(info) << "ArmNN v" << ARMNN_VERSION;
336  {
337  throw RuntimeException(
338  "It is not possible to enable timeline reporting without profiling being enabled");
339  }
340 #if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
341  // Load any available/compatible dynamic backend before the runtime
342  // goes through the backend registry
343  LoadDynamicBackends(options.m_DynamicBackendsPath);
344 #endif
345  armnn::BackendIdSet supportedBackends;
346  for (const auto& id : BackendRegistryInstance().GetBackendIds())
347  {
348  // Store backend contexts for the supported ones
349  try {
350  auto factoryFun = BackendRegistryInstance().GetFactory(id);
351 
352  if (!factoryFun)
353  {
354  throw armnn::NullPointerException("Factory Function should not be null.");
355  }
356 
357  auto backend = factoryFun();
358 
359  auto customAllocatorMapIterator = options.m_CustomAllocatorMap.find(id);
360  if (customAllocatorMapIterator != options.m_CustomAllocatorMap.end() &&
361  customAllocatorMapIterator->second == nullptr)
362  {
363 #if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
364  // We need to manually clean up the dynamic backends before throwing an exception.
366  m_DeviceSpec.ClearDynamicBackends();
367 #endif
368  throw armnn::Exception("Allocator associated with id " + id.Get() + " is null");
369  }
370 
371  // If the runtime is created in protected mode only add backends that support this mode
372  if (options.m_ProtectedMode)
373  {
374  // check if backend supports ProtectedMode
376  BackendCapability protectedContentCapability {"ProtectedContentAllocation", true};
377  if (!HasMatchingCapability(protectedContentCapability, id))
378  {
379  // Protected Content Allocation is not supported by the backend
380  // backend should not be registered
381  ARMNN_LOG(warning) << "Backend "
382  << id
383  << " is not registered as does not support protected content allocation.";
384  continue;
385  }
386  // The user is responsible to provide a custom memory allocator which allows to allocate
387  // protected memory
388  if (customAllocatorMapIterator != options.m_CustomAllocatorMap.end())
389  {
390  std::string err;
391  if (customAllocatorMapIterator->second->GetMemorySourceType()
393  {
394  if (!backend->UseCustomMemoryAllocator(customAllocatorMapIterator->second, err))
395  {
396  ARMNN_LOG(error) << "The backend "
397  << id
398  << " reported an error when entering protected mode. Backend won't be"
399  << " used. ErrorMsg: " << err;
400  continue;
401  }
402  // No errors so register the Custom Allocator with the BackendRegistry
403  BackendRegistryInstance().RegisterAllocator(id, customAllocatorMapIterator->second);
404  m_AllocatorsAddedByThisRuntime.emplace(id);
405  }
406  else
407  {
408  ARMNN_LOG(error) << "The CustomAllocator provided with the runtime options doesn't support "
409  "protected memory. Protected mode can't be activated. The backend "
410  << id
411  << " is not going to be used. MemorySource must be MemorySource::DmaBufProtected";
412  continue;
413  }
414  }
415  else
416  {
417  ARMNN_LOG(error) << "Protected mode can't be activated for backend: "
418  << id
419  << " no custom allocator was provided to the runtime options.";
420  continue;
421  }
422  }
423  else
424  {
425  // If a custom memory allocator is provided make the backend use that instead of the default
426  if (customAllocatorMapIterator != options.m_CustomAllocatorMap.end())
427  {
428  std::string err;
429  if (!backend->UseCustomMemoryAllocator(customAllocatorMapIterator->second, err))
430  {
431  ARMNN_LOG(error) << "The backend "
432  << id
433  << " reported an error when trying to use the provided custom allocator."
434  " Backend won't be used."
435  << " ErrorMsg: " << err;
436  continue;
437  }
438  // No errors so register the Custom Allocator with the BackendRegistry
439  BackendRegistryInstance().RegisterAllocator(id, customAllocatorMapIterator->second);
440  m_AllocatorsAddedByThisRuntime.emplace(id);
441  }
442  }
443 
444  // check if custom memory optimizer strategy map is set
445  if (!options.m_MemoryOptimizerStrategyMap.empty())
446  {
447  auto customMemoryOptimizerStrategyMapIterator = options.m_MemoryOptimizerStrategyMap.find(id);
448  // if a memory optimizer strategy is provided make the backend use that instead of the default
449  if (customMemoryOptimizerStrategyMapIterator != options.m_MemoryOptimizerStrategyMap.end())
450  {
451  // no errors.. register the memory optimizer strategy with the BackendRegistry
453  id, customMemoryOptimizerStrategyMapIterator->second);
454 
455  ARMNN_LOG(info) << "MemoryOptimizerStrategy "
456  << customMemoryOptimizerStrategyMapIterator->second->GetName()
457  << " set for the backend " << id << ".";
458  }
459  }
460  else
461  {
462  // check if to use one of the existing memory optimizer strategies is set
463  std::string memoryOptimizerStrategyName = "";
464  ParseOptions(options.m_BackendOptions, id, [&](std::string name, const BackendOptions::Var& value)
465  {
466  if (name == "MemoryOptimizerStrategy")
467  {
468  memoryOptimizerStrategyName = ParseStringBackendOption(value, "");
469  }
470  });
471  if (memoryOptimizerStrategyName != "")
472  {
473  std::shared_ptr<IMemoryOptimizerStrategy> strategy =
474  GetMemoryOptimizerStrategy(memoryOptimizerStrategyName);
475 
476  if (!strategy)
477  {
478  ARMNN_LOG(warning) << "MemoryOptimizerStrategy: " << memoryOptimizerStrategyName
479  << " was not found.";
480  }
481  else
482  {
484  auto strategyType = GetMemBlockStrategyTypeName(strategy->GetMemBlockStrategyType());
485  BackendCapability memOptimizeStrategyCapability {strategyType, true};
486  if (HasMatchingCapability(memOptimizeStrategyCapability, id))
487  {
489 
490  ARMNN_LOG(info) << "MemoryOptimizerStrategy: "
491  << memoryOptimizerStrategyName << " set for the backend " << id << ".";
492  }
493  else
494  {
495  ARMNN_LOG(warning) << "Backend "
496  << id
497  << " does not have multi-axis packing capability and cannot support"
498  << "MemoryOptimizerStrategy: " << memoryOptimizerStrategyName << ".";
499  }
500  }
501  }
502  }
503 
504  auto context = backend->CreateBackendContext(options);
505 
506  // backends are allowed to return nullptrs if they
507  // don't wish to create a backend specific context
508  if (context)
509  {
510  m_BackendContexts.emplace(std::make_pair(id, std::move(context)));
511  }
512  supportedBackends.emplace(id);
513 
514  unique_ptr<arm::pipe::IBackendProfiling> profilingIface =
515  arm::pipe::IBackendProfiling::CreateBackendProfiling(
517  *m_ProfilingService.get(),
518  id.Get());
519 
520  // Backends may also provide a profiling context. Ask for it now.
521  auto profilingContext = backend->CreateBackendProfilingContext(options, profilingIface);
522  // Backends that don't support profiling will return a null profiling context.
523  if (profilingContext)
524  {
525  // Pass the context onto the profiling service.
526  m_ProfilingService->AddBackendProfilingContext(id, profilingContext);
527  }
528  }
529  catch (const BackendUnavailableException&)
530  {
531  // Ignore backends which are unavailable
532  }
533  }
534 
535  BackendRegistryInstance().SetProfilingService(*m_ProfilingService.get());
536  // pass configuration info to the profiling service
537  m_ProfilingService->ConfigureProfilingService(
540  {
541  // try to wait for the profiling service to initialise
542  m_ProfilingService->WaitForProfilingServiceActivation(3000);
543  }
544 
545  m_DeviceSpec.AddSupportedBackends(supportedBackends);
546 
547  ARMNN_LOG(info) << "Initialization time: " << std::setprecision(2)
548  << std::fixed << armnn::GetTimeDuration(start_time).count() << " ms.";
549 }

References ARMNN_LOG, ARMNN_VERSION, armnn::BackendRegistryInstance(), DeviceSpec::ClearDynamicBackends(), arm::pipe::ConvertExternalProfilingOptions(), DynamicBackendUtils::DeregisterDynamicBackends(), armnn::DmaBufProtected, armnn::error, DeviceSpec::GetDynamicBackends(), BackendRegistry::GetFactory(), armnn::GetMemBlockStrategyTypeName(), armnn::GetMemoryOptimizerStrategy(), armnn::GetTimeNow(), armnn::HasMatchingCapability(), armnn::info, IRuntime::CreationOptions::m_BackendOptions, IRuntime::CreationOptions::m_CustomAllocatorMap, IRuntime::CreationOptions::m_DynamicBackendsPath, IRuntime::CreationOptions::ExternalProfilingOptions::m_EnableProfiling, IRuntime::CreationOptions::m_MemoryOptimizerStrategyMap, IRuntime::CreationOptions::m_ProfilingOptions, IRuntime::CreationOptions::m_ProtectedMode, IRuntime::CreationOptions::ExternalProfilingOptions::m_TimelineEnabled, armnn::ParseOptions(), BackendRegistry::RegisterAllocator(), BackendRegistry::RegisterMemoryOptimizerStrategy(), and armnn::warning.

◆ ~RuntimeImpl()

Definition at line 551 of file Runtime.cpp.

552 {
553  const auto startTime = armnn::GetTimeNow();
554  std::vector<int> networkIDs;
555  try
556  {
557  // Coverity fix: The following code may throw an exception of type std::length_error.
558  std::transform(m_LoadedNetworks.begin(), m_LoadedNetworks.end(),
559  std::back_inserter(networkIDs),
560  [](const auto &pair) { return pair.first; });
561  }
562  catch (const std::exception& e)
563  {
564  // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
565  // exception of type std::length_error.
566  // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
567  std::cerr << "WARNING: An error has occurred when getting the IDs of the networks to unload: " << e.what()
568  << "\nSome of the loaded networks may not be unloaded" << std::endl;
569  }
570  // We then proceed to unload all the networks which IDs have been appended to the list
571  // up to the point the exception was thrown (if any).
572 
573  for (auto networkID : networkIDs)
574  {
575  try
576  {
577  // Coverity fix: UnloadNetwork() may throw an exception of type std::length_error,
578  // boost::log::v2s_mt_posix::odr_violation or boost::log::v2s_mt_posix::system_error
579  UnloadNetwork(networkID);
580  }
581  catch (const std::exception& e)
582  {
583  // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
584  // exception of type std::length_error.
585  // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
586  std::cerr << "WARNING: An error has occurred when unloading network " << networkID << ": " << e.what()
587  << std::endl;
588  }
589  }
590 #if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
591  // Clear all dynamic backends.
593  m_DeviceSpec.ClearDynamicBackends();
594 #endif
595  m_BackendContexts.clear();
596 
598  // Remove custom allocators that this runtime has added.
599  // Note: that as backends can be per process and there can be many instances of a runtime in a process an allocator
600  // may have been overwritten by another runtime.
601  for_each(m_AllocatorsAddedByThisRuntime.begin(), m_AllocatorsAddedByThisRuntime.end(),
602  [](BackendId id) {BackendRegistryInstance().DeregisterAllocator(id);});
603 
604  ARMNN_LOG(info) << "Shutdown time: " << std::setprecision(2)
605  << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms.";
606 }

References ARMNN_LOG, armnn::BackendRegistryInstance(), DeviceSpec::ClearDynamicBackends(), DynamicBackendUtils::DeregisterDynamicBackends(), DeviceSpec::GetDynamicBackends(), armnn::GetTimeDuration(), armnn::GetTimeNow(), armnn::info, BackendRegistry::SetProfilingService(), and RuntimeImpl::UnloadNetwork().

Member Function Documentation

◆ ClearImportedInputs()

void ClearImportedInputs ( NetworkId networkId,
const std::vector< ImportedInputId > inputIds 
)

Definition at line 638 of file Runtime.cpp.

639 {
640  return GetLoadedNetworkPtr(networkId)->ClearImportedInputs(inputIds);
641 }

References LoadedNetwork::ClearImportedInputs().

◆ ClearImportedOutputs()

void ClearImportedOutputs ( NetworkId networkId,
const std::vector< ImportedOutputId > outputIds 
)

Definition at line 642 of file Runtime.cpp.

643 {
644  return GetLoadedNetworkPtr(networkId)->ClearImportedOutputs(outputIds);
645 }

References LoadedNetwork::ClearImportedOutputs().

◆ CreateWorkingMemHandle()

std::unique_ptr< IWorkingMemHandle > CreateWorkingMemHandle ( NetworkId  networkId)

Create a new unique WorkingMemHandle object.

Create multiple handles if you wish to have overlapped Execution by calling this function from different threads.

Definition at line 738 of file Runtime.cpp.

739 {
740  LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
741 
742  if (!loadedNetwork)
743  {
744  ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";
745  return nullptr;
746  }
747  if (!loadedNetwork->IsAsyncEnabled())
748  {
749  ARMNN_LOG(error) << "Network " << networkId << " is not async enabled.";
750  return nullptr;
751  }
753 
754  ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "CreateWorkingMemHandle");
755 
756  static thread_local NetworkId lastId = networkId;
757  if (lastId != networkId)
758  {
759  LoadedNetworkFuncSafe(lastId, [](LoadedNetwork* network)
760  {
761  network->FreeWorkingMemory();
762  });
763  }
764  lastId=networkId;
765 
766  return loadedNetwork->CreateWorkingMemHandle(networkId);
767 }

References ARMNN_LOG, ARMNN_SCOPED_PROFILING_EVENT, LoadedNetwork::CreateWorkingMemHandle(), armnn::error, LoadedNetwork::FreeWorkingMemory(), ProfilerManager::GetInstance(), LoadedNetwork::GetProfiler(), LoadedNetwork::IsAsyncEnabled(), ProfilerManager::RegisterProfiler(), and armnn::Undefined.

◆ EnqueueWorkload()

Status EnqueueWorkload ( NetworkId networkId,
const InputTensors & inputTensors,
const OutputTensors & outputTensors,
std::vector< ImportedInputId > preImportedInputIds = {},
std::vector< ImportedOutputId > preImportedOutputIds = {} 
)

Definition at line 647 of file Runtime.cpp.

652 {
653  const auto startTime = armnn::GetTimeNow();
654 
655  LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
656 
657  if (!loadedNetwork)
658  {
659  ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";
660  return Status::Failure;
661  }
662  if (loadedNetwork->IsAsyncEnabled())
663  {
664  ARMNN_LOG(error) << "Network " << networkId << " is async enabled.";
665  return Status::Failure;
666  }
668 
670 
671  static thread_local NetworkId lastId = networkId;
672  if (lastId != networkId)
673  {
674  LoadedNetworkFuncSafe(lastId, [](LoadedNetwork* network)
675  {
676  network->FreeWorkingMemory();
677  });
678  }
679  lastId=networkId;
680 
681  auto status = loadedNetwork->EnqueueWorkload(inputTensors, outputTensors,
682  preImportedInputIds, preImportedOutputIds);
683 
684 
685  // Check if we imported, if not there's no need to call the After EnqueueWorkload events
686  if (!preImportedInputIds.empty() || !preImportedOutputIds.empty())
687  {
688  // Call After EnqueueWorkload events
689  for (auto&& context : m_BackendContexts)
690  {
691  context.second->AfterEnqueueWorkload(networkId);
692  }
693  }
694  ARMNN_LOG(info) << "Execution time: " << std::setprecision(2)
695  << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms.";
696  return status;
697 }

References ARMNN_LOG, ARMNN_SCOPED_PROFILING_EVENT, LoadedNetwork::EnqueueWorkload(), armnn::error, armnn::Failure, LoadedNetwork::FreeWorkingMemory(), ProfilerManager::GetInstance(), LoadedNetwork::GetProfiler(), armnn::GetTimeDuration(), armnn::GetTimeNow(), armnn::info, LoadedNetwork::IsAsyncEnabled(), ProfilerManager::RegisterProfiler(), and armnn::Undefined.

◆ Execute()

Status Execute ( IWorkingMemHandle & workingMemHandle,
const InputTensors & inputTensors,
const OutputTensors & outputTensors,
std::vector< ImportedInputId > preImportedInputs,
std::vector< ImportedOutputId > preImportedOutputs 
)

This is an experimental function.

Evaluates a network using input in inputTensors and outputs filled into outputTensors. This function performs a thread safe execution of the network. Returns once execution is complete. Will block until this and any other thread using the same workingMem object completes.

Definition at line 699 of file Runtime.cpp.

704 {
705  const auto startTime = armnn::GetTimeNow();
706 
707  NetworkId networkId = iWorkingMemHandle.GetNetworkId();
708  LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
709 
710  if (!loadedNetwork)
711  {
712  ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";
713  return Status::Failure;
714  }
715  if (!loadedNetwork->IsAsyncEnabled())
716  {
717  ARMNN_LOG(error) << "Attempting execute " << networkId << " when it is not async enabled.";
718  return Status::Failure;
719  }
721 
723 
724  auto status = loadedNetwork->Execute(inputTensors,
725  outputTensors,
726  iWorkingMemHandle,
727  preImportedInputs,
728  preImportedOutputs);
729 
730  ARMNN_LOG(info) << "Execution time: " << std::setprecision(2)
731  << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms.";
732 
733  return status;
734 }

References ARMNN_LOG, ARMNN_SCOPED_PROFILING_EVENT, armnn::error, LoadedNetwork::Execute(), armnn::Failure, ProfilerManager::GetInstance(), IWorkingMemHandle::GetNetworkId(), LoadedNetwork::GetProfiler(), armnn::GetTimeDuration(), armnn::GetTimeNow(), armnn::info, LoadedNetwork::IsAsyncEnabled(), ProfilerManager::RegisterProfiler(), and armnn::Undefined.

◆ GetDeviceSpec()

const IDeviceSpec& GetDeviceSpec ( ) const
inline

Definition at line 90 of file Runtime.hpp.

90 { return m_DeviceSpec; }

◆ GetInputTensorInfo()

TensorInfo GetInputTensorInfo ( NetworkId  networkId,
LayerBindingId  layerId 
) const

Definition at line 616 of file Runtime.cpp.

617 {
618  return GetLoadedNetworkPtr(networkId)->GetInputTensorInfo(layerId);
619 }

References LoadedNetwork::GetInputTensorInfo().

◆ GetOutputTensorInfo()

TensorInfo GetOutputTensorInfo ( NetworkId  networkId,
LayerBindingId  layerId 
) const

Definition at line 621 of file Runtime.cpp.

622 {
623  return GetLoadedNetworkPtr(networkId)->GetOutputTensorInfo(layerId);
624 }

References LoadedNetwork::GetOutputTensorInfo().

◆ GetProfiler()

const std::shared_ptr< IProfiler > GetProfiler ( NetworkId  networkId) const

Gets the profiler corresponding to the given network id.

Parameters
networkId	The id of the network for which to get the profile.
Returns
A pointer to the requested profiler, or nullptr if not found.

Definition at line 290 of file Runtime.cpp.

291 {
292  auto it = m_LoadedNetworks.find(networkId);
293  if (it != m_LoadedNetworks.end())
294  {
295  auto& loadedNetwork = it->second;
296  return loadedNetwork->GetProfiler();
297  }
298 
299  return nullptr;
300 }

◆ ImportInputs()

std::vector< ImportedInputId > ImportInputs ( NetworkId networkId,
const InputTensors & inputTensors,
MemorySource forceImportMemorySource 
)

Definition at line 626 of file Runtime.cpp.

628 {
629  return GetLoadedNetworkPtr(networkId)->ImportInputs(inputTensors, forceImportMemorySource);
630 }

References LoadedNetwork::ImportInputs().

◆ ImportOutputs()

std::vector< ImportedOutputId > ImportOutputs ( NetworkId networkId,
const OutputTensors & outputTensors,
MemorySource forceImportMemorySource 
)

Definition at line 632 of file Runtime.cpp.

634 {
635  return GetLoadedNetworkPtr(networkId)->ImportOutputs(outputTensors, forceImportMemorySource);
636 }

References LoadedNetwork::ImportOutputs().

◆ InitialiseProfilingService()

void InitialiseProfilingService ( arm::pipe::IProfilingService &  profilingService)
override

Definition at line 317 of file Runtime.cpp.

318 {
320  initialiser.InitialiseProfilingService(profilingService);
321 }

References ArmNNProfilingServiceInitialiser::InitialiseProfilingService().

◆ LoadNetwork() [1/3]

Status LoadNetwork ( NetworkId networkIdOut,
IOptimizedNetworkPtr  network 
)

Loads a complete network into the Runtime.

Parameters
[out]	networkIdOut	- Unique identifier for the network is returned in this reference.
[in]	network	- Complete network to load into the Runtime. The runtime takes ownership of the network once passed in.
Returns
armnn::Status

Definition at line 166 of file Runtime.cpp.

167 {
168  std::string ignoredErrorMessage;
169  return LoadNetwork(networkIdOut, std::move(inNetwork), ignoredErrorMessage);
170 }

Referenced by RuntimeImpl::LoadNetwork().

◆ LoadNetwork() [2/3]

Status LoadNetwork ( NetworkId networkIdOut,
IOptimizedNetworkPtr  network,
std::string &  errorMessage 
)

Load a complete network into the IRuntime.

Parameters
[out]	networkIdOut	Unique identifier for the network is returned in this reference.
[in]	network	Complete network to load into the IRuntime.
[out]	errorMessage	Error message if there were any errors. The runtime takes ownership of the network once passed in.
Returns
armnn::Status

Definition at line 172 of file Runtime.cpp.

175 {
176  INetworkProperties networkProperties(
178  return LoadNetwork(networkIdOut, std::move(inNetwork), errorMessage, networkProperties);
179 }

References RuntimeImpl::LoadNetwork(), and armnn::Undefined.

◆ LoadNetwork() [3/3]

Status LoadNetwork ( NetworkId networkIdOut,
IOptimizedNetworkPtr  network,
std::string &  errorMessage,
const INetworkProperties networkProperties 
)

Definition at line 181 of file Runtime.cpp.

185 {
186  // Register the profiler
187  auto profiler = inNetwork->GetProfiler();
189 
190  IOptimizedNetwork* rawNetwork = inNetwork.release();
191 
192  networkIdOut = GenerateNetworkId();
193 
194  for (auto&& context : m_BackendContexts)
195  {
196  context.second->BeforeLoadNetwork(networkIdOut);
197  }
198 
199  unique_ptr<LoadedNetwork> loadedNetwork = LoadedNetwork::MakeLoadedNetwork(
200  std::unique_ptr<IOptimizedNetwork>(rawNetwork),
201  errorMessage,
202  networkProperties,
203  m_ProfilingService.get());
204 
205  if (!loadedNetwork)
206  {
207  return Status::Failure;
208  }
209 
210  {
211 #if !defined(ARMNN_DISABLE_THREADS)
212  std::lock_guard<std::mutex> lockGuard(m_Mutex);
213 #endif
214 
215  // Stores the network
216  m_LoadedNetworks[networkIdOut] = std::move(loadedNetwork);
217  }
218 
219  for (auto&& context : m_BackendContexts)
220  {
221  context.second->AfterLoadNetwork(networkIdOut);
222  }
223 
224  if (m_ProfilingService->IsProfilingEnabled())
225  {
226  m_ProfilingService->IncrementCounterValue(arm::pipe::NETWORK_LOADS);
227  }
228 
229  return Status::Success;
230 }

References armnn::Failure, ProfilerManager::GetInstance(), LoadedNetwork::MakeLoadedNetwork(), ProfilerManager::RegisterProfiler(), and armnn::Success.

◆ RegisterDebugCallback()

void RegisterDebugCallback ( NetworkId  networkId,
const DebugCallbackFunction func 
)

Registers a callback function to debug layers performing custom computations on intermediate tensors.

Parameters
networkId	The id of the network to register the callback.
func	callback function to pass to the debug layer.

Definition at line 769 of file Runtime.cpp.

770 {
771  LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
772  loadedNetwork->RegisterDebugCallback(func);
773 }

References LoadedNetwork::RegisterDebugCallback().

◆ ReportStructure()

void ReportStructure ( arm::pipe::IProfilingService &  profilingService)
override

Definition at line 302 of file Runtime.cpp.

303 {
304  if (profilingService.IsProfilingEnabled())
305  {
306  LoadedNetworks::iterator it = m_LoadedNetworks.begin();
307  while (it != m_LoadedNetworks.end())
308  {
309  auto& loadedNetwork = it->second;
310  loadedNetwork->SendNetworkStructure(profilingService);
311  // Increment the Iterator to point to next entry
312  it++;
313  }
314  }
315 }

◆ UnloadNetwork()

Status UnloadNetwork ( NetworkId  networkId)

Unloads a network from the Runtime.

At the moment this only removes the network from the m_Impl->m_Network. This might need more work in the future to be AndroidNN compliant.

Parameters
[in]	networkId	Unique identifier for the network to be unloaded. Generated in LoadNetwork().
Returns
armnn::Status

Definition at line 232 of file Runtime.cpp.

233 {
234  bool unloadOk = true;
235  for (auto&& context : m_BackendContexts)
236  {
237  unloadOk &= context.second->BeforeUnloadNetwork(networkId);
238  }
239 
240  if (!unloadOk)
241  {
242  ARMNN_LOG(warning) << "RuntimeImpl::UnloadNetwork(): failed to unload "
243  "network with ID:" << networkId << " because BeforeUnloadNetwork failed";
244  return Status::Failure;
245  }
246 
247  std::unique_ptr<arm::pipe::TimelineUtilityMethods> timelineUtils =
248  arm::pipe::TimelineUtilityMethods::GetTimelineUtils(*m_ProfilingService.get());
249  {
250 #if !defined(ARMNN_DISABLE_THREADS)
251  std::lock_guard<std::mutex> lockGuard(m_Mutex);
252 #endif
253 
254  // If timeline recording is on mark the Network end of life
255  if (timelineUtils)
256  {
257  auto search = m_LoadedNetworks.find(networkId);
258  if (search != m_LoadedNetworks.end())
259  {
260  arm::pipe::ProfilingGuid networkGuid = search->second->GetNetworkGuid();
261  timelineUtils->RecordEvent(networkGuid,
262  arm::pipe::LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS);
263  }
264  }
265 
266  if (m_LoadedNetworks.erase(networkId) == 0)
267  {
268  ARMNN_LOG(warning) << "WARNING: RuntimeImpl::UnloadNetwork(): " << networkId << " not found!";
269  return Status::Failure;
270  }
271 
272  if (m_ProfilingService->IsProfilingEnabled())
273  {
274  m_ProfilingService->IncrementCounterValue(arm::pipe::NETWORK_UNLOADS);
275  }
276  }
277 
278  for (auto&& context : m_BackendContexts)
279  {
280  context.second->AfterUnloadNetwork(networkId);
281  }
282 
283  // Unregister the profiler
285 
286  ARMNN_LOG(debug) << "RuntimeImpl::UnloadNetwork(): Unloaded network with ID: " << networkId;
287  return Status::Success;
288 }

References ARMNN_LOG, armnn::debug, armnn::Failure, ProfilerManager::GetInstance(), ProfilerManager::RegisterProfiler(), armnn::Success, and armnn::warning.

Referenced by RuntimeImpl::~RuntimeImpl().

Friends And Related Function Documentation

◆ GetProfilingService

arm::pipe::IProfilingService& GetProfilingService ( RuntimeImpl runtime)
friend

Definition at line 59 of file TestUtils.cpp.

60 {
61  return *(runtime->m_ProfilingService.get());
62 }

◆ RuntimeLoadedNetworksReserve

void RuntimeLoadedNetworksReserve ( RuntimeImpl runtime)
friend

The documentation for this struct was generated from the following files:
armnn::Compute::Undefined
@ Undefined
armnn::DeviceSpec::AddSupportedBackends
void AddSupportedBackends(const BackendIdSet &backendIds, bool isDynamic=false)
Definition: DeviceSpec.hpp:30
armnn::ArmNNProfilingServiceInitialiser
Definition: ArmNNProfilingServiceInitialiser.hpp:14
armnn::LoadedNetwork::GetOutputTensorInfo
TensorInfo GetOutputTensorInfo(LayerBindingId layerId) const
Definition: LoadedNetwork.cpp:733
armnn::ProfilerManager::RegisterProfiler
void RegisterProfiler(IProfiler *profiler)
Definition: Profiling.cpp:609
armnn::LoadedNetwork::EnqueueWorkload
Status EnqueueWorkload(const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputIds={}, std::vector< ImportedOutputId > preImportedOutputIds={})
Single thread execution of the loaded network.
Definition: LoadedNetwork.cpp:872
armnn::LoadedNetwork::RegisterDebugCallback
void RegisterDebugCallback(const DebugCallbackFunction &func)
Definition: LoadedNetwork.cpp:2296
armnn::DeviceSpec::GetDynamicBackends
const BackendIdSet & GetDynamicBackends() const
Definition: DeviceSpec.hpp:48
armnn::BackendIdSet
std::unordered_set< BackendId > BackendIdSet
Definition: BackendId.hpp:193
arm::pipe::ConvertExternalProfilingOptions
ProfilingOptions ConvertExternalProfilingOptions(const armnn::IRuntime::CreationOptions::ExternalProfilingOptions &options)
Definition: ProfilingOptionsConverter.cpp:17
armnn::IRuntime::CreationOptions::m_DynamicBackendsPath
std::string m_DynamicBackendsPath
Setting this value will override the paths set by the DYNAMIC_BACKEND_PATHS compiler directive Only a...
Definition: IRuntime.hpp:99
armnn::GetTimeNow
std::chrono::high_resolution_clock::time_point GetTimeNow()
Definition: Timer.hpp:14
armnn::MemorySource::DmaBufProtected
@ DmaBufProtected
armnn::BackendCapability
BackendCapability
BackendCapability class.
Definition: Types.hpp:286
armnn::IOptimizedNetwork
Definition: INetwork.hpp:908
armnn::LoadedNetwork
Definition: LoadedNetwork.hpp:42
armnn::BackendOptions::BackendOption
Definition: BackendOptions.hpp:215
armnn::BackendRegistry::RegisterMemoryOptimizerStrategy
void RegisterMemoryOptimizerStrategy(const BackendId &id, std::shared_ptr< IMemoryOptimizerStrategy > strategy)
Definition: BackendRegistry.cpp:133
armnn::RuntimeImpl::LoadNetwork
Status LoadNetwork(NetworkId &networkIdOut, IOptimizedNetworkPtr network)
Loads a complete network into the Runtime.
Definition: Runtime.cpp:166
armnn::LoadedNetwork::ImportInputs
std::vector< ImportedInputId > ImportInputs(const InputTensors &inputTensors, MemorySource forceImportMemorySource=MemorySource::Undefined)
Definition: LoadedNetwork.cpp:1486
armnn::IRuntime::CreationOptions::ExternalProfilingOptions::m_TimelineEnabled
bool m_TimelineEnabled
Indicates whether external timeline profiling is enabled or not.
Definition: IRuntime.hpp:141
ARMNN_LOG
#define ARMNN_LOG(severity)
Definition: Logging.hpp:212
armnn::RuntimeImpl::UnloadNetwork
Status UnloadNetwork(NetworkId networkId)
Unloads a network from the Runtime.
Definition: Runtime.cpp:232
armnn::DeviceSpec::ClearDynamicBackends
void ClearDynamicBackends()
Definition: DeviceSpec.hpp:39
armnn::BackendRegistry::SetProfilingService
void SetProfilingService(armnn::Optional< arm::pipe::IProfilingService & > profilingService)
Definition: BackendRegistry.cpp:107
armnn::NetworkId
int NetworkId
Definition: IRuntime.hpp:35
armnn::IRuntime::CreationOptions::m_BackendOptions
std::vector< BackendOptions > m_BackendOptions
Pass backend specific options.
Definition: IRuntime.hpp:190
ARMNN_SCOPED_PROFILING_EVENT
#define ARMNN_SCOPED_PROFILING_EVENT(backendId, name)
Definition: Profiling.hpp:220
armnn::MemorySource::Undefined
@ Undefined
armnn::EmptyOptional
EmptyOptional is used to initialize the Optional class in case we want to have default value for an O...
Definition: Optional.hpp:32
armnn::GetMemoryOptimizerStrategy
std::unique_ptr< IMemoryOptimizerStrategy > GetMemoryOptimizerStrategy(const std::string &strategyName)
Definition: MemoryOptimizerStrategyLibrary.hpp:36
armnn::LoadedNetwork::ImportOutputs
std::vector< ImportedOutputId > ImportOutputs(const OutputTensors &outputTensors, MemorySource forceImportMemorySource=MemorySource::Undefined)
Definition: LoadedNetwork.cpp:1632
armnn::BackendRegistryInstance
BackendRegistry & BackendRegistryInstance()
Definition: BackendRegistry.cpp:15
armnn::Status::Success
@ Success
armnn::INetworkProperties
Definition: IRuntime.hpp:43
armnn::Exception
Base class for all ArmNN exceptions so that users can filter to just those.
Definition: Exceptions.hpp:46
armnn::RuntimeException
Definition: Exceptions.hpp:120
armnn::LoadedNetwork::GetInputTensorInfo
TensorInfo GetInputTensorInfo(LayerBindingId layerId) const
Definition: LoadedNetwork.cpp:715
armnn::BackendRegistry::GetFactory
FactoryFunction GetFactory(const BackendId &id) const
Definition: BackendRegistry.cpp:57
armnn::ParseOptions
void ParseOptions(const std::vector< BackendOptions > &options, BackendId backend, F f)
Definition: BackendOptions.hpp:297
armnn::IRuntime::CreationOptions::m_ProtectedMode
bool m_ProtectedMode
Setting this flag will allow the user to create the Runtime in protected mode.
Definition: IRuntime.hpp:106
armnn::DynamicBackendUtils::DeregisterDynamicBackends
static void DeregisterDynamicBackends(const BackendIdSet &dynamicBackends)
Definition: DynamicBackendUtils.cpp:320
ARMNN_VERSION
#define ARMNN_VERSION
ARMNN_VERSION: "X.Y.Z" where: X = Major version number Y = Minor version number Z = Patch version num...
Definition: Version.hpp:22
armnn::BackendRegistry::RegisterAllocator
void RegisterAllocator(const BackendId &id, std::shared_ptr< ICustomAllocator > alloc)
Definition: BackendRegistry.cpp:112
armnn::IRuntime::CreationOptions::m_ProfilingOptions
ExternalProfilingOptions m_ProfilingOptions
Definition: IRuntime.hpp:154
armnn::LoadedNetwork::MakeLoadedNetwork
static std::unique_ptr< LoadedNetwork > MakeLoadedNetwork(std::unique_ptr< IOptimizedNetwork > net, std::string &errorMessage, const INetworkProperties &networkProperties, arm::pipe::IProfilingService *profilingService)
Definition: LoadedNetwork.cpp:173
armnn::LoadedNetwork::CreateWorkingMemHandle
std::unique_ptr< IWorkingMemHandle > CreateWorkingMemHandle(NetworkId networkId)
Create a new unique WorkingMemHandle object.
Definition: LoadedNetwork.cpp:2025
armnn::ProfilerManager::GetInstance
static ProfilerManager & GetInstance()
Definition: Profiling.cpp:602
armnn::LoadedNetwork::Execute
Status Execute(const InputTensors &inputTensors, const OutputTensors &outputTensors, IWorkingMemHandle &workingMemHandle, std::vector< ImportedInputId > preImportedInputs={}, std::vector< ImportedOutputId > preImportedOutputs={})
Thread safe execution of the loaded network.
Definition: LoadedNetwork.cpp:1803
armnn::BackendOptions::Var
Very basic type safe variant.
Definition: BackendOptions.hpp:38
armnn::BackendId
Definition: BackendId.hpp:75
armnn::LoadedNetwork::GetProfiler
const std::shared_ptr< IProfiler > & GetProfiler() const
Definition: LoadedNetwork.hpp:87
armnn::LoadedNetwork::ClearImportedOutputs
void ClearImportedOutputs(const std::vector< ImportedOutputId > outputIds)
Definition: LoadedNetwork.cpp:1782
armnn::IRuntime::CreationOptions::m_MemoryOptimizerStrategyMap
std::map< BackendId, std::shared_ptr< IMemoryOptimizerStrategy > > m_MemoryOptimizerStrategyMap
A map to define a custom memory optimizer strategy for specific backend Ids.
Definition: IRuntime.hpp:123
armnn::LoadedNetwork::FreeWorkingMemory
void FreeWorkingMemory()
Definition: LoadedNetwork.cpp:1286
armnn::LoadedNetwork::IsAsyncEnabled
bool IsAsyncEnabled()
Definition: LoadedNetwork.hpp:95
armnn::IRuntime::CreationOptions::m_CustomAllocatorMap
std::map< BackendId, std::shared_ptr< ICustomAllocator > > m_CustomAllocatorMap
A map to define a custom memory allocator for specific backend Ids.
Definition: IRuntime.hpp:115
armnn::LoadedNetwork::ClearImportedInputs
void ClearImportedInputs(const std::vector< ImportedInputId > inputIds)
Definition: LoadedNetwork.cpp:1761
armnn::ArmNNProfilingServiceInitialiser::InitialiseProfilingService
void InitialiseProfilingService(arm::pipe::IProfilingService &profilingService) override
Definition: ArmNNProfilingServiceInitialiser.cpp:17
armnn::NullPointerException
Definition: Exceptions.hpp:146
armnn::BackendUnavailableException
Class for non-fatal exceptions raised while initialising a backend.
Definition: Exceptions.hpp:68
armnn::IRuntime::CreationOptions::ExternalProfilingOptions::m_EnableProfiling
bool m_EnableProfiling
Indicates whether external profiling is enabled or not.
Definition: IRuntime.hpp:139
armnn::GetMemBlockStrategyTypeName
constexpr const char * GetMemBlockStrategyTypeName(MemBlockStrategyType memBlockStrategyType)
Definition: TypesUtils.hpp:295
armnn::Status::Failure
@ Failure
armnn::GetTimeDuration
std::chrono::duration< double, std::milli > GetTimeDuration(std::chrono::high_resolution_clock::time_point start_time)
Definition: Timer.hpp:19
armnn::HasMatchingCapability
bool HasMatchingCapability(const BackendOptions::BackendOption &capability, const BackendCapabilities &capabilities)
Convenience function to check if a given capability matches a capability in a BackendCapabilities str...
Definition: BackendHelper.cpp:85