ArmNN 25.02
RuntimeImpl Struct Reference (final)

#include <Runtime.hpp>


Public Member Functions

Status LoadNetwork (NetworkId &networkIdOut, IOptimizedNetworkPtr network)
 Loads a complete network into the Runtime.
 
Status LoadNetwork (NetworkId &networkIdOut, IOptimizedNetworkPtr network, std::string &errorMessage)
 Loads a complete network into the IRuntime.
 
Status LoadNetwork (NetworkId &networkIdOut, IOptimizedNetworkPtr network, std::string &errorMessage, const INetworkProperties &networkProperties)
 
armnn::TensorInfo GetInputTensorInfo (NetworkId networkId, LayerBindingId layerId) const
 
armnn::TensorInfo GetOutputTensorInfo (NetworkId networkId, LayerBindingId layerId) const
 
std::vector< ImportedInputId > ImportInputs (NetworkId networkId, const InputTensors &inputTensors, MemorySource forceImportMemorySource)
 
std::vector< ImportedOutputId > ImportOutputs (NetworkId networkId, const OutputTensors &outputTensors, MemorySource forceImportMemorySource)
 
void ClearImportedInputs (NetworkId networkId, const std::vector< ImportedInputId > inputIds)
 
void ClearImportedOutputs (NetworkId networkId, const std::vector< ImportedOutputId > outputIds)
 
Status EnqueueWorkload (NetworkId networkId, const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputIds={}, std::vector< ImportedOutputId > preImportedOutputIds={})
 
Status UnloadNetwork (NetworkId networkId)
 Unloads a network from the Runtime.
 
const IDeviceSpec & GetDeviceSpec () const
 
const std::shared_ptr< IProfiler > GetProfiler (NetworkId networkId) const
 Gets the profiler corresponding to the given network id.
 
void RegisterDebugCallback (NetworkId networkId, const DebugCallbackFunction &func)
 Registers a callback function to debug layers performing custom computations on intermediate tensors.
 
 RuntimeImpl (const IRuntime::CreationOptions &options)
 Creates a runtime for workload execution.
 
 ~RuntimeImpl ()
 
void ReportStructure (arm::pipe::IProfilingService &profilingService) override
 
void InitialiseProfilingService (arm::pipe::IProfilingService &profilingService) override
 

Friends

void RuntimeLoadedNetworksReserve (RuntimeImpl *runtime)
 
arm::pipe::IProfilingService & GetProfilingService (RuntimeImpl *runtime)
 

Detailed Description

Definition at line 30 of file Runtime.hpp.
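
RuntimeImpl is the private implementation behind the public armnn::IRuntime interface; application code normally goes through IRuntime, which forwards to the methods documented here. The following sketch is not taken from the ArmNN sources; it assumes an already-built armnn::INetworkPtr called network with a single input and output bound to binding id 0, and shows the typical lifecycle: create the runtime, optimize and load a network, run it, and unload it.

    #include <armnn/ArmNN.hpp>
    #include <utility>
    #include <vector>

    // Minimal lifecycle sketch. 'network' is assumed to have been built elsewhere.
    armnn::Status RunOnce(armnn::INetworkPtr network)
    {
        // Create the runtime (internally backed by RuntimeImpl).
        armnn::IRuntime::CreationOptions options;
        armnn::IRuntimePtr runtime = armnn::IRuntime::Create(options);

        // Optimize for the preferred backends (reference CPU backend assumed here).
        std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
        armnn::IOptimizedNetworkPtr optNet =
            armnn::Optimize(*network, backends, runtime->GetDeviceSpec());

        // Load the optimized network; the runtime takes ownership of it.
        armnn::NetworkId netId = 0;
        if (runtime->LoadNetwork(netId, std::move(optNet)) != armnn::Status::Success)
        {
            return armnn::Status::Failure;
        }

        // Bind host buffers to the (assumed) binding id 0 and execute.
        armnn::TensorInfo inInfo  = runtime->GetInputTensorInfo(netId, 0);
        armnn::TensorInfo outInfo = runtime->GetOutputTensorInfo(netId, 0);
        inInfo.SetConstant(true); // ConstTensor requires a constant TensorInfo

        std::vector<float> inData(inInfo.GetNumElements(), 0.0f);
        std::vector<float> outData(outInfo.GetNumElements());

        armnn::InputTensors  inputs  { { 0, armnn::ConstTensor(inInfo, inData.data()) } };
        armnn::OutputTensors outputs { { 0, armnn::Tensor(outInfo, outData.data()) } };

        armnn::Status status = runtime->EnqueueWorkload(netId, inputs, outputs);

        // Unload when finished with the network.
        runtime->UnloadNetwork(netId);
        return status;
    }

Later fragments on this page reuse the runtime, netId, inputs and outputs names from this sketch.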

Constructor & Destructor Documentation

◆ RuntimeImpl()

RuntimeImpl ( const IRuntime::CreationOptions &  options)

Creates a runtime for workload execution.

Definition at line 295 of file Runtime.cpp.

296  : m_NetworkIdCounter(0)
297 {
298  m_ProfilingService = arm::pipe::IProfilingService::CreateProfilingService(
299  arm::pipe::MAX_ARMNN_COUNTER,
300  *this,
301  arm::pipe::ARMNN_SOFTWARE_INFO,
302  arm::pipe::ARMNN_SOFTWARE_VERSION,
303  arm::pipe::ARMNN_HARDWARE_VERSION,
304  *this);
305  const auto start_time = armnn::GetTimeNow();
306  ARMNN_LOG(info) << "ArmNN v" << ARMNN_VERSION;
307  if (!options.m_ProfilingOptions.m_EnableProfiling && options.m_ProfilingOptions.m_TimelineEnabled)
308  {
309  throw RuntimeException(
310  "It is not possible to enable timeline reporting without profiling being enabled");
311  }
312 #if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
313  // Load any available/compatible dynamic backend before the runtime
314  // goes through the backend registry
315  LoadDynamicBackends(options.m_DynamicBackendsPath);
316 #endif
317  armnn::BackendIdSet supportedBackends;
318  for (const auto& id : BackendRegistryInstance().GetBackendIds())
319  {
320  // Store backend contexts for the supported ones
321  try {
322  auto factoryFun = BackendRegistryInstance().GetFactory(id);
323 
324  if (!factoryFun)
325  {
326  throw armnn::NullPointerException("Factory Function should not be null.");
327  }
328 
329  auto backend = factoryFun();
330 
331  auto customAllocatorMapIterator = options.m_CustomAllocatorMap.find(id);
332  if (customAllocatorMapIterator != options.m_CustomAllocatorMap.end() &&
333  customAllocatorMapIterator->second == nullptr)
334  {
335 #if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
336  // We need to manually clean up the dynamic backends before throwing an exception.
337  DynamicBackendUtils::DeregisterDynamicBackends(m_DeviceSpec.GetDynamicBackends());
338  m_DeviceSpec.ClearDynamicBackends();
339 #endif
340  throw armnn::Exception("Allocator associated with id " + id.Get() + " is null");
341  }
342 
343  // If the runtime is created in protected mode only add backends that support this mode
344  if (options.m_ProtectedMode)
345  {
346  // check if backend supports ProtectedMode
348  BackendCapability protectedContentCapability {"ProtectedContentAllocation", true};
349  if (!HasMatchingCapability(protectedContentCapability, id))
350  {
351  // Protected Content Allocation is not supported by the backend
352  // backend should not be registered
353  ARMNN_LOG(warning) << "Backend "
354  << id
355  << " is not registered as does not support protected content allocation.";
356  continue;
357  }
358  // The user is responsible to provide a custom memory allocator which allows to allocate
359  // protected memory
360  if (customAllocatorMapIterator != options.m_CustomAllocatorMap.end())
361  {
362  std::string err;
363  if (customAllocatorMapIterator->second->GetMemorySourceType()
364  == armnn::MemorySource::DmaBufProtected)
365  {
366  if (!backend->UseCustomMemoryAllocator(customAllocatorMapIterator->second, err))
367  {
368  ARMNN_LOG(error) << "The backend "
369  << id
370  << " reported an error when entering protected mode. Backend won't be"
371  << " used. ErrorMsg: " << err;
372  continue;
373  }
374  // No errors so register the Custom Allocator with the BackendRegistry
375  BackendRegistryInstance().RegisterAllocator(id, customAllocatorMapIterator->second);
376  m_AllocatorsAddedByThisRuntime.emplace(id);
377  }
378  else
379  {
380  ARMNN_LOG(error) << "The CustomAllocator provided with the runtime options doesn't support "
381  "protected memory. Protected mode can't be activated. The backend "
382  << id
383  << " is not going to be used. MemorySource must be MemorySource::DmaBufProtected";
384  continue;
385  }
386  }
387  else
388  {
389  ARMNN_LOG(error) << "Protected mode can't be activated for backend: "
390  << id
391  << " no custom allocator was provided to the runtime options.";
392  continue;
393  }
394  }
395  else
396  {
397  // If a custom memory allocator is provided make the backend use that instead of the default
398  if (customAllocatorMapIterator != options.m_CustomAllocatorMap.end())
399  {
400  std::string err;
401  if (!backend->UseCustomMemoryAllocator(customAllocatorMapIterator->second, err))
402  {
403  ARMNN_LOG(error) << "The backend "
404  << id
405  << " reported an error when trying to use the provided custom allocator."
406  " Backend won't be used."
407  << " ErrorMsg: " << err;
408  continue;
409  }
410  // No errors so register the Custom Allocator with the BackendRegistry
411  BackendRegistryInstance().RegisterAllocator(id, customAllocatorMapIterator->second);
412  m_AllocatorsAddedByThisRuntime.emplace(id);
413  }
414  }
415 
416  // check if custom memory optimizer strategy map is set
417  if (!options.m_MemoryOptimizerStrategyMap.empty())
418  {
419  auto customMemoryOptimizerStrategyMapIterator = options.m_MemoryOptimizerStrategyMap.find(id);
420  // if a memory optimizer strategy is provided make the backend use that instead of the default
421  if (customMemoryOptimizerStrategyMapIterator != options.m_MemoryOptimizerStrategyMap.end())
422  {
423  // no errors.. register the memory optimizer strategy with the BackendRegistry
424  BackendRegistryInstance().RegisterMemoryOptimizerStrategy(
425  id, customMemoryOptimizerStrategyMapIterator->second);
426 
427  ARMNN_LOG(info) << "MemoryOptimizerStrategy "
428  << customMemoryOptimizerStrategyMapIterator->second->GetName()
429  << " set for the backend " << id << ".";
430  }
431  }
432  else
433  {
434  // check if to use one of the existing memory optimizer strategies is set
435  std::string memoryOptimizerStrategyName = "";
436  ParseOptions(options.m_BackendOptions, id, [&](std::string name, const BackendOptions::Var& value)
437  {
438  if (name == "MemoryOptimizerStrategy")
439  {
440  memoryOptimizerStrategyName = ParseStringBackendOption(value, "");
441  }
442  });
443  if (memoryOptimizerStrategyName != "")
444  {
445  std::shared_ptr<IMemoryOptimizerStrategy> strategy =
446  GetMemoryOptimizerStrategy(memoryOptimizerStrategyName);
447 
448  if (!strategy)
449  {
450  ARMNN_LOG(warning) << "MemoryOptimizerStrategy: " << memoryOptimizerStrategyName
451  << " was not found.";
452  }
453  else
454  {
456  auto strategyType = GetMemBlockStrategyTypeName(strategy->GetMemBlockStrategyType());
457  BackendCapability memOptimizeStrategyCapability {strategyType, true};
458  if (HasMatchingCapability(memOptimizeStrategyCapability, id))
459  {
460  BackendRegistryInstance().RegisterMemoryOptimizerStrategy(id, strategy);
461 
462  ARMNN_LOG(info) << "MemoryOptimizerStrategy: "
463  << memoryOptimizerStrategyName << " set for the backend " << id << ".";
464  }
465  else
466  {
467  ARMNN_LOG(warning) << "Backend "
468  << id
469  << " does not have multi-axis packing capability and cannot support"
470  << "MemoryOptimizerStrategy: " << memoryOptimizerStrategyName << ".";
471  }
472  }
473  }
474  }
475 
476  auto context = backend->CreateBackendContext(options);
477 
478  // backends are allowed to return nullptrs if they
479  // don't wish to create a backend specific context
480  if (context)
481  {
482  m_BackendContexts.emplace(std::make_pair(id, std::move(context)));
483  }
484  supportedBackends.emplace(id);
485 
486  unique_ptr<arm::pipe::IBackendProfiling> profilingIface =
487  arm::pipe::IBackendProfiling::CreateBackendProfiling(
489  *m_ProfilingService.get(),
490  id.Get());
491 
492  // Backends may also provide a profiling context. Ask for it now.
493  auto profilingContext = backend->CreateBackendProfilingContext(options, profilingIface);
494  // Backends that don't support profiling will return a null profiling context.
495  if (profilingContext)
496  {
497  // Pass the context onto the profiling service.
498  m_ProfilingService->AddBackendProfilingContext(id, profilingContext);
499  }
500  }
501  catch (const BackendUnavailableException&)
502  {
503  // Ignore backends which are unavailable
504  }
505  }
506 
507  BackendRegistryInstance().SetProfilingService(*m_ProfilingService.get());
508  // pass configuration info to the profiling service
509  m_ProfilingService->ConfigureProfilingService(
510  arm::pipe::ConvertExternalProfilingOptions(options.m_ProfilingOptions));
511  if (options.m_ProfilingOptions.m_EnableProfiling)
512  {
513  // try to wait for the profiling service to initialise
514  m_ProfilingService->WaitForProfilingServiceActivation(3000);
515  }
516 
517  m_DeviceSpec.AddSupportedBackends(supportedBackends);
518 
519  ARMNN_LOG(info) << "Initialization time: " << std::setprecision(2)
520  << std::fixed << armnn::GetTimeDuration(start_time).count() << " ms.";
521 }

References ARMNN_LOG, ARMNN_VERSION, armnn::BackendRegistryInstance(), DeviceSpec::ClearDynamicBackends(), arm::pipe::ConvertExternalProfilingOptions(), DynamicBackendUtils::DeregisterDynamicBackends(), armnn::DmaBufProtected, armnn::error, DeviceSpec::GetDynamicBackends(), BackendRegistry::GetFactory(), armnn::GetMemBlockStrategyTypeName(), armnn::GetMemoryOptimizerStrategy(), armnn::GetTimeNow(), armnn::HasMatchingCapability(), armnn::info, IRuntime::CreationOptions::m_BackendOptions, IRuntime::CreationOptions::m_CustomAllocatorMap, IRuntime::CreationOptions::m_DynamicBackendsPath, IRuntime::CreationOptions::ExternalProfilingOptions::m_EnableProfiling, IRuntime::CreationOptions::m_MemoryOptimizerStrategyMap, IRuntime::CreationOptions::m_ProfilingOptions, IRuntime::CreationOptions::m_ProtectedMode, IRuntime::CreationOptions::ExternalProfilingOptions::m_TimelineEnabled, armnn::ParseOptions(), BackendRegistry::RegisterAllocator(), BackendRegistry::RegisterMemoryOptimizerStrategy(), and armnn::warning.
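
The CreationOptions passed to the constructor drive everything shown above: external profiling, dynamic backend loading, protected mode and custom allocators. A hedged configuration sketch follows; the backend path is an example value and MyDmaBufAllocator is a hypothetical armnn::ICustomAllocator implementation, not part of ArmNN.

    #include <armnn/ArmNN.hpp>

    armnn::IRuntimePtr CreateConfiguredRuntime()
    {
        armnn::IRuntime::CreationOptions options;

        // External profiling; note that enabling the timeline without profiling
        // makes the constructor throw a RuntimeException.
        options.m_ProfilingOptions.m_EnableProfiling = true;
        options.m_ProfilingOptions.m_TimelineEnabled = true;

        // Optional: override the dynamic backend search path (example path).
        options.m_DynamicBackendsPath = "/usr/lib/armnn/backends";

        // Optional: protected mode requires a custom allocator whose memory source
        // is MemorySource::DmaBufProtected for every backend that should be used.
        // options.m_ProtectedMode = true;
        // options.m_CustomAllocatorMap["GpuAcc"] = std::make_shared<MyDmaBufAllocator>();

        return armnn::IRuntime::Create(options);
    }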

◆ ~RuntimeImpl()

Definition at line 523 of file Runtime.cpp.

524 {
525  const auto startTime = armnn::GetTimeNow();
526  std::vector<int> networkIDs;
527  try
528  {
529  // Coverity fix: The following code may throw an exception of type std::length_error.
530  std::transform(m_LoadedNetworks.begin(), m_LoadedNetworks.end(),
531  std::back_inserter(networkIDs),
532  [](const auto &pair) { return pair.first; });
533  }
534  catch (const std::exception& e)
535  {
536  // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
537  // exception of type std::length_error.
538  // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
539  std::cerr << "WARNING: An error has occurred when getting the IDs of the networks to unload: " << e.what()
540  << "\nSome of the loaded networks may not be unloaded" << std::endl;
541  }
542  // We then proceed to unload all the networks which IDs have been appended to the list
543  // up to the point the exception was thrown (if any).
544 
545  for (auto networkID : networkIDs)
546  {
547  try
548  {
549  // Coverity fix: UnloadNetwork() may throw an exception of type std::length_error,
550  // boost::log::v2s_mt_posix::odr_violation or boost::log::v2s_mt_posix::system_error
551  UnloadNetwork(networkID);
552  }
553  catch (const std::exception& e)
554  {
555  // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
556  // exception of type std::length_error.
557  // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
558  std::cerr << "WARNING: An error has occurred when unloading network " << networkID << ": " << e.what()
559  << std::endl;
560  }
561  }
562 #if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
563  // Clear all dynamic backends.
564  DynamicBackendUtils::DeregisterDynamicBackends(m_DeviceSpec.GetDynamicBackends());
565  m_DeviceSpec.ClearDynamicBackends();
566 #endif
567  m_BackendContexts.clear();
568 
570  // Remove custom allocators that this runtime has added.
571  // Note: that as backends can be per process and there can be many instances of a runtime in a process an allocator
572  // may have been overwritten by another runtime.
573  for_each(m_AllocatorsAddedByThisRuntime.begin(), m_AllocatorsAddedByThisRuntime.end(),
574  [](BackendId id) {BackendRegistryInstance().DeregisterAllocator(id);});
575 
576  ARMNN_LOG(info) << "Shutdown time: " << std::setprecision(2)
577  << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms.";
578 }

References ARMNN_LOG, armnn::BackendRegistryInstance(), DeviceSpec::ClearDynamicBackends(), DynamicBackendUtils::DeregisterDynamicBackends(), DeviceSpec::GetDynamicBackends(), armnn::GetTimeDuration(), armnn::GetTimeNow(), armnn::info, BackendRegistry::SetProfilingService(), and RuntimeImpl::UnloadNetwork().

Member Function Documentation

◆ ClearImportedInputs()

void ClearImportedInputs ( NetworkId  networkId,
const std::vector< ImportedInputId >  inputIds 
)

Definition at line 610 of file Runtime.cpp.

611 {
612  return GetLoadedNetworkPtr(networkId)->ClearImportedInputs(inputIds);
613 }

References LoadedNetwork::ClearImportedInputs().

◆ ClearImportedOutputs()

void ClearImportedOutputs ( NetworkId  networkId,
const std::vector< ImportedOutputId >  outputIds 
)

Definition at line 614 of file Runtime.cpp.

615 {
616  return GetLoadedNetworkPtr(networkId)->ClearImportedOutputs(outputIds);
617 }

References LoadedNetwork::ClearImportedOutputs().

◆ EnqueueWorkload()

Status EnqueueWorkload ( NetworkId  networkId,
const InputTensors &  inputTensors,
const OutputTensors &  outputTensors,
std::vector< ImportedInputId >  preImportedInputIds = {},
std::vector< ImportedOutputId >  preImportedOutputIds = {} 
)

Definition at line 619 of file Runtime.cpp.

624 {
625  const auto startTime = armnn::GetTimeNow();
626 
627  LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
628 
629  if (!loadedNetwork)
630  {
631  ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";
632  return Status::Failure;
633  }
634  ProfilerManager::GetInstance().RegisterProfiler(loadedNetwork->GetProfiler().get());
635 
636  ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "EnqueueWorkload");
637 
638  static thread_local NetworkId lastId = networkId;
639  if (lastId != networkId)
640  {
641  LoadedNetworkFuncSafe(lastId, [](LoadedNetwork* network)
642  {
643  network->FreeWorkingMemory();
644  });
645  }
646  lastId=networkId;
647 
648  auto status = loadedNetwork->EnqueueWorkload(inputTensors, outputTensors,
649  preImportedInputIds, preImportedOutputIds);
650 
651  // Check if we imported, if not there's no need to call the After EnqueueWorkload events
652  if (!preImportedInputIds.empty() || !preImportedOutputIds.empty())
653  {
654  // Call After EnqueueWorkload events
655  for (auto&& context : m_BackendContexts)
656  {
657  context.second->AfterEnqueueWorkload(networkId);
658  }
659  }
660  ARMNN_LOG(info) << "Execution time: " << std::setprecision(2)
661  << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms.";
662  return status;
663 }

References ARMNN_LOG, ARMNN_SCOPED_PROFILING_EVENT, LoadedNetwork::EnqueueWorkload(), armnn::error, armnn::Failure, LoadedNetwork::FreeWorkingMemory(), ProfilerManager::GetInstance(), LoadedNetwork::GetProfiler(), armnn::GetTimeDuration(), armnn::GetTimeNow(), armnn::info, ProfilerManager::RegisterProfiler(), and armnn::Undefined.
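
As the implementation shows, an unknown network id returns Status::Failure, and switching to a different network id on the same thread first frees the previous network's working memory. A minimal call, continuing the lifecycle sketch above (needs <iostream>):

    armnn::Status status = runtime->EnqueueWorkload(netId, inputs, outputs);
    if (status != armnn::Status::Success)
    {
        std::cerr << "Inference failed for network " << netId << std::endl;
    }

The two trailing parameters default to empty vectors; they are only needed when tensors have been pre-imported with ImportInputs/ImportOutputs (see below).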

◆ GetDeviceSpec()

const IDeviceSpec& GetDeviceSpec ( ) const
inline

Definition at line 80 of file Runtime.hpp.

80 { return m_DeviceSpec; }
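
The returned IDeviceSpec describes the backends that were successfully registered during construction. A short fragment, assuming IDeviceSpec::GetSupportedBackends() from the public interface and the runtime from the lifecycle sketch (needs <iostream>):

    const armnn::IDeviceSpec& spec = runtime->GetDeviceSpec();
    for (const armnn::BackendId& backend : spec.GetSupportedBackends())
    {
        std::cout << "Supported backend: " << backend << std::endl;
    }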

◆ GetInputTensorInfo()

TensorInfo GetInputTensorInfo ( NetworkId  networkId,
LayerBindingId  layerId 
) const

Definition at line 588 of file Runtime.cpp.

589 {
590  return GetLoadedNetworkPtr(networkId)->GetInputTensorInfo(layerId);
591 }

References LoadedNetwork::GetInputTensorInfo().

◆ GetOutputTensorInfo()

TensorInfo GetOutputTensorInfo ( NetworkId  networkId,
LayerBindingId  layerId 
) const

Definition at line 593 of file Runtime.cpp.

594 {
595  return GetLoadedNetworkPtr(networkId)->GetOutputTensorInfo(layerId);
596 }

References LoadedNetwork::GetOutputTensorInfo().
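
Both tensor-info getters are typically used to size host buffers before building InputTensors/OutputTensors, as in the lifecycle sketch. A small fragment for inspecting a binding point (binding id 0 assumed, needs <iostream>):

    armnn::TensorInfo outInfo = runtime->GetOutputTensorInfo(netId, 0);
    std::cout << "Output: " << outInfo.GetNumDimensions() << " dimensions, "
              << outInfo.GetNumElements() << " elements, "
              << outInfo.GetNumBytes() << " bytes" << std::endl;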

◆ GetProfiler()

const std::shared_ptr< IProfiler > GetProfiler ( NetworkId  networkId) const

Gets the profiler corresponding to the given network id.

Parameters
networkId - The id of the network for which to get the profiler.
Returns
A pointer to the requested profiler, or nullptr if not found.

Definition at line 262 of file Runtime.cpp.

263 {
264  auto it = m_LoadedNetworks.find(networkId);
265  if (it != m_LoadedNetworks.end())
266  {
267  auto& loadedNetwork = it->second;
268  return loadedNetwork->GetProfiler();
269  }
270 
271  return nullptr;
272 }
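
The profiler is owned by the loaded network, so it is only available between LoadNetwork and UnloadNetwork. A hedged fragment that enables per-layer profiling and prints the results after execution (IProfiler::EnableProfiling and IProfiler::Print are part of the public profiler interface; needs <iostream>):

    std::shared_ptr<armnn::IProfiler> profiler = runtime->GetProfiler(netId);
    if (profiler)
    {
        profiler->EnableProfiling(true);   // enable before running workloads
        // ... EnqueueWorkload(...) ...
        profiler->Print(std::cout);        // dump the collected per-layer timings
    }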

◆ ImportInputs()

std::vector< ImportedInputId > ImportInputs ( NetworkId  networkId,
const InputTensors &  inputTensors,
MemorySource  forceImportMemorySource 
)

Definition at line 598 of file Runtime.cpp.

600 {
601  return GetLoadedNetworkPtr(networkId)->ImportInputs(inputTensors, forceImportMemorySource);
602 }

References LoadedNetwork::ImportInputs().

◆ ImportOutputs()

std::vector< ImportedOutputId > ImportOutputs ( NetworkId  networkId,
const OutputTensors &  outputTensors,
MemorySource  forceImportMemorySource 
)

Definition at line 604 of file Runtime.cpp.

606 {
607  return GetLoadedNetworkPtr(networkId)->ImportOutputs(outputTensors, forceImportMemorySource);
608 }

References LoadedNetwork::ImportOutputs().
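
Pre-importing lets a backend wrap user-owned buffers directly, avoiding a copy on every execution; the returned ids are handed to EnqueueWorkload and released again with ClearImportedInputs/ClearImportedOutputs. A sketch continuing the lifecycle example, assuming the loaded network's backend supports importing from MemorySource::Malloc:

    // Import the already-bound tensors once...
    std::vector<armnn::ImportedInputId> importedIn =
        runtime->ImportInputs(netId, inputs, armnn::MemorySource::Malloc);
    std::vector<armnn::ImportedOutputId> importedOut =
        runtime->ImportOutputs(netId, outputs, armnn::MemorySource::Malloc);

    // ...execute as often as needed using the imported ids instead of tensors...
    runtime->EnqueueWorkload(netId, {}, {}, importedIn, importedOut);

    // ...then release the imports when they are no longer needed.
    runtime->ClearImportedInputs(netId, importedIn);
    runtime->ClearImportedOutputs(netId, importedOut);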

◆ InitialiseProfilingService()

void InitialiseProfilingService ( arm::pipe::IProfilingService &  profilingService)
override

Definition at line 289 of file Runtime.cpp.

290 {
291  ArmNNProfilingServiceInitialiser initialiser;
292  initialiser.InitialiseProfilingService(profilingService);
293 }

References ArmNNProfilingServiceInitialiser::InitialiseProfilingService().

◆ LoadNetwork() [1/3]

Status LoadNetwork ( NetworkId &  networkIdOut,
IOptimizedNetworkPtr  network 
)

Loads a complete network into the Runtime.

Parameters
[out] networkIdOut - Unique identifier for the network is returned in this reference.
[in] network - Complete network to load into the Runtime. The runtime takes ownership of the network once passed in.
Returns
armnn::Status

Definition at line 139 of file Runtime.cpp.

140 {
141  std::string ignoredErrorMessage;
142  return LoadNetwork(networkIdOut, std::move(inNetwork), ignoredErrorMessage);
143 }

Referenced by RuntimeImpl::LoadNetwork().

◆ LoadNetwork() [2/3]

Status LoadNetwork ( NetworkId &  networkIdOut,
IOptimizedNetworkPtr  network,
std::string &  errorMessage 
)

Load a complete network into the IRuntime.

Parameters
[out] networkIdOut - Unique identifier for the network is returned in this reference.
[in] network - Complete network to load into the IRuntime.
[out] errorMessage - Error message if there were any errors. The runtime takes ownership of the network once passed in.
Returns
armnn::Status

Definition at line 145 of file Runtime.cpp.

148 {
150  return LoadNetwork(networkIdOut, std::move(inNetwork), errorMessage, networkProperties);
151 }

References RuntimeImpl::LoadNetwork(), and armnn::Undefined.

◆ LoadNetwork() [3/3]

Status LoadNetwork ( NetworkId &  networkIdOut,
IOptimizedNetworkPtr  network,
std::string &  errorMessage,
const INetworkProperties &  networkProperties 
)

Definition at line 153 of file Runtime.cpp.

157 {
158  // Register the profiler
159  auto profiler = inNetwork->GetProfiler();
160  ProfilerManager::GetInstance().RegisterProfiler(profiler.get());
161 
162  IOptimizedNetwork* rawNetwork = inNetwork.release();
163 
164  networkIdOut = GenerateNetworkId();
165 
166  for (auto&& context : m_BackendContexts)
167  {
168  context.second->BeforeLoadNetwork(networkIdOut);
169  }
170 
171  unique_ptr<LoadedNetwork> loadedNetwork = LoadedNetwork::MakeLoadedNetwork(
172  std::unique_ptr<IOptimizedNetwork>(rawNetwork),
173  errorMessage,
174  networkProperties,
175  m_ProfilingService.get());
176 
177  if (!loadedNetwork)
178  {
179  return Status::Failure;
180  }
181 
182  {
183 #if !defined(ARMNN_DISABLE_THREADS)
184  std::lock_guard<std::mutex> lockGuard(m_Mutex);
185 #endif
186 
187  // Stores the network
188  m_LoadedNetworks[networkIdOut] = std::move(loadedNetwork);
189  }
190 
191  for (auto&& context : m_BackendContexts)
192  {
193  context.second->AfterLoadNetwork(networkIdOut);
194  }
195 
196  if (m_ProfilingService->IsProfilingEnabled())
197  {
198  m_ProfilingService->IncrementCounterValue(arm::pipe::NETWORK_LOADS);
199  }
200 
201  return Status::Success;
202 }

References armnn::Failure, ProfilerManager::GetInstance(), LoadedNetwork::MakeLoadedNetwork(), ProfilerManager::RegisterProfiler(), and armnn::Success.
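
The three overloads differ only in whether an error message and INetworkProperties are supplied; the first two forward to this one. When diagnosing load failures, the overload taking an error message is the most convenient, as in this fragment (continuing the lifecycle sketch, needs <iostream>):

    armnn::NetworkId netId = 0;
    std::string errorMessage;
    if (runtime->LoadNetwork(netId, std::move(optNet), errorMessage) != armnn::Status::Success)
    {
        std::cerr << "LoadNetwork failed: " << errorMessage << std::endl;
    }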

◆ RegisterDebugCallback()

void RegisterDebugCallback ( NetworkId  networkId,
const DebugCallbackFunction &  func 
)

Registers a callback function to debug layers performing custom computations on intermediate tensors.

Parameters
networkId - The id of the network to register the callback for.
func - Callback function to pass to the debug layer.

Definition at line 665 of file Runtime.cpp.

666 {
667  LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
668  loadedNetwork->RegisterDebugCallback(func);
669 }

References LoadedNetwork::RegisterDebugCallback().
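
The callback fires for the debug layers inserted when the network was optimized with debugging enabled, receiving the owning layer's guid, the output slot index and a handle to the intermediate tensor. A minimal sketch; the lambda parameters assume the DebugCallbackFunction signature defined in armnn/Types.hpp (needs <iostream>):

    runtime->RegisterDebugCallback(netId,
        [](armnn::LayerGuid /*layerGuid*/, unsigned int slotIndex, armnn::ITensorHandle* tensorHandle)
        {
            // Inspect or dump the intermediate tensor produced on this output slot.
            std::cout << "Debug output, slot " << slotIndex << ", elements: "
                      << tensorHandle->GetShape().GetNumElements() << std::endl;
        });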

◆ ReportStructure()

void ReportStructure ( arm::pipe::IProfilingService &  profilingService)
override

Definition at line 274 of file Runtime.cpp.

275 {
276  if (profilingService.IsProfilingEnabled())
277  {
278  LoadedNetworks::iterator it = m_LoadedNetworks.begin();
279  while (it != m_LoadedNetworks.end())
280  {
281  auto& loadedNetwork = it->second;
282  loadedNetwork->SendNetworkStructure(profilingService);
283  // Increment the Iterator to point to next entry
284  it++;
285  }
286  }
287 }

◆ UnloadNetwork()

Status UnloadNetwork ( NetworkId  networkId)

Unloads a network from the Runtime.

At the moment this only removes the network from the m_Impl->m_Network. This might need more work in the future to be AndroidNN compliant.

Parameters
[in] networkId - Unique identifier for the network to be unloaded. Generated in LoadNetwork().
Returns
armnn::Status

Definition at line 204 of file Runtime.cpp.

205 {
206  bool unloadOk = true;
207  for (auto&& context : m_BackendContexts)
208  {
209  unloadOk &= context.second->BeforeUnloadNetwork(networkId);
210  }
211 
212  if (!unloadOk)
213  {
214  ARMNN_LOG(warning) << "RuntimeImpl::UnloadNetwork(): failed to unload "
215  "network with ID:" << networkId << " because BeforeUnloadNetwork failed";
216  return Status::Failure;
217  }
218 
219  std::unique_ptr<arm::pipe::TimelineUtilityMethods> timelineUtils =
220  arm::pipe::TimelineUtilityMethods::GetTimelineUtils(*m_ProfilingService.get());
221  {
222 #if !defined(ARMNN_DISABLE_THREADS)
223  std::lock_guard<std::mutex> lockGuard(m_Mutex);
224 #endif
225 
226  // If timeline recording is on mark the Network end of life
227  if (timelineUtils)
228  {
229  auto search = m_LoadedNetworks.find(networkId);
230  if (search != m_LoadedNetworks.end())
231  {
232  arm::pipe::ProfilingGuid networkGuid = search->second->GetNetworkGuid();
233  timelineUtils->RecordEvent(networkGuid,
234  arm::pipe::LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS);
235  }
236  }
237 
238  if (m_LoadedNetworks.erase(networkId) == 0)
239  {
240  ARMNN_LOG(warning) << "WARNING: RuntimeImpl::UnloadNetwork(): " << networkId << " not found!";
241  return Status::Failure;
242  }
243 
244  if (m_ProfilingService->IsProfilingEnabled())
245  {
246  m_ProfilingService->IncrementCounterValue(arm::pipe::NETWORK_UNLOADS);
247  }
248  }
249 
250  for (auto&& context : m_BackendContexts)
251  {
252  context.second->AfterUnloadNetwork(networkId);
253  }
254 
255  // Unregister the profiler
256  ProfilerManager::GetInstance().RegisterProfiler(nullptr);
257 
258  ARMNN_LOG(debug) << "RuntimeImpl::UnloadNetwork(): Unloaded network with ID: " << networkId;
259  return Status::Success;
260 }

References ARMNN_LOG, armnn::debug, armnn::Failure, ProfilerManager::GetInstance(), ProfilerManager::RegisterProfiler(), armnn::Success, and armnn::warning.

Referenced by RuntimeImpl::~RuntimeImpl().
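
UnloadNetwork returns Failure when the id is unknown or when a backend context rejects the unload in BeforeUnloadNetwork, so the status is worth checking (fragment continuing the lifecycle sketch, needs <iostream>):

    if (runtime->UnloadNetwork(netId) != armnn::Status::Success)
    {
        std::cerr << "Failed to unload network " << netId << std::endl;
    }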

Friends And Related Function Documentation

◆ GetProfilingService

arm::pipe::IProfilingService& GetProfilingService ( RuntimeImpl *  runtime)
friend

Definition at line 59 of file TestUtils.cpp.

60 {
61  return *(runtime->m_ProfilingService.get());
62 }

◆ RuntimeLoadedNetworksReserve

void RuntimeLoadedNetworksReserve ( RuntimeImpl *  runtime)
friend

The documentation for this struct was generated from the following files:
Runtime.hpp
Runtime.cpp