ArmNN
 24.08
Runtime.cpp
Go to the documentation of this file.
1 //
2 // Copyright © 2017, 2022-2024 Arm Ltd and Contributors. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 
7 #include "Runtime.hpp"
8 
10 
11 #include <armnn/Version.hpp>
13 #include <armnn/BackendHelper.hpp>
14 #include <armnn/Logging.hpp>
15 
17 
19 
21 #include <armnn/utility/Timer.hpp>
22 
23 #if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
25 #endif
26 
28 
29 #include <client/include/backends/IBackendProfiling.hpp>
30 
31 #include <common/include/LabelsAndEventClasses.hpp>
32 
33 #include <iostream>
34 
35 
36 using namespace armnn;
37 using namespace std;
38 
39 namespace armnn
40 {
42 
43 IRuntime::IRuntime(const IRuntime::CreationOptions& options) : pRuntimeImpl(new RuntimeImpl(options)) {}
44 
// Defaulted out-of-line — presumably so pRuntimeImpl's destructor sees RuntimeImpl as a
// complete type here (it is only forward-declared in the public header) — TODO confirm.
IRuntime::~IRuntime() = default;
46 
48 {
49  return new IRuntime(options);
50 }
51 
53 {
54  return IRuntimePtr(CreateRaw(options), &IRuntime::Destroy);
55 }
56 
58 {
59  delete runtime;
60 }
61 
63 {
64  return pRuntimeImpl->LoadNetwork(networkIdOut, std::move(network));
65 }
66 
68  IOptimizedNetworkPtr network,
69  std::string& errorMessage)
70 {
71  return pRuntimeImpl->LoadNetwork(networkIdOut, std::move(network), errorMessage);
72 }
73 
75  IOptimizedNetworkPtr network,
76  std::string& errorMessage,
77  const INetworkProperties& networkProperties)
78 {
79  return pRuntimeImpl->LoadNetwork(networkIdOut, std::move(network), errorMessage, networkProperties);
80 }
81 
83 {
84  return pRuntimeImpl->GetInputTensorInfo(networkId, layerId);
85 }
86 
88 {
89  return pRuntimeImpl->GetOutputTensorInfo(networkId, layerId);
90 }
91 
// Pre-imports (maps) the given input tensors for the network; the returned ids can be
// passed to EnqueueWorkload/Execute as preImportedInputIds. Thin forwarder to RuntimeImpl.
std::vector<ImportedInputId> IRuntime::ImportInputs(NetworkId networkId, const InputTensors& inputTensors,
                                                    MemorySource forceImportMemorySource)
{
    return pRuntimeImpl->ImportInputs(networkId, inputTensors, forceImportMemorySource);
}
97 
// Pre-imports (maps) the given output tensors for the network; the returned ids can be
// passed to EnqueueWorkload/Execute as preImportedOutputIds. Thin forwarder to RuntimeImpl.
std::vector<ImportedOutputId> IRuntime::ImportOutputs(NetworkId networkId, const OutputTensors& outputTensors,
                                                      MemorySource forceImportMemorySource)
{
    return pRuntimeImpl->ImportOutputs(networkId, outputTensors, forceImportMemorySource);
}
103 
// Releases inputs previously imported via ImportInputs for the given network.
void IRuntime::ClearImportedInputs(NetworkId networkId, const std::vector<ImportedInputId> inputIds)
{
    return pRuntimeImpl->ClearImportedInputs(networkId, inputIds);
}
// Releases outputs previously imported via ImportOutputs for the given network.
void IRuntime::ClearImportedOutputs(NetworkId networkId, const std::vector<ImportedOutputId> outputIds)
{
    return pRuntimeImpl->ClearImportedOutputs(networkId, outputIds);
}
112 
114  const InputTensors& inputTensors,
115  const OutputTensors& outputTensors,
116  std::vector<ImportedInputId> preImportedInputIds,
117  std::vector<ImportedOutputId> preImportedOutputIds)
118 {
119  return pRuntimeImpl->EnqueueWorkload(networkId, inputTensors, outputTensors,
120  preImportedInputIds, preImportedOutputIds);
121 }
122 
124  const InputTensors& inputTensors,
125  const OutputTensors& outputTensors,
126  std::vector<ImportedInputId> preImportedInputs,
127  std::vector<ImportedOutputId> preImportedOutputs)
128 {
129  return pRuntimeImpl->Execute(workingMemHandle,
130  inputTensors,
131  outputTensors,
132  preImportedInputs,
133  preImportedOutputs);
134 }
135 
137 {
138  return pRuntimeImpl->UnloadNetwork(networkId);
139 }
140 
142 {
143  return pRuntimeImpl->GetDeviceSpec();
144 }
145 
// Creates a working-memory handle for async execution of the given network.
// Thin forwarder to RuntimeImpl.
std::unique_ptr<IWorkingMemHandle> IRuntime::CreateWorkingMemHandle(NetworkId networkId)
{
    return pRuntimeImpl->CreateWorkingMemHandle(networkId);
}
150 
// Returns the profiler for the given loaded network (nullptr if the id is unknown —
// see RuntimeImpl::GetProfiler). Thin forwarder.
const std::shared_ptr<IProfiler> IRuntime::GetProfiler(NetworkId networkId) const
{
    return pRuntimeImpl->GetProfiler(networkId);
}
155 
157 {
158  return pRuntimeImpl->RegisterDebugCallback(networkId, func);
159 }
160 
// Produces a fresh NetworkId by post-incrementing the runtime-wide counter.
// NOTE(review): no synchronization here — presumably LoadNetwork calls are serialised
// by callers or guarded elsewhere; confirm before relying on concurrent loads.
int RuntimeImpl::GenerateNetworkId()
{
    return m_NetworkIdCounter++;
}
165 
167 {
168  std::string ignoredErrorMessage;
169  return LoadNetwork(networkIdOut, std::move(inNetwork), ignoredErrorMessage);
170 }
171 
173  IOptimizedNetworkPtr inNetwork,
174  std::string& errorMessage)
175 {
176  INetworkProperties networkProperties(
178  return LoadNetwork(networkIdOut, std::move(inNetwork), errorMessage, networkProperties);
179 }
180 
182  IOptimizedNetworkPtr inNetwork,
183  std::string& errorMessage,
184  const INetworkProperties& networkProperties)
185 {
186  // Register the profiler
187  auto profiler = inNetwork->GetProfiler();
189 
190  IOptimizedNetwork* rawNetwork = inNetwork.release();
191 
192  networkIdOut = GenerateNetworkId();
193 
194  for (auto&& context : m_BackendContexts)
195  {
196  context.second->BeforeLoadNetwork(networkIdOut);
197  }
198 
199  unique_ptr<LoadedNetwork> loadedNetwork = LoadedNetwork::MakeLoadedNetwork(
200  std::unique_ptr<IOptimizedNetwork>(rawNetwork),
201  errorMessage,
202  networkProperties,
203  m_ProfilingService.get());
204 
205  if (!loadedNetwork)
206  {
207  return Status::Failure;
208  }
209 
210  {
211 #if !defined(ARMNN_DISABLE_THREADS)
212  std::lock_guard<std::mutex> lockGuard(m_Mutex);
213 #endif
214 
215  // Stores the network
216  m_LoadedNetworks[networkIdOut] = std::move(loadedNetwork);
217  }
218 
219  for (auto&& context : m_BackendContexts)
220  {
221  context.second->AfterLoadNetwork(networkIdOut);
222  }
223 
224  if (m_ProfilingService->IsProfilingEnabled())
225  {
226  m_ProfilingService->IncrementCounterValue(arm::pipe::NETWORK_LOADS);
227  }
228 
229  return Status::Success;
230 }
231 
233 {
234  bool unloadOk = true;
235  for (auto&& context : m_BackendContexts)
236  {
237  unloadOk &= context.second->BeforeUnloadNetwork(networkId);
238  }
239 
240  if (!unloadOk)
241  {
242  ARMNN_LOG(warning) << "RuntimeImpl::UnloadNetwork(): failed to unload "
243  "network with ID:" << networkId << " because BeforeUnloadNetwork failed";
244  return Status::Failure;
245  }
246 
247  std::unique_ptr<arm::pipe::TimelineUtilityMethods> timelineUtils =
248  arm::pipe::TimelineUtilityMethods::GetTimelineUtils(*m_ProfilingService.get());
249  {
250 #if !defined(ARMNN_DISABLE_THREADS)
251  std::lock_guard<std::mutex> lockGuard(m_Mutex);
252 #endif
253 
254  // If timeline recording is on mark the Network end of life
255  if (timelineUtils)
256  {
257  auto search = m_LoadedNetworks.find(networkId);
258  if (search != m_LoadedNetworks.end())
259  {
260  arm::pipe::ProfilingGuid networkGuid = search->second->GetNetworkGuid();
261  timelineUtils->RecordEvent(networkGuid,
262  arm::pipe::LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS);
263  }
264  }
265 
266  if (m_LoadedNetworks.erase(networkId) == 0)
267  {
268  ARMNN_LOG(warning) << "WARNING: RuntimeImpl::UnloadNetwork(): " << networkId << " not found!";
269  return Status::Failure;
270  }
271 
272  if (m_ProfilingService->IsProfilingEnabled())
273  {
274  m_ProfilingService->IncrementCounterValue(arm::pipe::NETWORK_UNLOADS);
275  }
276  }
277 
278  for (auto&& context : m_BackendContexts)
279  {
280  context.second->AfterUnloadNetwork(networkId);
281  }
282 
283  // Unregister the profiler
285 
286  ARMNN_LOG(debug) << "RuntimeImpl::UnloadNetwork(): Unloaded network with ID: " << networkId;
287  return Status::Success;
288 }
289 
290 const std::shared_ptr<IProfiler> RuntimeImpl::GetProfiler(NetworkId networkId) const
291 {
292  auto it = m_LoadedNetworks.find(networkId);
293  if (it != m_LoadedNetworks.end())
294  {
295  auto& loadedNetwork = it->second;
296  return loadedNetwork->GetProfiler();
297  }
298 
299  return nullptr;
300 }
301 
302 void RuntimeImpl::ReportStructure(arm::pipe::IProfilingService& profilingService)
303 {
304  if (profilingService.IsProfilingEnabled())
305  {
306  LoadedNetworks::iterator it = m_LoadedNetworks.begin();
307  while (it != m_LoadedNetworks.end())
308  {
309  auto& loadedNetwork = it->second;
310  loadedNetwork->SendNetworkStructure(profilingService);
311  // Increment the Iterator to point to next entry
312  it++;
313  }
314  }
315 }
316 
317 void RuntimeImpl::InitialiseProfilingService(arm::pipe::IProfilingService& profilingService)
318 {
320  initialiser.InitialiseProfilingService(profilingService);
321 }
322 
324  : m_NetworkIdCounter(0)
325 {
326  m_ProfilingService = arm::pipe::IProfilingService::CreateProfilingService(
327  arm::pipe::MAX_ARMNN_COUNTER,
328  *this,
329  arm::pipe::ARMNN_SOFTWARE_INFO,
330  arm::pipe::ARMNN_SOFTWARE_VERSION,
331  arm::pipe::ARMNN_HARDWARE_VERSION,
332  *this);
333  const auto start_time = armnn::GetTimeNow();
334  ARMNN_LOG(info) << "ArmNN v" << ARMNN_VERSION;
336  {
337  throw RuntimeException(
338  "It is not possible to enable timeline reporting without profiling being enabled");
339  }
340 #if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
341  // Load any available/compatible dynamic backend before the runtime
342  // goes through the backend registry
343  LoadDynamicBackends(options.m_DynamicBackendsPath);
344 #endif
345  armnn::BackendIdSet supportedBackends;
346  for (const auto& id : BackendRegistryInstance().GetBackendIds())
347  {
348  // Store backend contexts for the supported ones
349  try {
350  auto factoryFun = BackendRegistryInstance().GetFactory(id);
351 
352  if (!factoryFun)
353  {
354  throw armnn::NullPointerException("Factory Function should not be null.");
355  }
356 
357  auto backend = factoryFun();
358 
359  auto customAllocatorMapIterator = options.m_CustomAllocatorMap.find(id);
360  if (customAllocatorMapIterator != options.m_CustomAllocatorMap.end() &&
361  customAllocatorMapIterator->second == nullptr)
362  {
363 #if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
364  // We need to manually clean up the dynamic backends before throwing an exception.
366  m_DeviceSpec.ClearDynamicBackends();
367 #endif
368  throw armnn::Exception("Allocator associated with id " + id.Get() + " is null");
369  }
370 
371  // If the runtime is created in protected mode only add backends that support this mode
372  if (options.m_ProtectedMode)
373  {
374  // check if backend supports ProtectedMode
376  BackendCapability protectedContentCapability {"ProtectedContentAllocation", true};
377  if (!HasMatchingCapability(protectedContentCapability, id))
378  {
379  // Protected Content Allocation is not supported by the backend
380  // backend should not be registered
381  ARMNN_LOG(warning) << "Backend "
382  << id
383  << " is not registered as does not support protected content allocation.";
384  continue;
385  }
386  // The user is responsible to provide a custom memory allocator which allows to allocate
387  // protected memory
388  if (customAllocatorMapIterator != options.m_CustomAllocatorMap.end())
389  {
390  std::string err;
391  if (customAllocatorMapIterator->second->GetMemorySourceType()
393  {
394  if (!backend->UseCustomMemoryAllocator(customAllocatorMapIterator->second, err))
395  {
396  ARMNN_LOG(error) << "The backend "
397  << id
398  << " reported an error when entering protected mode. Backend won't be"
399  << " used. ErrorMsg: " << err;
400  continue;
401  }
402  // No errors so register the Custom Allocator with the BackendRegistry
403  BackendRegistryInstance().RegisterAllocator(id, customAllocatorMapIterator->second);
404  m_AllocatorsAddedByThisRuntime.emplace(id);
405  }
406  else
407  {
408  ARMNN_LOG(error) << "The CustomAllocator provided with the runtime options doesn't support "
409  "protected memory. Protected mode can't be activated. The backend "
410  << id
411  << " is not going to be used. MemorySource must be MemorySource::DmaBufProtected";
412  continue;
413  }
414  }
415  else
416  {
417  ARMNN_LOG(error) << "Protected mode can't be activated for backend: "
418  << id
419  << " no custom allocator was provided to the runtime options.";
420  continue;
421  }
422  }
423  else
424  {
425  // If a custom memory allocator is provided make the backend use that instead of the default
426  if (customAllocatorMapIterator != options.m_CustomAllocatorMap.end())
427  {
428  std::string err;
429  if (!backend->UseCustomMemoryAllocator(customAllocatorMapIterator->second, err))
430  {
431  ARMNN_LOG(error) << "The backend "
432  << id
433  << " reported an error when trying to use the provided custom allocator."
434  " Backend won't be used."
435  << " ErrorMsg: " << err;
436  continue;
437  }
438  // No errors so register the Custom Allocator with the BackendRegistry
439  BackendRegistryInstance().RegisterAllocator(id, customAllocatorMapIterator->second);
440  m_AllocatorsAddedByThisRuntime.emplace(id);
441  }
442  }
443 
444  // check if custom memory optimizer strategy map is set
445  if (!options.m_MemoryOptimizerStrategyMap.empty())
446  {
447  auto customMemoryOptimizerStrategyMapIterator = options.m_MemoryOptimizerStrategyMap.find(id);
448  // if a memory optimizer strategy is provided make the backend use that instead of the default
449  if (customMemoryOptimizerStrategyMapIterator != options.m_MemoryOptimizerStrategyMap.end())
450  {
451  // no errors.. register the memory optimizer strategy with the BackendRegistry
453  id, customMemoryOptimizerStrategyMapIterator->second);
454 
455  ARMNN_LOG(info) << "MemoryOptimizerStrategy "
456  << customMemoryOptimizerStrategyMapIterator->second->GetName()
457  << " set for the backend " << id << ".";
458  }
459  }
460  else
461  {
462  // check if to use one of the existing memory optimizer strategies is set
463  std::string memoryOptimizerStrategyName = "";
464  ParseOptions(options.m_BackendOptions, id, [&](std::string name, const BackendOptions::Var& value)
465  {
466  if (name == "MemoryOptimizerStrategy")
467  {
468  memoryOptimizerStrategyName = ParseStringBackendOption(value, "");
469  }
470  });
471  if (memoryOptimizerStrategyName != "")
472  {
473  std::shared_ptr<IMemoryOptimizerStrategy> strategy =
474  GetMemoryOptimizerStrategy(memoryOptimizerStrategyName);
475 
476  if (!strategy)
477  {
478  ARMNN_LOG(warning) << "MemoryOptimizerStrategy: " << memoryOptimizerStrategyName
479  << " was not found.";
480  }
481  else
482  {
484  auto strategyType = GetMemBlockStrategyTypeName(strategy->GetMemBlockStrategyType());
485  BackendCapability memOptimizeStrategyCapability {strategyType, true};
486  if (HasMatchingCapability(memOptimizeStrategyCapability, id))
487  {
489 
490  ARMNN_LOG(info) << "MemoryOptimizerStrategy: "
491  << memoryOptimizerStrategyName << " set for the backend " << id << ".";
492  }
493  else
494  {
495  ARMNN_LOG(warning) << "Backend "
496  << id
497  << " does not have multi-axis packing capability and cannot support"
498  << "MemoryOptimizerStrategy: " << memoryOptimizerStrategyName << ".";
499  }
500  }
501  }
502  }
503 
504  auto context = backend->CreateBackendContext(options);
505 
506  // backends are allowed to return nullptrs if they
507  // don't wish to create a backend specific context
508  if (context)
509  {
510  m_BackendContexts.emplace(std::make_pair(id, std::move(context)));
511  }
512  supportedBackends.emplace(id);
513 
514  unique_ptr<arm::pipe::IBackendProfiling> profilingIface =
515  arm::pipe::IBackendProfiling::CreateBackendProfiling(
517  *m_ProfilingService.get(),
518  id.Get());
519 
520  // Backends may also provide a profiling context. Ask for it now.
521  auto profilingContext = backend->CreateBackendProfilingContext(options, profilingIface);
522  // Backends that don't support profiling will return a null profiling context.
523  if (profilingContext)
524  {
525  // Pass the context onto the profiling service.
526  m_ProfilingService->AddBackendProfilingContext(id, profilingContext);
527  }
528  }
529  catch (const BackendUnavailableException&)
530  {
531  // Ignore backends which are unavailable
532  }
533  }
534 
535  BackendRegistryInstance().SetProfilingService(*m_ProfilingService.get());
536  // pass configuration info to the profiling service
537  m_ProfilingService->ConfigureProfilingService(
538  arm::pipe::ConvertExternalProfilingOptions(options.m_ProfilingOptions));
539  if (options.m_ProfilingOptions.m_EnableProfiling)
540  {
541  // try to wait for the profiling service to initialise
542  m_ProfilingService->WaitForProfilingServiceActivation(3000);
543  }
544 
545  m_DeviceSpec.AddSupportedBackends(supportedBackends);
546 
547  ARMNN_LOG(info) << "Initialization time: " << std::setprecision(2)
548  << std::fixed << armnn::GetTimeDuration(start_time).count() << " ms.";
549 }
550 
551 RuntimeImpl::~RuntimeImpl()
552 {
553  const auto startTime = armnn::GetTimeNow();
554  std::vector<int> networkIDs;
555  try
556  {
557  // Coverity fix: The following code may throw an exception of type std::length_error.
558  std::transform(m_LoadedNetworks.begin(), m_LoadedNetworks.end(),
559  std::back_inserter(networkIDs),
560  [](const auto &pair) { return pair.first; });
561  }
562  catch (const std::exception& e)
563  {
564  // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
565  // exception of type std::length_error.
566  // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
567  std::cerr << "WARNING: An error has occurred when getting the IDs of the networks to unload: " << e.what()
568  << "\nSome of the loaded networks may not be unloaded" << std::endl;
569  }
570  // We then proceed to unload all the networks which IDs have been appended to the list
571  // up to the point the exception was thrown (if any).
572 
573  for (auto networkID : networkIDs)
574  {
575  try
576  {
577  // Coverity fix: UnloadNetwork() may throw an exception of type std::length_error,
578  // boost::log::v2s_mt_posix::odr_violation or boost::log::v2s_mt_posix::system_error
579  UnloadNetwork(networkID);
580  }
581  catch (const std::exception& e)
582  {
583  // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
584  // exception of type std::length_error.
585  // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
586  std::cerr << "WARNING: An error has occurred when unloading network " << networkID << ": " << e.what()
587  << std::endl;
588  }
589  }
590 #if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
591  // Clear all dynamic backends.
593  m_DeviceSpec.ClearDynamicBackends();
594 #endif
595  m_BackendContexts.clear();
596 
598  // Remove custom allocators that this runtime has added.
599  // Note: that as backends can be per process and there can be many instances of a runtime in a process an allocator
600  // may have been overwritten by another runtime.
601  for_each(m_AllocatorsAddedByThisRuntime.begin(), m_AllocatorsAddedByThisRuntime.end(),
602  [](BackendId id) {BackendRegistryInstance().DeregisterAllocator(id);});
603 
604  ARMNN_LOG(info) << "Shutdown time: " << std::setprecision(2)
605  << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms.";
606 }
607 
// Looks up a loaded network by id under the runtime mutex (when threads are enabled),
// since m_LoadedNetworks is shared state.
// NOTE(review): std::map::at throws std::out_of_range for an unknown id, so callers
// that null-check the returned pointer never actually see nullptr via this path —
// confirm whether the throw is the intended unknown-id behaviour.
LoadedNetwork* RuntimeImpl::GetLoadedNetworkPtr(NetworkId networkId) const
{
#if !defined(ARMNN_DISABLE_THREADS)
    std::lock_guard<std::mutex> lockGuard(m_Mutex);
#endif
    return m_LoadedNetworks.at(networkId).get();
}
615 
617 {
618  return GetLoadedNetworkPtr(networkId)->GetInputTensorInfo(layerId);
619 }
620 
622 {
623  return GetLoadedNetworkPtr(networkId)->GetOutputTensorInfo(layerId);
624 }
625 
// Forwards the input import request to the network's LoadedNetwork.
std::vector<ImportedInputId> RuntimeImpl::ImportInputs(NetworkId networkId, const InputTensors& inputTensors,
                                                       MemorySource forceImportMemorySource)
{
    return GetLoadedNetworkPtr(networkId)->ImportInputs(inputTensors, forceImportMemorySource);
}
631 
// Forwards the output import request to the network's LoadedNetwork.
std::vector<ImportedOutputId> RuntimeImpl::ImportOutputs(NetworkId networkId, const OutputTensors& outputTensors,
                                                         MemorySource forceImportMemorySource)
{
    return GetLoadedNetworkPtr(networkId)->ImportOutputs(outputTensors, forceImportMemorySource);
}
637 
// Releases previously imported inputs on the network's LoadedNetwork.
void RuntimeImpl::ClearImportedInputs(NetworkId networkId, const std::vector<ImportedInputId> inputIds)
{
    return GetLoadedNetworkPtr(networkId)->ClearImportedInputs(inputIds);
}
// Releases previously imported outputs on the network's LoadedNetwork.
void RuntimeImpl::ClearImportedOutputs(NetworkId networkId, const std::vector<ImportedOutputId> outputIds)
{
    return GetLoadedNetworkPtr(networkId)->ClearImportedOutputs(outputIds);
}
646 
648  const InputTensors& inputTensors,
649  const OutputTensors& outputTensors,
650  std::vector<ImportedInputId> preImportedInputIds,
651  std::vector<ImportedOutputId> preImportedOutputIds)
652 {
653  const auto startTime = armnn::GetTimeNow();
654 
655  LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
656 
657  if (!loadedNetwork)
658  {
659  ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";
660  return Status::Failure;
661  }
662  if (loadedNetwork->IsAsyncEnabled())
663  {
664  ARMNN_LOG(error) << "Network " << networkId << " is async enabled.";
665  return Status::Failure;
666  }
668 
670 
671  static thread_local NetworkId lastId = networkId;
672  if (lastId != networkId)
673  {
674  LoadedNetworkFuncSafe(lastId, [](LoadedNetwork* network)
675  {
676  network->FreeWorkingMemory();
677  });
678  }
679  lastId=networkId;
680 
681  auto status = loadedNetwork->EnqueueWorkload(inputTensors, outputTensors,
682  preImportedInputIds, preImportedOutputIds);
683 
684 
685  // Check if we imported, if not there's no need to call the After EnqueueWorkload events
686  if (!preImportedInputIds.empty() || !preImportedOutputIds.empty())
687  {
688  // Call After EnqueueWorkload events
689  for (auto&& context : m_BackendContexts)
690  {
691  context.second->AfterEnqueueWorkload(networkId);
692  }
693  }
694  ARMNN_LOG(info) << "Execution time: " << std::setprecision(2)
695  << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms.";
696  return status;
697 }
698 
700  const InputTensors& inputTensors,
701  const OutputTensors& outputTensors,
702  std::vector<ImportedInputId> preImportedInputs,
703  std::vector<ImportedOutputId> preImportedOutputs)
704 {
705  const auto startTime = armnn::GetTimeNow();
706 
707  NetworkId networkId = iWorkingMemHandle.GetNetworkId();
708  LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
709 
710  if (!loadedNetwork)
711  {
712  ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";
713  return Status::Failure;
714  }
715  if (!loadedNetwork->IsAsyncEnabled())
716  {
717  ARMNN_LOG(error) << "Attempting execute " << networkId << " when it is not async enabled.";
718  return Status::Failure;
719  }
721 
723 
724  auto status = loadedNetwork->Execute(inputTensors,
725  outputTensors,
726  iWorkingMemHandle,
727  preImportedInputs,
728  preImportedOutputs);
729 
730  ARMNN_LOG(info) << "Execution time: " << std::setprecision(2)
731  << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms.";
732 
733  return status;
734 }
735 
736 /// Create a new unique WorkingMemHandle object. Create multiple handles if you wish to have
737 /// overlapped Execution by calling this function from different threads.
738 std::unique_ptr<IWorkingMemHandle> RuntimeImpl::CreateWorkingMemHandle(NetworkId networkId)
739 {
740  LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
741 
742  if (!loadedNetwork)
743  {
744  ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";
745  return nullptr;
746  }
747  if (!loadedNetwork->IsAsyncEnabled())
748  {
749  ARMNN_LOG(error) << "Network " << networkId << " is not async enabled.";
750  return nullptr;
751  }
753 
754  ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "CreateWorkingMemHandle");
755 
756  static thread_local NetworkId lastId = networkId;
757  if (lastId != networkId)
758  {
759  LoadedNetworkFuncSafe(lastId, [](LoadedNetwork* network)
760  {
761  network->FreeWorkingMemory();
762  });
763  }
764  lastId=networkId;
765 
766  return loadedNetwork->CreateWorkingMemHandle(networkId);
767 }
768 
770 {
771  LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
772  loadedNetwork->RegisterDebugCallback(func);
773 }
774 
775 #if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
776 void RuntimeImpl::LoadDynamicBackends(const std::string& overrideBackendPath)
777 {
778  // Get the paths where to load the dynamic backends from
779  std::vector<std::string> backendPaths = DynamicBackendUtils::GetBackendPaths(overrideBackendPath);
780 
781  // Get the shared objects to try to load as dynamic backends
782  std::vector<std::string> sharedObjects = DynamicBackendUtils::GetSharedObjects(backendPaths);
783 
784  // Create a list of dynamic backends
785  m_DynamicBackends = DynamicBackendUtils::CreateDynamicBackends(sharedObjects);
786 
787  // Register the dynamic backends in the backend registry
788  armnn::BackendIdSet registeredBackendIds = DynamicBackendUtils::RegisterDynamicBackends(m_DynamicBackends);
789 
790  // Add the registered dynamic backend ids to the list of supported backends
791  m_DeviceSpec.AddSupportedBackends(registeredBackendIds, true);
792 }
793 #endif
794 } // namespace armnn
BackendHelper.hpp
armnn::IRuntime::~IRuntime
~IRuntime()
armnn::RuntimeImpl::ReportStructure
void ReportStructure(arm::pipe::IProfilingService &profilingService) override
Definition: Runtime.cpp:302
armnn::IOptimizedNetworkPtr
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
Definition: INetwork.hpp:340
armnn::Compute::Undefined
@ Undefined
armnn::DeviceSpec::AddSupportedBackends
void AddSupportedBackends(const BackendIdSet &backendIds, bool isDynamic=false)
Definition: DeviceSpec.hpp:30
armnn::ArmNNProfilingServiceInitialiser
Definition: ArmNNProfilingServiceInitialiser.hpp:14
armnn::DynamicBackendUtils::CreateDynamicBackends
static std::vector< DynamicBackendPtr > CreateDynamicBackends(const std::vector< std::string > &sharedObjects)
Definition: DynamicBackendUtils.cpp:269
armnn::LoadedNetwork::GetOutputTensorInfo
TensorInfo GetOutputTensorInfo(LayerBindingId layerId) const
Definition: LoadedNetwork.cpp:733
armnn::ProfilerManager::RegisterProfiler
void RegisterProfiler(IProfiler *profiler)
Definition: Profiling.cpp:609
armnn::experimental::IWorkingMemHandle::GetNetworkId
virtual NetworkId GetNetworkId()=0
Returns the NetworkId of the Network that this IWorkingMemHandle works with.
armnn::RuntimeImpl::ClearImportedOutputs
void ClearImportedOutputs(NetworkId networkId, const std::vector< ImportedOutputId > outputIds)
Definition: Runtime.cpp:642
armnn::InputTensors
std::vector< std::pair< LayerBindingId, class ConstTensor > > InputTensors
Definition: Tensor.hpp:394
armnn::LoadedNetwork::EnqueueWorkload
Status EnqueueWorkload(const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputIds={}, std::vector< ImportedOutputId > preImportedOutputIds={})
Single thread execution of the loaded network.
Definition: LoadedNetwork.cpp:872
armnn::LoadedNetwork::RegisterDebugCallback
void RegisterDebugCallback(const DebugCallbackFunction &func)
Definition: LoadedNetwork.cpp:2296
armnn::DeviceSpec::GetDynamicBackends
const BackendIdSet & GetDynamicBackends() const
Definition: DeviceSpec.hpp:48
armnn::BackendIdSet
std::unordered_set< BackendId > BackendIdSet
Definition: BackendId.hpp:193
arm::pipe::ConvertExternalProfilingOptions
ProfilingOptions ConvertExternalProfilingOptions(const armnn::IRuntime::CreationOptions::ExternalProfilingOptions &options)
Definition: ProfilingOptionsConverter.cpp:17
armnn::IRuntime::CreationOptions::m_DynamicBackendsPath
std::string m_DynamicBackendsPath
Setting this value will override the paths set by the DYNAMIC_BACKEND_PATHS compiler directive Only a...
Definition: IRuntime.hpp:99
armnn::GetTimeNow
std::chrono::high_resolution_clock::time_point GetTimeNow()
Definition: Timer.hpp:14
armnn::TensorInfo
Definition: Tensor.hpp:152
armnn::MemorySource::DmaBufProtected
@ DmaBufProtected
armnn::BackendCapability
BackendCapability
BackendCapability class.
Definition: Types.hpp:286
armnn::IOptimizedNetwork
Definition: INetwork.hpp:908
armnn::LoadedNetwork
Definition: LoadedNetwork.hpp:42
armnn::BackendOptions::BackendOption
Definition: BackendOptions.hpp:215
armnn::BackendRegistry::RegisterMemoryOptimizerStrategy
void RegisterMemoryOptimizerStrategy(const BackendId &id, std::shared_ptr< IMemoryOptimizerStrategy > strategy)
Definition: BackendRegistry.cpp:133
armnn::RuntimeImpl::LoadNetwork
Status LoadNetwork(NetworkId &networkIdOut, IOptimizedNetworkPtr network)
Loads a complete network into the Runtime.
Definition: Runtime.cpp:166
armnn::RuntimeImpl::GetProfiler
const std::shared_ptr< IProfiler > GetProfiler(NetworkId networkId) const
Gets the profiler corresponding to the given network id.
Definition: Runtime.cpp:290
armnn::RuntimeImpl::ClearImportedInputs
void ClearImportedInputs(NetworkId networkId, const std::vector< ImportedInputId > inputIds)
Definition: Runtime.cpp:638
BackendRegistry.hpp
armnn::OutputTensors
std::vector< std::pair< LayerBindingId, class Tensor > > OutputTensors
Definition: Tensor.hpp:395
armnn::IRuntime::IRuntime
IRuntime()
Definition: Runtime.cpp:41
armnn::experimental::IWorkingMemHandle
Definition: IWorkingMemHandle.hpp:20
armnn::BoostLogSeverityMapping::error
@ error
armnn::IRuntime
Definition: IRuntime.hpp:75
armnn::LoadedNetwork::ImportInputs
std::vector< ImportedInputId > ImportInputs(const InputTensors &inputTensors, MemorySource forceImportMemorySource=MemorySource::Undefined)
Definition: LoadedNetwork.cpp:1486
armnn::IRuntime::CreationOptions::ExternalProfilingOptions::m_TimelineEnabled
bool m_TimelineEnabled
Indicates whether external timeline profiling is enabled or not.
Definition: IRuntime.hpp:141
armnn::RuntimeImpl::ImportInputs
std::vector< ImportedInputId > ImportInputs(NetworkId networkId, const InputTensors &inputTensors, MemorySource forceImportMemorySource)
Definition: Runtime.cpp:626
ARMNN_LOG
#define ARMNN_LOG(severity)
Definition: Logging.hpp:212
armnn::RuntimeImpl::UnloadNetwork
Status UnloadNetwork(NetworkId networkId)
Unloads a network from the Runtime.
Definition: Runtime.cpp:232
armnn::IRuntime::Execute
Status Execute(IWorkingMemHandle &workingMemHandle, const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputs={}, std::vector< ImportedOutputId > preImportedOutputs={})
This is an experimental function.
Definition: Runtime.cpp:123
armnn::IRuntime::ImportInputs
std::vector< ImportedInputId > ImportInputs(NetworkId networkId, const InputTensors &inputTensors, MemorySource forceImportMemorySource=MemorySource::Undefined)
ImportInputs separates the importing and mapping of InputTensors from network execution.
Definition: Runtime.cpp:92
armnn::DeviceSpec::ClearDynamicBackends
void ClearDynamicBackends()
Definition: DeviceSpec.hpp:39
armnn::BackendRegistry::SetProfilingService
void SetProfilingService(armnn::Optional< arm::pipe::IProfilingService & > profilingService)
Definition: BackendRegistry.cpp:107
armnn::RuntimeImpl::EnqueueWorkload
Status EnqueueWorkload(NetworkId networkId, const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputIds={}, std::vector< ImportedOutputId > preImportedOutputIds={})
Definition: Runtime.cpp:647
armnn::RuntimeImpl::ImportOutputs
std::vector< ImportedOutputId > ImportOutputs(NetworkId networkId, const OutputTensors &outputTensors, MemorySource forceImportMemorySource)
Definition: Runtime.cpp:632
armnn::NetworkId
int NetworkId
Definition: IRuntime.hpp:35
armnn::IRuntime::CreationOptions::m_BackendOptions
std::vector< BackendOptions > m_BackendOptions
Pass backend specific options.
Definition: IRuntime.hpp:190
ArmNNProfilingServiceInitialiser.hpp
armnn::IRuntime::Destroy
static void Destroy(IRuntime *runtime)
Definition: Runtime.cpp:57
Version.hpp
armnn::IRuntimePtr
std::unique_ptr< IRuntime, void(*)(IRuntime *runtime)> IRuntimePtr
Definition: IRuntime.hpp:41
Logging.hpp
ARMNN_SCOPED_PROFILING_EVENT
#define ARMNN_SCOPED_PROFILING_EVENT(backendId, name)
Definition: Profiling.hpp:220
armnn::MemorySource::Undefined
@ Undefined
IBackendContext.hpp
PolymorphicDowncast.hpp
armnn::EmptyOptional
EmptyOptional is used to initialize the Optional class in case we want to have default value for an O...
Definition: Optional.hpp:32
Runtime.hpp
armnn::GetMemoryOptimizerStrategy
std::unique_ptr< IMemoryOptimizerStrategy > GetMemoryOptimizerStrategy(const std::string &strategyName)
Definition: MemoryOptimizerStrategyLibrary.hpp:36
armnn::DynamicBackendUtils::GetSharedObjects
static std::vector< std::string > GetSharedObjects(const std::vector< std::string > &backendPaths)
Definition: DynamicBackendUtils.cpp:178
armnn::LoadedNetwork::ImportOutputs
std::vector< ImportedOutputId > ImportOutputs(const OutputTensors &outputTensors, MemorySource forceImportMemorySource=MemorySource::Undefined)
Definition: LoadedNetwork.cpp:1632
armnn::BackendRegistryInstance
BackendRegistry & BackendRegistryInstance()
Definition: BackendRegistry.cpp:15
armnn::LayerBindingId
int LayerBindingId
Type of identifiers for bindable layers (inputs, outputs).
Definition: Types.hpp:309
armnn::RuntimeImpl::RuntimeImpl
RuntimeImpl(const IRuntime::CreationOptions &options)
Creates a runtime for workload execution.
Definition: Runtime.cpp:323
armnn::IRuntime::GetOutputTensorInfo
TensorInfo GetOutputTensorInfo(NetworkId networkId, LayerBindingId layerId) const
Definition: Runtime.cpp:87
armnn::Status::Success
@ Success
armnn::INetworkProperties
Definition: IRuntime.hpp:43
armnn::Exception
Base class for all ArmNN exceptions so that users can filter to just those.
Definition: Exceptions.hpp:46
armnn::RuntimeException
Definition: Exceptions.hpp:120
armnn::IRuntime::LoadNetwork
Status LoadNetwork(NetworkId &networkIdOut, IOptimizedNetworkPtr network)
Loads a complete network into the IRuntime.
Definition: Runtime.cpp:62
armnn::BoostLogSeverityMapping::info
@ info
armnn::LoadedNetwork::GetInputTensorInfo
TensorInfo GetInputTensorInfo(LayerBindingId layerId) const
Definition: LoadedNetwork.cpp:715
armnn::DynamicBackendUtils::RegisterDynamicBackends
static BackendIdSet RegisterDynamicBackends(const std::vector< DynamicBackendPtr > &dynamicBackends)
Definition: DynamicBackendUtils.cpp:332
ArmNNProfiling.hpp
armnn::BackendRegistry::GetFactory
FactoryFunction GetFactory(const BackendId &id) const
Definition: BackendRegistry.cpp:57
armnn::IRuntime::GetProfiler
const std::shared_ptr< IProfiler > GetProfiler(NetworkId networkId) const
Gets the profiler corresponding to the given network id.
Definition: Runtime.cpp:151
armnn::ParseOptions
void ParseOptions(const std::vector< BackendOptions > &options, BackendId backend, F f)
Definition: BackendOptions.hpp:297
armnn::RuntimeImpl::RegisterDebugCallback
void RegisterDebugCallback(NetworkId networkId, const DebugCallbackFunction &func)
Registers a callback function to debug layers performing custom computations on intermediate tensors.
Definition: Runtime.cpp:769
armnn::IRuntime::RegisterDebugCallback
void RegisterDebugCallback(NetworkId networkId, const DebugCallbackFunction &func)
Registers a callback function to debug layers performing custom computations on intermediate tensors.
Definition: Runtime.cpp:156
armnn::IRuntime::CreationOptions::m_ProtectedMode
bool m_ProtectedMode
Setting this flag will allow the user to create the Runtime in protected mode.
Definition: IRuntime.hpp:106
armnn::DynamicBackendUtils::DeregisterDynamicBackends
static void DeregisterDynamicBackends(const BackendIdSet &dynamicBackends)
Definition: DynamicBackendUtils.cpp:320
ARMNN_VERSION
#define ARMNN_VERSION
ARMNN_VERSION: "X.Y.Z" where: X = Major version number Y = Minor version number Z = Patch version num...
Definition: Version.hpp:22
armnn::BackendRegistry::RegisterAllocator
void RegisterAllocator(const BackendId &id, std::shared_ptr< ICustomAllocator > alloc)
Definition: BackendRegistry.cpp:112
armnn::IRuntime::CreationOptions::m_ProfilingOptions
ExternalProfilingOptions m_ProfilingOptions
Definition: IRuntime.hpp:154
armnn::IRuntime::UnloadNetwork
Status UnloadNetwork(NetworkId networkId)
Unloads a network from the IRuntime.
Definition: Runtime.cpp:136
armnn::Status
Status
Definition: Types.hpp:42
armnn::LoadedNetwork::MakeLoadedNetwork
static std::unique_ptr< LoadedNetwork > MakeLoadedNetwork(std::unique_ptr< IOptimizedNetwork > net, std::string &errorMessage, const INetworkProperties &networkProperties, arm::pipe::IProfilingService *profilingService)
Definition: LoadedNetwork.cpp:173
armnn::IRuntime::ClearImportedOutputs
void ClearImportedOutputs(NetworkId networkId, const std::vector< ImportedOutputId > outputIds)
Un-import and delete the imported OutputTensor/s This function is not thread safe and must not be use...
Definition: Runtime.cpp:108
armnn::IRuntime::CreationOptions
Definition: IRuntime.hpp:78
armnn::LoadedNetwork::CreateWorkingMemHandle
std::unique_ptr< IWorkingMemHandle > CreateWorkingMemHandle(NetworkId networkId)
Create a new unique WorkingMemHandle object.
Definition: LoadedNetwork.cpp:2025
armnn::ProfilerManager::GetInstance
static ProfilerManager & GetInstance()
Definition: Profiling.cpp:602
armnn::BoostLogSeverityMapping::debug
@ debug
armnn::IRuntime::Create
static IRuntimePtr Create(const CreationOptions &options)
Definition: Runtime.cpp:52
std
Definition: BackendId.hpp:149
armnn::LoadedNetwork::Execute
Status Execute(const InputTensors &inputTensors, const OutputTensors &outputTensors, IWorkingMemHandle &workingMemHandle, std::vector< ImportedInputId > preImportedInputs={}, std::vector< ImportedOutputId > preImportedOutputs={})
Thread safe execution of the loaded network.
Definition: LoadedNetwork.cpp:1803
ProfilingOptionsConverter.hpp
armnn::BackendOptions::Var
Very basic type safe variant.
Definition: BackendOptions.hpp:38
armnn::DebugCallbackFunction
std::function< void(LayerGuid guid, unsigned int slotIndex, ITensorHandle *tensorHandle)> DebugCallbackFunction
Define the type of callback for the Debug layer to call.
Definition: Types.hpp:398
armnn::IRuntime::CreateRaw
static IRuntime * CreateRaw(const CreationOptions &options)
Definition: Runtime.cpp:47
armnn::MemorySource
MemorySource
Define the Memory Source to reduce copies.
Definition: Types.hpp:244
armnn::IRuntime::GetDeviceSpec
const IDeviceSpec & GetDeviceSpec() const
Definition: Runtime.cpp:141
armnn::RuntimeImpl::Execute
Status Execute(IWorkingMemHandle &workingMemHandle, const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputs, std::vector< ImportedOutputId > preImportedOutputs)
This is an experimental function.
Definition: Runtime.cpp:699
armnn::BackendId
Definition: BackendId.hpp:75
armnn::LoadedNetwork::GetProfiler
const std::shared_ptr< IProfiler > & GetProfiler() const
Definition: LoadedNetwork.hpp:87
armnn::LoadedNetwork::ClearImportedOutputs
void ClearImportedOutputs(const std::vector< ImportedOutputId > outputIds)
Definition: LoadedNetwork.cpp:1782
armnn::RuntimeImpl
Definition: Runtime.hpp:30
armnn::DynamicBackendUtils::GetBackendPaths
static std::vector< std::string > GetBackendPaths(const std::string &overrideBackendPath="")
Definition: DynamicBackendUtils.cpp:81
armnn::IRuntime::CreationOptions::m_MemoryOptimizerStrategyMap
std::map< BackendId, std::shared_ptr< IMemoryOptimizerStrategy > > m_MemoryOptimizerStrategyMap
A map to define a custom memory optimizer strategy for specific backend Ids.
Definition: IRuntime.hpp:123
armnn
Copyright (c) 2021 ARM Limited and Contributors.
Definition: 01_00_quick_start.dox:6
armnn::LoadedNetwork::FreeWorkingMemory
void FreeWorkingMemory()
Definition: LoadedNetwork.cpp:1286
armnn::IRuntime::GetInputTensorInfo
TensorInfo GetInputTensorInfo(NetworkId networkId, LayerBindingId layerId) const
Definition: Runtime.cpp:82
armnn::IRuntime::EnqueueWorkload
Status EnqueueWorkload(NetworkId networkId, const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputIds={}, std::vector< ImportedOutputId > preImportedOutputIds={})
Evaluates a network using input in inputTensors and outputs filled into outputTensors.
Definition: Runtime.cpp:113
armnn::IRuntime::CreateWorkingMemHandle
std::unique_ptr< IWorkingMemHandle > CreateWorkingMemHandle(NetworkId networkId)
Create a new unique WorkingMemHandle object.
Definition: Runtime.cpp:146
armnn::RuntimeImpl::InitialiseProfilingService
void InitialiseProfilingService(arm::pipe::IProfilingService &profilingService) override
Definition: Runtime.cpp:317
armnn::BoostLogSeverityMapping::warning
@ warning
armnn::LoadedNetwork::IsAsyncEnabled
bool IsAsyncEnabled()
Definition: LoadedNetwork.hpp:95
armnn::IDeviceSpec
Device specific knowledge to be passed to the optimizer.
Definition: Types.hpp:299
Timer.hpp
armnn::IRuntime::CreationOptions::m_CustomAllocatorMap
std::map< BackendId, std::shared_ptr< ICustomAllocator > > m_CustomAllocatorMap
A map to define a custom memory allocator for specific backend Ids.
Definition: IRuntime.hpp:115
armnn::LoadedNetwork::ClearImportedInputs
void ClearImportedInputs(const std::vector< ImportedInputId > inputIds)
Definition: LoadedNetwork.cpp:1761
armnn::ArmNNProfilingServiceInitialiser::InitialiseProfilingService
void InitialiseProfilingService(arm::pipe::IProfilingService &profilingService) override
Definition: ArmNNProfilingServiceInitialiser.cpp:17
armnn::NullPointerException
Definition: Exceptions.hpp:146
armnn::IRuntime::pRuntimeImpl
std::unique_ptr< RuntimeImpl > pRuntimeImpl
Definition: IRuntime.hpp:296
armnn::IRuntime::ClearImportedInputs
void ClearImportedInputs(NetworkId networkId, const std::vector< ImportedInputId > inputIds)
Un-import and delete the imported InputTensor/s This function is not thread safe and must not be used...
Definition: Runtime.cpp:104
armnn::BackendUnavailableException
Class for non-fatal exceptions raised while initialising a backend.
Definition: Exceptions.hpp:68
armnn::IRuntime::CreationOptions::ExternalProfilingOptions::m_EnableProfiling
bool m_EnableProfiling
Indicates whether external profiling is enabled or not.
Definition: IRuntime.hpp:139
armnn::GetMemBlockStrategyTypeName
constexpr const char * GetMemBlockStrategyTypeName(MemBlockStrategyType memBlockStrategyType)
Definition: TypesUtils.hpp:295
armnn::IRuntime::ImportOutputs
std::vector< ImportedOutputId > ImportOutputs(NetworkId networkId, const OutputTensors &outputTensors, MemorySource forceImportMemorySource=MemorySource::Undefined)
ImportOutputs separates the importing and mapping of OutputTensors from network execution.
Definition: Runtime.cpp:98
MemoryOptimizerStrategyLibrary.hpp
armnn::Status::Failure
@ Failure
armnn::GetTimeDuration
std::chrono::duration< double, std::milli > GetTimeDuration(std::chrono::high_resolution_clock::time_point start_time)
Definition: Timer.hpp:19
armnn::RuntimeImpl::CreateWorkingMemHandle
std::unique_ptr< IWorkingMemHandle > CreateWorkingMemHandle(NetworkId networkId)
Create a new unique WorkingMemHandle object.
Definition: Runtime.cpp:738
DynamicBackendUtils.hpp
armnn::RuntimeImpl::GetInputTensorInfo
armnn::TensorInfo GetInputTensorInfo(NetworkId networkId, LayerBindingId layerId) const
Definition: Runtime.cpp:616
armnn::HasMatchingCapability
bool HasMatchingCapability(const BackendOptions::BackendOption &capability, const BackendCapabilities &capabilities)
Convenience function to check if a given capability matches a capability in a BackendCapabilities str...
Definition: BackendHelper.cpp:85
armnn::RuntimeImpl::GetOutputTensorInfo
armnn::TensorInfo GetOutputTensorInfo(NetworkId networkId, LayerBindingId layerId) const
Definition: Runtime.cpp:621