ArmNN 25.11
Runtime.hpp
Go to the documentation of this file.
1//
2// Copyright © 2017, 2023 Arm Ltd and Contributors. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5#pragma once
6
7#include "LoadedNetwork.hpp"
8#include "DeviceSpec.hpp"
9
10#include <armnn/INetwork.hpp>
11#include <armnn/IRuntime.hpp>
12#include <armnn/Tensor.hpp>
13#include <armnn/BackendId.hpp>
14
14
15#include <armnn/backends/DynamicBackend.hpp>
16
17#include <client/include/IInitialiseProfilingService.hpp>
18#include <client/include/IProfilingService.hpp>
19#include <client/include/IReportStructure.hpp>
20
21#include <mutex>
22#include <unordered_map>
23
24namespace armnn
25{
26using LoadedNetworks = std::unordered_map<NetworkId, std::unique_ptr<LoadedNetwork>>;
27using IReportStructure = arm::pipe::IReportStructure;
28using IInitialiseProfilingService = arm::pipe::IInitialiseProfilingService;
29
30struct RuntimeImpl final : public IReportStructure, public IInitialiseProfilingService
31{
32public:
33 /// Loads a complete network into the Runtime.
34 /// @param [out] networkIdOut - Unique identifier for the network is returned in this reference.
35 /// @param [in] network - Complete network to load into the Runtime.
36 /// The runtime takes ownership of the network once passed in.
37 /// @return armnn::Status
38 Status LoadNetwork(NetworkId& networkIdOut, IOptimizedNetworkPtr network);
39
40 /// Load a complete network into the IRuntime.
41 /// @param [out] networkIdOut Unique identifier for the network is returned in this reference.
42 /// @param [in] network Complete network to load into the IRuntime.
43 /// @param [out] errorMessage Error message if there were any errors.
44 /// The runtime takes ownership of the network once passed in.
45 /// @return armnn::Status
46 Status LoadNetwork(NetworkId& networkIdOut,
47 IOptimizedNetworkPtr network,
48 std::string& errorMessage);
49
50 Status LoadNetwork(NetworkId& networkIdOut,
51 IOptimizedNetworkPtr network,
52 std::string& errorMessage,
53 const INetworkProperties& networkProperties);
54
55 armnn::TensorInfo GetInputTensorInfo(NetworkId networkId, LayerBindingId layerId) const;
56 armnn::TensorInfo GetOutputTensorInfo(NetworkId networkId, LayerBindingId layerId) const;
57
58 std::vector<ImportedInputId> ImportInputs(NetworkId networkId, const InputTensors& inputTensors,
59 MemorySource forceImportMemorySource);
60 std::vector<ImportedOutputId> ImportOutputs(NetworkId networkId, const OutputTensors& outputTensors,
61 MemorySource forceImportMemorySource);
62
63 void ClearImportedInputs(NetworkId networkId, const std::vector<ImportedInputId> inputIds);
64 void ClearImportedOutputs(NetworkId networkId, const std::vector<ImportedOutputId> outputIds);
65
66 // Evaluates network using input in inputTensors, outputs filled into outputTensors.
67 Status EnqueueWorkload(NetworkId networkId,
68 const InputTensors& inputTensors,
69 const OutputTensors& outputTensors,
70 std::vector<ImportedInputId> preImportedInputIds = {},
71 std::vector<ImportedOutputId> preImportedOutputIds = {});
72
73 /// Unloads a network from the Runtime.
74 /// At the moment this only removes the network from the m_Impl->m_Network.
75 /// This might need more work in the future to be AndroidNN compliant.
76 /// @param [in] networkId Unique identifier for the network to be unloaded. Generated in LoadNetwork().
77 /// @return armnn::Status
78 Status UnloadNetwork(NetworkId networkId);
79
80 const IDeviceSpec& GetDeviceSpec() const { return m_DeviceSpec; }
81
82 /// Gets the profiler corresponding to the given network id.
83 /// @param networkId The id of the network for which to get the profile.
84 /// @return A pointer to the requested profiler, or nullptr if not found.
85 const std::shared_ptr<IProfiler> GetProfiler(NetworkId networkId) const;
86
87 /// Registers a callback function to debug layers performing custom computations on intermediate tensors.
88 /// @param networkId The id of the network to register the callback.
89 /// @param func callback function to pass to the debug layer.
90 void RegisterDebugCallback(NetworkId networkId, const DebugCallbackFunction& func);
91
92 /// Creates a runtime for workload execution.
93 RuntimeImpl(const IRuntime::CreationOptions& options);
94
95 ~RuntimeImpl();
96
97 //NOTE: we won't need the profiling service reference but it is good to pass the service
98 // in this way to facilitate other implementations down the road
99 void ReportStructure(arm::pipe::IProfilingService& profilingService) override;
100
101 void InitialiseProfilingService(arm::pipe::IProfilingService& profilingService) override;
102
103private:
104 friend void RuntimeLoadedNetworksReserve(RuntimeImpl* runtime); // See RuntimeTests.cpp
105
106 friend arm::pipe::IProfilingService& GetProfilingService(RuntimeImpl* runtime); // See RuntimeTests.cpp
107
108 int GenerateNetworkId();
109
110 LoadedNetwork* GetLoadedNetworkPtr(NetworkId networkId) const;
111
112 template<typename Func>
113 void LoadedNetworkFuncSafe(NetworkId networkId, Func f)
114 {
115#if !defined(ARMNN_DISABLE_THREADS)
116 std::lock_guard<std::mutex> lockGuard(m_Mutex);
117#endif
118 auto iter = m_LoadedNetworks.find(networkId);
119 if (iter != m_LoadedNetworks.end())
120 {
121 f(iter->second.get());
122 }
123 }
124
125 /// Loads any available/compatible dynamic backend in the runtime.
126 void LoadDynamicBackends(const std::string& overrideBackendPath);
127
128#if !defined(ARMNN_DISABLE_THREADS)
129 mutable std::mutex m_Mutex;
130#endif
131
132 /// Map of Loaded Networks with associated GUID as key
133 LoadedNetworks m_LoadedNetworks;
134
135 std::unordered_map<BackendId, IBackendInternal::IBackendContextPtr> m_BackendContexts;
136
137 int m_NetworkIdCounter;
138
139 DeviceSpec m_DeviceSpec;
140
141 /// List of dynamic backends loaded in the runtime
142 std::vector<DynamicBackendPtr> m_DynamicBackends;
143
144 /// Profiling Service Instance
145 std::unique_ptr<arm::pipe::IProfilingService> m_ProfilingService;
146
147 /// Keep track of backend ids of the custom allocators that this instance of the runtime added. The
148 /// destructor can then clean up for this runtime.
149 std::set<BackendId> m_AllocatorsAddedByThisRuntime;
150};
151
152} // namespace armnn
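RuntimeImpl sits behind the public armnn::IRuntime facade, so the load/execute/unload cycle declared above is normally driven through IRuntime rather than through this class directly. The following is a minimal sketch of that cycle against the public API; the single ReLU network, the tensor shape and the data buffers are placeholder assumptions, not part of this header.

// Sketch: create a runtime, load an optimized network, run one inference, unload it.
// Placeholder network and data; error handling kept minimal.
#include <armnn/ArmNN.hpp>

#include <iostream>
#include <string>
#include <vector>

int main()
{
    using namespace armnn;

    // Create the runtime (internally backed by RuntimeImpl).
    IRuntime::CreationOptions options;
    IRuntimePtr runtime = IRuntime::Create(options);

    // Build a placeholder network: Input -> ReLU -> Output.
    INetworkPtr network = INetwork::Create();
    IConnectableLayer* input = network->AddInputLayer(0);
    ActivationDescriptor reluDesc;
    reluDesc.m_Function = ActivationFunction::ReLu;
    IConnectableLayer* relu = network->AddActivationLayer(reluDesc, "relu");
    IConnectableLayer* output = network->AddOutputLayer(0);
    input->GetOutputSlot(0).Connect(relu->GetInputSlot(0));
    relu->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    TensorInfo info({1, 4}, DataType::Float32);
    input->GetOutputSlot(0).SetTensorInfo(info);
    relu->GetOutputSlot(0).SetTensorInfo(info);

    // Optimize for the reference backend and load the result into the runtime.
    std::vector<BackendId> backends = {Compute::CpuRef};
    IOptimizedNetworkPtr optNet = Optimize(*network, backends, runtime->GetDeviceSpec());
    NetworkId networkId = 0;
    std::string errorMessage;
    if (runtime->LoadNetwork(networkId, std::move(optNet), errorMessage) != Status::Success)
    {
        std::cerr << "LoadNetwork failed: " << errorMessage << "\n";
        return 1;
    }

    // Bind input/output buffers and run a single inference.
    std::vector<float> inputData{-1.0f, 2.0f, -3.0f, 4.0f};
    std::vector<float> outputData(4);
    TensorInfo inputInfo = runtime->GetInputTensorInfo(networkId, 0);
    inputInfo.SetConstant(true); // ConstTensor requires a constant TensorInfo
    InputTensors inputs{{0, ConstTensor(inputInfo, inputData.data())}};
    OutputTensors outputs{{0, Tensor(runtime->GetOutputTensorInfo(networkId, 0), outputData.data())}};
    runtime->EnqueueWorkload(networkId, inputs, outputs);

    // Release the network when finished with it.
    runtime->UnloadNetwork(networkId);
    return 0;
}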
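ImportInputs and ImportOutputs let a caller pre-import buffers once and reuse them across calls: the returned ids are handed to EnqueueWorkload in place of fresh InputTensors/OutputTensors, so the per-inference copy or import is skipped. A hedged sketch of that path, reusing runtime, networkId, inputs and outputs from the sketch above and assuming the chosen backend supports importing Malloc-backed memory:

// Sketch: pre-import the buffers, execute with the returned ids, then release them.
std::vector<ImportedInputId> importedInputIds =
    runtime->ImportInputs(networkId, inputs, MemorySource::Malloc);
std::vector<ImportedOutputId> importedOutputIds =
    runtime->ImportOutputs(networkId, outputs, MemorySource::Malloc);

// The data is already bound through the imported ids, so empty tensor lists are passed.
runtime->EnqueueWorkload(networkId, {}, {}, importedInputIds, importedOutputIds);

// Release the imported buffers once they are no longer needed.
runtime->ClearImportedInputs(networkId, importedInputIds);
runtime->ClearImportedOutputs(networkId, importedOutputIds);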
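GetProfiler exposes the per-network profiler, which can be switched on before execution and printed afterwards. A short sketch, again reusing runtime and networkId from the first example:

// Sketch: enable per-network profiling and dump the collected events.
std::shared_ptr<IProfiler> profiler = runtime->GetProfiler(networkId);
if (profiler)
{
    profiler->EnableProfiling(true);  // must be enabled before EnqueueWorkload
    // ... call EnqueueWorkload as in the first sketch ...
    profiler->Print(std::cout);       // writes the profiling events to std::cout
}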
armnn::IDeviceSpec
Device specific knowledge to be passed to the optimizer.
Definition Types.hpp:302
armnn
Copyright (c) 2021 ARM Limited and Contributors.
MemorySource
Define the Memory Source to reduce copies.
Definition Types.hpp:246
std::function< void(LayerGuid guid, unsigned int slotIndex, ITensorHandle *tensorHandle)> DebugCallbackFunction
Define the type of callback for the Debug layer to call.
Definition Types.hpp:400
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
Definition INetwork.hpp:340
Status
enumeration
Definition Types.hpp:43
std::unordered_map< NetworkId, std::unique_ptr< LoadedNetwork > > LoadedNetworks
Definition Runtime.hpp:26
std::vector< std::pair< LayerBindingId, class ConstTensor > > InputTensors
Definition Tensor.hpp:394
int NetworkId
Definition IRuntime.hpp:33
arm::pipe::IReportStructure IReportStructure
Definition Runtime.hpp:27
int LayerBindingId
Type of identifiers for bindable layers (inputs, outputs).
Definition Types.hpp:311
arm::pipe::IInitialiseProfilingService IInitialiseProfilingService
Definition Runtime.hpp:28
std::vector< std::pair< LayerBindingId, class Tensor > > OutputTensors
Definition Tensor.hpp:395
RuntimeImpl(const IRuntime::CreationOptions &options)
Creates a runtime for workload execution.
Definition Runtime.cpp:295
const IDeviceSpec & GetDeviceSpec() const
Definition Runtime.hpp:80
void ReportStructure(arm::pipe::IProfilingService &profilingService) override
Definition Runtime.cpp:274
friend arm::pipe::IProfilingService & GetProfilingService(RuntimeImpl *runtime)
Definition TestUtils.cpp:59
void ClearImportedOutputs(NetworkId networkId, const std::vector< ImportedOutputId > outputIds)
Definition Runtime.cpp:614
std::vector< ImportedOutputId > ImportOutputs(NetworkId networkId, const OutputTensors &outputTensors, MemorySource forceImportMemorySource)
Definition Runtime.cpp:604
armnn::TensorInfo GetOutputTensorInfo(NetworkId networkId, LayerBindingId layerId) const
Definition Runtime.cpp:593
void RegisterDebugCallback(NetworkId networkId, const DebugCallbackFunction &func)
Registers a callback function to debug layers performing custom computations on intermediate tensors.
Definition Runtime.cpp:665
std::vector< ImportedInputId > ImportInputs(NetworkId networkId, const InputTensors &inputTensors, MemorySource forceImportMemorySource)
Definition Runtime.cpp:598
armnn::TensorInfo GetInputTensorInfo(NetworkId networkId, LayerBindingId layerId) const
Definition Runtime.cpp:588
friend void RuntimeLoadedNetworksReserve(RuntimeImpl *runtime)
void ClearImportedInputs(NetworkId networkId, const std::vector< ImportedInputId > inputIds)
Definition Runtime.cpp:610
Status EnqueueWorkload(NetworkId networkId, const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputIds={}, std::vector< ImportedOutputId > preImportedOutputIds={})
Definition Runtime.cpp:619
void InitialiseProfilingService(arm::pipe::IProfilingService &profilingService) override
Definition Runtime.cpp:289
Status UnloadNetwork(NetworkId networkId)
Unloads a network from the Runtime.
Definition Runtime.cpp:204
const std::shared_ptr< IProfiler > GetProfiler(NetworkId networkId) const
Gets the profiler corresponding to the given network id.
Definition Runtime.cpp:262
Status LoadNetwork(NetworkId &networkIdOut, IOptimizedNetworkPtr network)
Loads a complete network into the Runtime.
Definition Runtime.cpp:139