#include <client/include/IInitialiseProfilingService.hpp>
#include <client/include/IProfilingService.hpp>
#include <client/include/IReportStructure.hpp>

#include <unordered_map>

using LoadedNetworks = std::unordered_map<NetworkId, std::unique_ptr<LoadedNetwork>>;

// Excerpt from the RuntimeImpl declaration:

    Status LoadNetwork(NetworkId& networkIdOut,
                       IOptimizedNetworkPtr network,
                       std::string& errorMessage);

    Status LoadNetwork(NetworkId& networkIdOut,
                       IOptimizedNetworkPtr network,
                       std::string& errorMessage,
                       const INetworkProperties& networkProperties);

    Status EnqueueWorkload(NetworkId networkId,
                           const InputTensors& inputTensors,
                           const OutputTensors& outputTensors,
                           std::vector<ImportedInputId> preImportedInputIds = {},
                           std::vector<ImportedOutputId> preImportedOutputIds = {});

    void ReportStructure(arm::pipe::IProfilingService& profilingService) override;

    int GenerateNetworkId();

    template<typename Func>
    void LoadedNetworkFuncSafe(NetworkId networkId, Func f)
    {
#if !defined(ARMNN_DISABLE_THREADS)
        std::lock_guard<std::mutex> lockGuard(m_Mutex);
#endif
        auto iter = m_LoadedNetworks.find(networkId);
        if (iter != m_LoadedNetworks.end())
        {
            f(iter->second.get());
        }
    }

    void LoadDynamicBackends(const std::string& overrideBackendPath);

#if !defined(ARMNN_DISABLE_THREADS)
    mutable std::mutex m_Mutex;
#endif

    LoadedNetworks m_LoadedNetworks;
    std::unordered_map<BackendId, IBackendInternal::IBackendContextPtr> m_BackendContexts;
    int m_NetworkIdCounter;
    DeviceSpec m_DeviceSpec;
    std::vector<DynamicBackendPtr> m_DynamicBackends;
    std::unique_ptr<arm::pipe::IProfilingService> m_ProfilingService;
    std::set<BackendId> m_AllocatorsAddedByThisRuntime;
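LoadedNetworkFuncSafe guards a lookup in m_LoadedNetworks with m_Mutex and only invokes the functor if the network is still loaded, so the raw LoadedNetwork pointer never escapes the critical section. A self-contained sketch of the same lock-then-lookup pattern with generic names (WithResourceSafe and Resource are illustrative, not taken from the source):

#include <memory>
#include <mutex>
#include <unordered_map>

// The mutex serialises access to the map, the functor only runs if the id is
// still present, and the raw pointer stays inside the critical section.
template <typename Resource, typename Func>
void WithResourceSafe(std::mutex& mutex,
                      std::unordered_map<int, std::unique_ptr<Resource>>& resources,
                      int id,
                      Func f)
{
    std::lock_guard<std::mutex> lock(mutex);
    auto it = resources.find(id);
    if (it != resources.end())
    {
        f(it->second.get());
    }
}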
DeviceSpec
Device specific knowledge to be passed to the optimizer.
std::unordered_map< NetworkId, std::unique_ptr< LoadedNetwork > > LoadedNetworks
MemorySource
Define the Memory Source to reduce copies.
std::function< void(LayerGuid guid, unsigned int slotIndex, ITensorHandle *tensorHandle)> DebugCallbackFunction
Define the type of callback for the Debug layer to call.
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
std::vector< std::pair< LayerBindingId, class Tensor > > OutputTensors
arm::pipe::IReportStructure IReportStructure
std::vector< std::pair< LayerBindingId, class ConstTensor > > InputTensors
int LayerBindingId
Type of identifiers for bindable layers (inputs, outputs).
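InputTensors and OutputTensors pair a LayerBindingId with a ConstTensor or Tensor that wraps caller-owned memory. A minimal sketch of building both for a network whose single input and output use binding id 0 (BindTensors and the binding ids are assumptions for illustration):

#include <armnn/ArmNN.hpp>
#include <vector>

void BindTensors(armnn::IRuntime* runtime, armnn::NetworkId netId,
                 const std::vector<float>& inputData, std::vector<float>& outputData)
{
    // Query the shape/type the loaded network expects for binding id 0.
    armnn::TensorInfo inputInfo = runtime->GetInputTensorInfo(netId, 0);
    inputInfo.SetConstant(true); // ConstTensor expects a constant TensorInfo in recent ArmNN releases

    // InputTensors/OutputTensors are vectors of (LayerBindingId, tensor) pairs
    // wrapping memory owned by the caller.
    armnn::InputTensors inputTensors
    {
        { 0, armnn::ConstTensor(inputInfo, inputData.data()) }
    };
    armnn::OutputTensors outputTensors
    {
        { 0, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data()) }
    };

    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
}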
arm::pipe::IInitialiseProfilingService IInitialiseProfilingService
RuntimeImpl(const IRuntime::CreationOptions &options)
Creates a runtime for workload execution.
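RuntimeImpl sits behind the public IRuntime interface, so client code normally reaches it through IRuntime::Create rather than constructing it directly. A minimal sketch, assuming default CreationOptions:

#include <armnn/ArmNN.hpp>

int main()
{
    armnn::IRuntime::CreationOptions options; // defaults; fields such as m_DynamicBackendsPath can be set here
    armnn::IRuntimePtr runtime = armnn::IRuntime::Create(options);

    // The device spec reports which backends this runtime can actually use.
    const armnn::IDeviceSpec& deviceSpec = runtime->GetDeviceSpec();
    (void)deviceSpec;
    return 0;
}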
void ReportStructure(arm::pipe::IProfilingService &profilingService) override
void ClearImportedOutputs(NetworkId networkId, const std::vector< ImportedOutputId > outputIds)
std::vector< ImportedOutputId > ImportOutputs(NetworkId networkId, const OutputTensors &outputTensors, MemorySource forceImportMemorySource)
armnn::TensorInfo GetOutputTensorInfo(NetworkId networkId, LayerBindingId layerId) const
void RegisterDebugCallback(NetworkId networkId, const DebugCallbackFunction &func)
Registers a callback function to debug layers performing custom computations on intermediate tensors.
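A minimal sketch of registering such a callback, assuming netId refers to an already loaded network; AttachDebugCallback is an illustrative wrapper. The callback receives the owning layer's guid, the output slot index, and a handle to the intermediate tensor:

#include <armnn/ArmNN.hpp>
#include <cstdint>
#include <iostream>

void AttachDebugCallback(armnn::IRuntime* runtime, armnn::NetworkId netId)
{
    armnn::DebugCallbackFunction callback =
        [](armnn::LayerGuid guid, unsigned int slotIndex, armnn::ITensorHandle* tensorHandle)
    {
        // Fires once per Debug layer output during EnqueueWorkload.
        std::cout << "layer guid: " << static_cast<std::uint64_t>(guid)
                  << ", slot: " << slotIndex
                  << ", tensor handle: " << tensorHandle << "\n";
    };
    runtime->RegisterDebugCallback(netId, callback);
}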
std::vector< ImportedInputId > ImportInputs(NetworkId networkId, const InputTensors &inputTensors, MemorySource forceImportMemorySource)
armnn::TensorInfo GetInputTensorInfo(NetworkId networkId, LayerBindingId layerId) const
friend void RuntimeLoadedNetworksReserve(RuntimeImpl *runtime)
void ClearImportedInputs(NetworkId networkId, const std::vector< ImportedInputId > inputIds)
Status EnqueueWorkload(NetworkId networkId, const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputIds={}, std::vector< ImportedOutputId > preImportedOutputIds={})
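When inputs and outputs are imported up front, the per-inference copies can be skipped and the imported ids passed to EnqueueWorkload instead of the tensor vectors. A minimal sketch, assuming buffers that meet the backend's import requirements; RunWithImportedTensors and the choice of MemorySource::Malloc are illustrative:

#include <armnn/ArmNN.hpp>
#include <vector>

armnn::Status RunWithImportedTensors(armnn::IRuntime* runtime, armnn::NetworkId netId,
                                     const armnn::InputTensors& inputTensors,
                                     const armnn::OutputTensors& outputTensors)
{
    // Import once up front; the returned ids can be reused across inferences.
    std::vector<armnn::ImportedInputId> importedInputs =
        runtime->ImportInputs(netId, inputTensors, armnn::MemorySource::Malloc);
    std::vector<armnn::ImportedOutputId> importedOutputs =
        runtime->ImportOutputs(netId, outputTensors, armnn::MemorySource::Malloc);

    // Fully pre-imported tensors are passed by id; the tensor vectors stay empty.
    armnn::Status status = runtime->EnqueueWorkload(netId, {}, {}, importedInputs, importedOutputs);

    // Release the imported buffers once they are no longer needed.
    runtime->ClearImportedInputs(netId, importedInputs);
    runtime->ClearImportedOutputs(netId, importedOutputs);
    return status;
}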
const IDeviceSpec & GetDeviceSpec() const
friend arm::pipe::IProfilingService & GetProfilingService(RuntimeImpl *runtime)
void InitialiseProfilingService(arm::pipe::IProfilingService &profilingService) override
Status UnloadNetwork(NetworkId networkId)
Unloads a network from the Runtime.
const std::shared_ptr< IProfiler > GetProfiler(NetworkId networkId) const
Gets the profiler corresponding to the given network id.
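A minimal sketch of pulling the per-network profiler and dumping its events, assuming profiling is enabled before the inferences being measured; DumpProfile is an illustrative name:

#include <armnn/ArmNN.hpp>
#include <armnn/IProfiler.hpp>
#include <iostream>
#include <memory>

void DumpProfile(armnn::IRuntime* runtime, armnn::NetworkId netId)
{
    std::shared_ptr<armnn::IProfiler> profiler = runtime->GetProfiler(netId);
    if (profiler)
    {
        profiler->EnableProfiling(true);
        // ... run EnqueueWorkload for this network here ...
        profiler->Print(std::cout); // writes the collected profiling events
    }
}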
Status LoadNetwork(NetworkId &networkIdOut, IOptimizedNetworkPtr network)
Loads a complete network into the Runtime.
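A minimal end-to-end sketch of the load/unload cycle, assuming an already built INetwork and using CpuRef only as an always-available example backend; LoadAndUnload is an illustrative name:

#include <armnn/ArmNN.hpp>
#include <string>
#include <utility>
#include <vector>

armnn::Status LoadAndUnload(armnn::IRuntime* runtime, const armnn::INetwork& network)
{
    // Optimize for the preferred backends against this runtime's device spec.
    std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
    armnn::IOptimizedNetworkPtr optNet =
        armnn::Optimize(network, backends, runtime->GetDeviceSpec());

    armnn::NetworkId netId = 0;
    std::string errorMessage;
    armnn::Status status = runtime->LoadNetwork(netId, std::move(optNet), errorMessage);
    if (status != armnn::Status::Success)
    {
        return status; // errorMessage explains why loading failed
    }

    // ... EnqueueWorkload calls for netId go here ...

    return runtime->UnloadNetwork(netId);
}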