23 #include <unordered_map> 27 using LoadedNetworks = std::unordered_map<NetworkId, std::unique_ptr<LoadedNetwork>>;
48 std::string& errorMessage);
52 std::string& errorMessage,
71 std::shared_ptr<IAsyncExecutionCallback> callback);
118 int GenerateNetworkId();
122 template<
typename Func>
123 void LoadedNetworkFuncSafe(
NetworkId networkId, Func f)
125 std::lock_guard<std::mutex> lockGuard(m_Mutex);
126 auto iter = m_LoadedNetworks.find(networkId);
127 if (iter != m_LoadedNetworks.end())
129 f(iter->second.get());
134 void LoadDynamicBackends(
const std::string& overrideBackendPath);
136 mutable std::mutex m_Mutex;
141 std::unordered_map<BackendId, IBackendInternal::IBackendContextPtr> m_BackendContexts;
143 int m_NetworkIdCounter;
148 std::vector<DynamicBackendPtr> m_DynamicBackends;
TensorInfo GetInputTensorInfo(NetworkId networkId, LayerBindingId layerId) const
Status UnloadNetwork(NetworkId networkId)
Unloads a network from the Runtime.
Status LoadNetwork(NetworkId &networkIdOut, IOptimizedNetworkPtr network)
Loads a complete network into the Runtime.
std::vector< std::pair< LayerBindingId, class ConstTensor > > InputTensors
Status Execute(IWorkingMemHandle &workingMemHandle, const InputTensors &inputTensors, const OutputTensors &outputTensors)
This is an experimental function.
TensorInfo GetOutputTensorInfo(NetworkId networkId, LayerBindingId layerId) const
Copyright (c) 2021 ARM Limited and Contributors.
Status EnqueueWorkload(NetworkId networkId, const InputTensors &inputTensors, const OutputTensors &outputTensors)
profiling::IReportStructure IReportStructure
void RegisterDebugCallback(NetworkId networkId, const DebugCallbackFunction &func)
Registers a callback function to debug layers performing custom computations on intermediate tensors.
const std::shared_ptr< IProfiler > GetProfiler(NetworkId networkId) const
Gets the profiler corresponding to the given network id.
std::function< void(LayerGuid guid, unsigned int slotIndex, ITensorHandle *tensorHandle)> DebugCallbackFunction
Define the type of callback for the Debug layer to call.
int LayerBindingId
Type of identifiers for bindable layers (inputs, outputs).
friend profiling::ProfilingService & GetProfilingService(RuntimeImpl *runtime)
void Schedule(NetworkId networkId, const InputTensors &inputTensors, const OutputTensors &outputTensors, const QosExecPriority priority, std::shared_ptr< IAsyncExecutionCallback > callback)
This is an experimental function.
const IDeviceSpec & GetDeviceSpec() const
std::vector< std::pair< LayerBindingId, class Tensor > > OutputTensors
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
Device-specific knowledge to be passed to the optimizer.
friend void RuntimeLoadedNetworksReserve(RuntimeImpl *runtime)
RuntimeImpl(const IRuntime::CreationOptions &options)
Creates a runtime for workload execution.
std::unordered_map< NetworkId, std::unique_ptr< LoadedNetwork > > LoadedNetworks
std::unique_ptr< IWorkingMemHandle > CreateWorkingMemHandle(NetworkId networkId)
Create a new unique WorkingMemHandle object.