ArmNN
 25.11
Loading...
Searching...
No Matches
IBackendInternal.hpp
Go to the documentation of this file.
1//
2// Copyright © 2022, 2024 Arm Ltd and Contributors. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5
6#pragma once
7
8#include <armnn/Types.hpp>
9#include <armnn/IRuntime.hpp>
10#include <armnn/Deprecated.hpp>
11
13
19
20#include <client/include/backends/IBackendProfiling.hpp>
21#include <client/include/backends/IBackendProfilingContext.hpp>
22
23#include <vector>
24#include <memory>
25
26namespace armnn
27{
29class IMemoryManager;
30class ILayerSupport;
31
/// Simple major.minor version pair used to express the Backend API version.
/// Comparison operators allow the runtime to check a backend's declared
/// version against the API version it was built for.
struct BackendVersion
{
    uint32_t m_Major;
    uint32_t m_Minor;

    /// Default-constructs version 0.0.
    constexpr BackendVersion()
        : m_Major(0)
        , m_Minor(0)
    {}
    constexpr BackendVersion(uint32_t major, uint32_t minor)
        : m_Major(major)
        , m_Minor(minor)
    {}

    bool operator==(const BackendVersion& other) const
    {
        // Identity short-circuit, then memberwise comparison.
        return this == &other ||
               (this->m_Major == other.m_Major &&
                this->m_Minor == other.m_Minor);
    }

    /// Lexicographic major-then-minor ordering.
    bool operator<=(const BackendVersion& other) const
    {
        return this->m_Major < other.m_Major ||
               (this->m_Major == other.m_Major &&
                this->m_Minor <= other.m_Minor);
    }

    /// Lexicographic major-then-minor ordering.
    bool operator>=(const BackendVersion& other) const
    {
        return this->m_Major > other.m_Major ||
               (this->m_Major == other.m_Major &&
                this->m_Minor >= other.m_Minor);
    }
};
67
68inline std::ostream& operator<<(std::ostream& os, const BackendVersion& backendVersion)
69{
70 os << "[" << backendVersion.m_Major << "." << backendVersion.m_Minor << "]";
71
72 return os;
73}
74
76{
77protected:
78 /// Creation must be done through a specific
79 /// backend interface.
80 IBackendInternal() = default;
81
82public:
83 /// Allow backends created by the factory function
84 /// to be destroyed through IBackendInternal.
85 ~IBackendInternal() override = default;
86
87 using IWorkloadFactoryPtr = std::unique_ptr<IWorkloadFactory>;
88 using IBackendContextPtr = std::unique_ptr<IBackendContext>;
89 /// This is the bridge between backend and backend profiling we'll keep it in the backend namespace.
90 using IBackendProfilingContextPtr = std::shared_ptr<arm::pipe::IBackendProfilingContext>;
91 using IBackendProfilingPtr = std::unique_ptr<arm::pipe::IBackendProfiling>;
92 using ILayerSupportSharedPtr = std::shared_ptr<ILayerSupport>;
93
94 using IBackendSpecificModelContextPtr = std::shared_ptr<IBackendModelContext>;
95
96 using IMemoryManagerUniquePtr = std::unique_ptr<IMemoryManager>;
97 using IMemoryManagerSharedPtr = std::shared_ptr<IMemoryManager>;
98
100
102 const IMemoryManagerSharedPtr& memoryManager = nullptr) const = 0;
103
105 class TensorHandleFactoryRegistry& tensorHandleFactoryRegistry) const;
106
108 const IMemoryManagerSharedPtr& memoryManager,
109 const ModelOptions& modelOptions) const;
110
112 class TensorHandleFactoryRegistry& tensorHandleFactoryRegistry,
113 const ModelOptions& modelOptions) const;
114
116 class TensorHandleFactoryRegistry& tensorHandleFactoryRegistry,
117 const ModelOptions& modelOptions,
118 MemorySourceFlags inputFlags,
119 MemorySourceFlags outputFlags) const;
120
121 /// Create the runtime context of the backend
122 ///
123 /// Implementations may return a default-constructed IBackendContextPtr if
124 /// no context is needed at runtime.
125 /// Implementations must throw BackendUnavailableException if the backend
126 /// cannot be used (for example, necessary accelerator hardware is not present).
127 /// The default implementation always returns a default-constructed pointer.
129
131
132 /// Create context specifically used for profiling interaction from backends.
134 IBackendProfilingPtr& backendProfiling);
135
137
138 virtual ILayerSupportSharedPtr GetLayerSupport(const ModelOptions& modelOptions) const;
139
140 virtual OptimizationViews OptimizeSubgraphView(const SubgraphView& subgraph) const;
141
143 const ModelOptions& modelOptions) const;
144
145 bool SupportsTensorAllocatorAPI() const;
146
148
149 /// (Optional) Returns a vector of supported TensorHandleFactory ids in preference order.
150 virtual std::vector<ITensorHandleFactory::FactoryId> GetHandleFactoryPreferences() const;
151
152 /// (Optional) Register TensorHandleFactories
153 /// Either this method or CreateMemoryManager() and
154 /// IWorkloadFactory::CreateTensor() IWorkloadFactory::CreateSubtensor() methods must be implemented.
156
157 /// (Optional) Register TensorHandleFactories
158 /// Either this method or CreateMemoryManager() and
159 /// IWorkloadFactory::CreateTensor() IWorkloadFactory::CreateSubtensor() methods must be implemented.
161 MemorySourceFlags inputFlags,
162 MemorySourceFlags outputFlags);
163
164 /// Returns the version of the Backend API
165 static constexpr BackendVersion GetApiVersion() { return BackendVersion(1, 0); }
166
167 /// Returns a BackendCapability if the backend lists the capability
168 /// The BackendCapability must then be inspected to check whether or not that BackendCapability is supported
169 /// Otherwise returns an EmptyOptional if the BackendCapability is unlisted
171 {
172 return BackendCapabilities("IBackendInternal NullCapabilities");
173 };
174
175 /// Signals the backend to use a custom memory allocator provided by the user
176 ///
177 /// \param allocator - a pointer to the provided ICustomAllocator to use with this backend
178 /// \param errMsg - Optional string variable to return error messages
179 /// \return - Returns true if switching to custom allocator was successful
180 virtual bool UseCustomMemoryAllocator(std::shared_ptr<ICustomAllocator> allocator,
182 {
183 IgnoreUnused(allocator);
184 if (errMsg)
185 {
186 std::stringstream message;
187 message << "The backend " << GetId() << " doesn't support using a custom allocator. This error might"
188 " be related with the protected mode if the backend doesn't"
189 " fully support it.";
190
191 errMsg.value() = message.str();
192 }
193 return false;
194 }
195
196 /// Returns the default memory allocator for the backend
197 ///
198 /// \return - Returns unique pointer to the Default Allocator of the Backend
199 virtual std::unique_ptr<ICustomAllocator> GetDefaultAllocator() const
200 {
201 throw armnn::Exception("GetDefaultAllocator: Function has not been implemented in backend.");
202 }
203
204 /// Returns the number of files cached if backend supports caching
205 ///
206 /// \return - Returns 0 if backend does not support caching otherwise number of files cached
207 virtual unsigned int GetNumberOfCacheFiles() const { return 0; }
208};
209
210using IBackendInternalUniquePtr = std::unique_ptr<IBackendInternal>;
211
212} // namespace armnn
Base class for all ArmNN exceptions so that users can filter to just those.
virtual const BackendId & GetId() const =0
virtual BackendCapabilities GetCapabilities() const
Returns a BackendCapability if the backend lists the capability. The BackendCapability must then be inspected to check whether or not that BackendCapability is supported; otherwise an EmptyOptional is returned if the BackendCapability is unlisted.
virtual IBackendContextPtr CreateBackendContext(const IRuntime::CreationOptions &) const
Create the runtime context of the backend.
virtual IMemoryManagerUniquePtr CreateMemoryManager() const
std::shared_ptr< ILayerSupport > ILayerSupportSharedPtr
std::unique_ptr< IMemoryManager > IMemoryManagerUniquePtr
virtual std::unique_ptr< ICustomAllocator > GetDefaultAllocator() const
Returns the default memory allocator for the backend.
std::unique_ptr< arm::pipe::IBackendProfiling > IBackendProfilingPtr
virtual IBackendSpecificModelContextPtr CreateBackendSpecificModelContext(const ModelOptions &modelOptions) const
std::shared_ptr< IBackendModelContext > IBackendSpecificModelContextPtr
virtual void RegisterTensorHandleFactories(class TensorHandleFactoryRegistry &)
(Optional) Register TensorHandleFactories. Either this method or CreateMemoryManager() and the IWorkloadFactory::CreateTensor()/IWorkloadFactory::CreateSubtensor() methods must be implemented.
virtual OptimizationViews OptimizeSubgraphView(const SubgraphView &subgraph) const
IBackendInternal()=default
Creation must be done through a specific backend interface.
std::shared_ptr< IMemoryManager > IMemoryManagerSharedPtr
virtual IBackendProfilingContextPtr CreateBackendProfilingContext(const IRuntime::CreationOptions &creationOptions, IBackendProfilingPtr &backendProfiling)
Create context specifically used for profiling interaction from backends.
ITensorHandleFactory::FactoryId GetBackwardCompatibleFavoriteHandleFactory()
std::unique_ptr< IWorkloadFactory > IWorkloadFactoryPtr
static constexpr BackendVersion GetApiVersion()
Returns the version of the Backend API.
virtual unsigned int GetNumberOfCacheFiles() const
Returns the number of files cached if backend supports caching.
virtual bool UseCustomMemoryAllocator(std::shared_ptr< ICustomAllocator > allocator, armnn::Optional< std::string & > errMsg)
Signals the backend to use a custom memory allocator provided by the user.
~IBackendInternal() override=default
Allow backends created by the factory function to be destroyed through IBackendInternal.
virtual std::vector< ITensorHandleFactory::FactoryId > GetHandleFactoryPreferences() const
(Optional) Returns a vector of supported TensorHandleFactory ids in preference order.
virtual ILayerSupportSharedPtr GetLayerSupport() const =0
std::unique_ptr< IBackendContext > IBackendContextPtr
std::shared_ptr< arm::pipe::IBackendProfilingContext > IBackendProfilingContextPtr
This is the bridge between backend and backend profiling we'll keep it in the backend namespace.
bool SupportsTensorAllocatorAPI() const
virtual IWorkloadFactoryPtr CreateWorkloadFactory(const IMemoryManagerSharedPtr &memoryManager=nullptr) const =0
The SubgraphView class represents a subgraph of a Graph.
Copyright (c) 2021 ARM Limited and Contributors.
unsigned int MemorySourceFlags
std::vector< BackendOptions > ModelOptions
std::ostream & operator<<(std::ostream &os, const std::vector< Compute > &compute)
Deprecated function that will be removed together with the Compute enum.
Definition BackendId.hpp:50
BackendOptions BackendCapabilities
std::unique_ptr< IBackendInternal > IBackendInternalUniquePtr
void IgnoreUnused(Ts &&...)
bool operator<=(const BackendVersion &other) const
constexpr BackendVersion(uint32_t major, uint32_t minor)
bool operator==(const BackendVersion &other) const
bool operator>=(const BackendVersion &other) const