ArmNN 25.11
ClWorkloadFactory.hpp
//
// Copyright © 2017-2023 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include <armnn/IRuntime.hpp>
#include <armnn/Optional.hpp>

#include <arm_compute/core/CL/CLCompileContext.h>

namespace armnn
{

// ARM Compute OpenCL workload factory.
class ClWorkloadFactory : public WorkloadFactoryBase
{
public:
    ClWorkloadFactory(const std::shared_ptr<ClMemoryManager>& memoryManager);

    ClWorkloadFactory(const std::shared_ptr<ClMemoryManager>& memoryManager,
                      const IBackendInternal::IBackendSpecificModelContextPtr& modelContextPtr);

    void AfterWorkloadsCreated() override;

    const BackendId& GetBackendId() const override;

    static bool IsLayerSupported(const Layer& layer,
                                 Optional<DataType> dataType,
                                 std::string& outReasonIfUnsupported);

    static bool IsLayerSupported(const IConnectableLayer& layer,
                                 Optional<DataType> dataType,
                                 std::string& outReasonIfUnsupported,
                                 const ModelOptions& modelOptions);

    bool SupportsSubTensors() const override { return true; }

    ARMNN_DEPRECATED_MSG("Use ITensorHandleFactory::CreateSubTensorHandle instead")
    std::unique_ptr<ITensorHandle> CreateSubTensorHandle(ITensorHandle& parent,
                                                         TensorShape const& subTensorShape,
                                                         unsigned int const* subTensorOrigin) const override;

    ARMNN_DEPRECATED_MSG("Use ITensorHandleFactory::CreateTensorHandle instead")
    std::unique_ptr<ITensorHandle> CreateTensorHandle(const TensorInfo& tensorInfo,
                                                      const bool IsMemoryManaged = true) const override;

    ARMNN_DEPRECATED_MSG("Use ITensorHandleFactory::CreateTensorHandle instead")
    std::unique_ptr<ITensorHandle> CreateTensorHandle(const TensorInfo& tensorInfo,
                                                      DataLayout dataLayout,
                                                      const bool IsMemoryManaged = true) const override;

    std::unique_ptr<IWorkload> CreateWorkload(LayerType type,
                                              const QueueDescriptor& descriptor,
                                              const WorkloadInfo& info) const override;

private:
    template<typename FloatWorkload, typename Uint8Workload, typename QueueDescriptorType, typename... Args>
    static std::unique_ptr<IWorkload> MakeWorkload(const QueueDescriptorType& descriptor,
                                                   const WorkloadInfo& info,
                                                   Args&&... args);

    template <typename Workload, typename QueueDescriptorType, typename... Args>
    static std::unique_ptr<IWorkload> MakeWorkload(const QueueDescriptorType& descriptor,
                                                   const WorkloadInfo& info,
                                                   Args&&... args);

    void InitializeCLCompileContext();

    mutable std::shared_ptr<ClMemoryManager> m_MemoryManager;
    const IBackendInternal::IBackendSpecificModelContextPtr m_ModelContextPtr;
    arm_compute::CLCompileContext m_CLCompileContext;
};

} // namespace armnn
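
In normal use this factory is created by the CL backend and driven by the Arm NN runtime, which asks it for one workload per layer of a loaded network. The following is a minimal usage sketch only; the include paths and the helper function MakeActivationWorkload are assumptions for illustration, not part of this header.

#include "ClWorkloadFactory.hpp"

#include <armnn/backends/WorkloadData.hpp>

#include <memory>

// Hypothetical helper: builds an activation workload from an existing CL memory manager
// (normally the memory manager is created and owned by the CL backend).
std::unique_ptr<armnn::IWorkload> MakeActivationWorkload(
    const std::shared_ptr<armnn::ClMemoryManager>& memoryManager,
    const armnn::ActivationQueueDescriptor& descriptor,
    const armnn::WorkloadInfo& info)
{
    armnn::ClWorkloadFactory factory(memoryManager);

    // Dispatches on LayerType and returns the matching OpenCL-backed workload,
    // or an empty pointer if the layer type is not handled.
    return factory.CreateWorkload(armnn::LayerType::Activation, descriptor, info);
}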
Referenced symbols

ARMNN_DEPRECATED_MSG(message)

armnn::ClWorkloadFactory
    ClWorkloadFactory(const std::shared_ptr<ClMemoryManager>& memoryManager)
    void AfterWorkloadsCreated() override
    const BackendId& GetBackendId() const override
    bool SupportsSubTensors() const override
    static bool IsLayerSupported(const Layer& layer, Optional<DataType> dataType, std::string& outReasonIfUnsupported)
    std::unique_ptr<ITensorHandle> CreateSubTensorHandle(ITensorHandle& parent, TensorShape const& subTensorShape, unsigned int const* subTensorOrigin) const override
    std::unique_ptr<ITensorHandle> CreateTensorHandle(const TensorInfo& tensorInfo, const bool IsMemoryManaged = true) const override
    std::unique_ptr<IWorkload> CreateWorkload(LayerType type, const QueueDescriptor& descriptor, const WorkloadInfo& info) const override
        Backends should implement their own CreateWorkload function with a switch statement (a sketch of this pattern follows this list).

armnn::IBackendInternal::IBackendSpecificModelContextPtr
    std::shared_ptr<IBackendModelContext>

armnn::IConnectableLayer
    Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
    Definition INetwork.hpp:81

armnn::IWorkload
    Workload interface to enqueue a layer computation.
    Definition IWorkload.hpp:14

armnn (namespace)
    Copyright (c) 2021 ARM Limited and Contributors.

armnn::LayerType
    When adding a new layer, adapt also the LastLayer enum value in the enum class LayerType below.
    Definition Types.hpp:494

armnn::ModelOptions
    std::vector<BackendOptions>

armnn::FloatWorkload
    TypedWorkload<QueueDescriptor, armnn::DataType::Float16, armnn::DataType::Float32>
    Definition Workload.hpp:195

armnn::DataLayout
    Definition Types.hpp:63

armnn::Uint8Workload
    TypedWorkload<QueueDescriptor, armnn::DataType::QAsymmU8>
    Definition Workload.hpp:203

armnn::WorkloadInfo
    Contains information about TensorInfos of a layer.
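
The note above, that backends should implement their own CreateWorkload function with a switch statement, refers to the dispatch pattern used by the workload factories: switch on LayerType, downcast the generic QueueDescriptor to the layer-specific descriptor, and construct the backend's workload from it. Below is a rough sketch of that pattern; ExampleCreateWorkload is hypothetical and is not the actual body of ClWorkloadFactory::CreateWorkload, and the include paths are assumptions.

#include <armnn/backends/IWorkload.hpp>
#include <armnn/backends/WorkloadData.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>

#include <memory>

std::unique_ptr<armnn::IWorkload> ExampleCreateWorkload(armnn::LayerType type,
                                                        const armnn::QueueDescriptor& descriptor,
                                                        const armnn::WorkloadInfo& info)
{
    using namespace armnn;
    IgnoreUnused(info);
    switch (type)
    {
        case LayerType::Activation:
        {
            // Recover the layer-specific descriptor from the generic base reference.
            auto* activationDescriptor = PolymorphicDowncast<const ActivationQueueDescriptor*>(&descriptor);
            IgnoreUnused(activationDescriptor);
            // A real backend would construct its OpenCL-backed activation workload here,
            // typically through a helper such as the private MakeWorkload templates above.
            return nullptr;
        }
        default:
            // Unhandled layer types: return an empty pointer.
            return nullptr;
    }
}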