SampleDynamicWorkloadFactory::SampleDynamicWorkloadFactory(std::shared_ptr<SampleMemoryManager>& memoryManager)
    : m_MemoryManager(memoryManager)
{
}

bool SampleDynamicWorkloadFactory::IsLayerSupported(const armnn::IConnectableLayer& layer,
                                                    armnn::Optional<armnn::DataType> dataType,
                                                    std::string& outReasonIfUnsupported)
{
    // s_Id is the backend id defined earlier in this file from GetBackendId().
    return armnn::IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported);
}

// Both overloads ignore the data layout and the memory-managed flag and return a
// ScopedTensorHandle, which owns its own CPU memory.
std::unique_ptr<armnn::ITensorHandle> SampleDynamicWorkloadFactory::CreateTensorHandle(
    const armnn::TensorInfo& tensorInfo, const bool isMemoryManaged) const
{
    return std::make_unique<armnn::ScopedTensorHandle>(tensorInfo);
}

std::unique_ptr<armnn::ITensorHandle> SampleDynamicWorkloadFactory::CreateTensorHandle(
    const armnn::TensorInfo& tensorInfo, armnn::DataLayout dataLayout, const bool isMemoryManaged) const
{
    return std::make_unique<armnn::ScopedTensorHandle>(tensorInfo);
}

// Addition is the only layer this sample backend actually executes; Input and
// Output are plain memory copies into and out of the backend.
std::unique_ptr<armnn::IWorkload> SampleDynamicWorkloadFactory::CreateAddition(
    const armnn::AdditionQueueDescriptor& descriptor, const armnn::WorkloadInfo& info) const
{
    return std::make_unique<SampleDynamicAdditionWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> SampleDynamicWorkloadFactory::CreateInput(
    const armnn::InputQueueDescriptor& descriptor, const armnn::WorkloadInfo& info) const
{
    return std::make_unique<armnn::CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> SampleDynamicWorkloadFactory::CreateOutput(
    const armnn::OutputQueueDescriptor& descriptor, const armnn::WorkloadInfo& info) const
{
    return std::make_unique<armnn::CopyMemGenericWorkload>(descriptor, info);
}

// Generic entry point: dispatch on the layer type and downcast the queue
// descriptor to the matching concrete descriptor type.
std::unique_ptr<armnn::IWorkload> SampleDynamicWorkloadFactory::CreateWorkload(armnn::LayerType type,
                                                                               const armnn::QueueDescriptor& descriptor,
                                                                               const armnn::WorkloadInfo& info) const
{
    using namespace armnn;
    switch (type)
    {
        case LayerType::Addition:
        {
            auto additionQueueDescriptor = PolymorphicDowncast<const AdditionQueueDescriptor*>(&descriptor);
            return std::make_unique<SampleDynamicAdditionWorkload>(*additionQueueDescriptor, info);
        }
        case LayerType::Input:
        {
            auto inputQueueDescriptor = PolymorphicDowncast<const InputQueueDescriptor*>(&descriptor);
            return std::make_unique<CopyMemGenericWorkload>(*inputQueueDescriptor, info);
        }
        case LayerType::Output:
        {
            auto outputQueueDescriptor = PolymorphicDowncast<const OutputQueueDescriptor*>(&descriptor);
            return std::make_unique<CopyMemGenericWorkload>(*outputQueueDescriptor, info);
        }
        default:
            return nullptr;
    }
}
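To show how the pieces above fit together, here is a minimal, hypothetical caller: it builds the factory with its memory manager and requests an Addition workload through the generic CreateWorkload entry point. The include paths, the helper name, and the way the descriptor and WorkloadInfo are obtained are assumptions for illustration; in practice the Arm NN runtime constructs these objects from the optimized graph and drives the factory itself.

#include <memory>

#include "SampleDynamicWorkloadFactory.hpp"   // assumed relative include paths
#include "SampleMemoryManager.hpp"

// Hypothetical helper; the runtime normally owns this sequence.
std::unique_ptr<armnn::IWorkload> MakeSampleAdditionWorkload(const armnn::AdditionQueueDescriptor& descriptor,
                                                             const armnn::WorkloadInfo& info)
{
    auto memoryManager = std::make_shared<sdb::SampleMemoryManager>();
    sdb::SampleDynamicWorkloadFactory factory(memoryManager);

    // Dispatches into the LayerType::Addition branch of CreateWorkload above.
    return factory.CreateWorkload(armnn::LayerType::Addition, descriptor, info);
}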
Declarations cross-referenced by the listing above (collected from the Doxygen tooltips on this page):

- const char* GetBackendId(): free function returning the backend id string.
- armnn::IConnectableLayer: interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
- SampleDynamicWorkloadFactory members:
  - SampleDynamicWorkloadFactory()
  - const armnn::BackendId& GetBackendId() const override
  - static bool IsLayerSupported(const armnn::IConnectableLayer& layer, armnn::Optional<armnn::DataType> dataType, std::string& outReasonIfUnsupported)
  - std::unique_ptr<armnn::ITensorHandle> CreateTensorHandle(const armnn::TensorInfo& tensorInfo, const bool IsMemoryManaged = true) const override
  - std::unique_ptr<armnn::IWorkload> CreateAddition(const armnn::AdditionQueueDescriptor& descriptor, const armnn::WorkloadInfo& info) const
  - std::unique_ptr<armnn::IWorkload> CreateInput(const armnn::InputQueueDescriptor& descriptor, const armnn::WorkloadInfo& info) const
  - std::unique_ptr<armnn::IWorkload> CreateOutput(const armnn::OutputQueueDescriptor& descriptor, const armnn::WorkloadInfo& info) const
  - std::unique_ptr<armnn::IWorkload> CreateWorkload(armnn::LayerType type, const armnn::QueueDescriptor& descriptor, const armnn::WorkloadInfo& info) const override: backends should implement their own CreateWorkload function with a switch statement.
- bool IsLayerSupported(const armnn::Layer* layer): a further layer-support overload referenced on this page.
- armnn::LayerType: when adding a new layer, adapt also the LastLayer enum value in the enum class LayerType (see the trimmed sketch after this list).
- armnn::WorkloadInfo: contains information about the TensorInfos of a layer.
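The LayerType note above refers to a bookkeeping convention: the enum keeps FirstLayer and LastLayer aliases pointing at its first and last real enumerators so that code can count, iterate over, or bounds-check layer kinds. The enum below is a deliberately trimmed, hypothetical illustration of that convention, not the real armnn::LayerType enumerator list.

// Hypothetical, trimmed illustration of the FirstLayer/LastLayer convention;
// the real armnn::LayerType has many more enumerators.
enum class LayerType
{
    Activation = 0,
    Addition,
    Input,
    Output,

    // When adding a new layer, add it above and point LastLayer at it.
    FirstLayer = Activation,
    LastLayer  = Output
};

// Example use of the aliases: the number of layer kinds.
constexpr unsigned int LayerTypeCount =
    static_cast<unsigned int>(LayerType::LastLayer) - static_cast<unsigned int>(LayerType::FirstLayer) + 1;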