RefRankWorkload.hpp (Arm NN 24.02)
//
// Copyright © 2020-2023 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once

#include "RefBaseWorkload.hpp"
#include <armnn/backends/WorkloadData.hpp>

#include "RefWorkloadUtils.hpp"

namespace armnn
{

struct RefRankWorkload : public RefBaseWorkload<RankQueueDescriptor>
{
public:
    using RefBaseWorkload<RankQueueDescriptor>::RefBaseWorkload;

    virtual void Execute() const override
    {
        Execute(m_Data.m_Inputs, m_Data.m_Outputs);
    }

    void ExecuteAsync(ExecutionData& executionData) override
    {
        WorkingMemDescriptor* workingMemDescriptor = static_cast<WorkingMemDescriptor*>(executionData.m_Data);
        Execute(workingMemDescriptor->m_Inputs, workingMemDescriptor->m_Outputs);
    }

private:
    void Execute(std::vector<ITensorHandle*> inputs, std::vector<ITensorHandle*> outputs) const
    {
        ARMNN_SCOPED_PROFILING_EVENT_REF_NAME_GUID("RefRankWorkload_Execute");

        const int32_t rank = static_cast<int32_t>(GetTensorInfo(inputs[0]).GetNumDimensions());

        std::memcpy(outputs[0]->Map(), &rank, sizeof(int32_t));
        outputs[0]->Unmap();
    }
};

} // namespace armnn
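For context, the private Execute() above reads the number of dimensions of the first input tensor and copies that value, as an int32_t, into the mapped output buffer. Below is a minimal standalone sketch of the same rank computation using plain C++ in place of Arm NN tensor handles; the SimpleTensor struct and ExecuteRank function are hypothetical illustrations, not part of the Arm NN API.

#include <cstdint>
#include <cstring>
#include <iostream>
#include <vector>

// Hypothetical stand-in for a tensor: only the shape matters for the Rank operation.
struct SimpleTensor
{
    std::vector<unsigned int> m_Shape; // e.g. {2, 3, 4} has rank 3

    unsigned int GetNumDimensions() const
    {
        return static_cast<unsigned int>(m_Shape.size());
    }
};

// Mirrors the body of RefRankWorkload::Execute: write the input's rank into the output buffer.
void ExecuteRank(const SimpleTensor& input, void* outputBuffer)
{
    const int32_t rank = static_cast<int32_t>(input.GetNumDimensions());
    std::memcpy(outputBuffer, &rank, sizeof(int32_t));
}

int main()
{
    SimpleTensor input{ {2, 3, 4} };
    int32_t output = 0;
    ExecuteRank(input, &output);
    std::cout << "Rank: " << output << std::endl; // prints "Rank: 3"
    return 0;
}

As the workload's code shows, the Rank operation produces a single int32 value, which is why one memcpy of sizeof(int32_t) bytes into the mapped output buffer is sufficient.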