36 #include "tests/validation/fixtures/ComputeAllAnchorsFixture.h"
// Wrap the bare NEComputeAllAnchorsKernel in the test framework's synthetic
// function adapter so it can be configured and run like a full NE function
// in the test cases below.
47 using NEComputeAllAnchors = NESynthetizeFunction<NEComputeAllAnchorsKernel>;
49 template <
typename U,
typename T>
52 std::memcpy(
tensor.data(), v.data(),
sizeof(T) * v.size());
60 std::memcpy(
tensor.data(), v.data(),
sizeof(T) * v.size());
64 const int channels =
tensor.shape()[0];
65 const int width =
tensor.shape()[1];
66 const int height =
tensor.shape()[2];
67 for(
int x = 0; x < width; ++x)
69 for(
int y = 0; y < height; ++y)
71 for(
int c = 0; c < channels; ++c)
73 *(
reinterpret_cast<T *
>(
tensor(Coordinates(c, x, y)))) = *(
reinterpret_cast<const T *
>(v.data() + x + y * width + c * height * width));
82 ComputeAnchorsInfo(10U, 10U, 1. / 16.f),
83 ComputeAnchorsInfo(100U, 1U, 1. / 2.f),
84 ComputeAnchorsInfo(100U, 1U, 1. / 4.f),
85 ComputeAnchorsInfo(100U, 100U, 1. / 4.f),
// Absolute tolerance (1 quantized step) used when validating the QSYMM16
// ComputeAllAnchors fixture results (see the qsymm16 validate() call below).
89 constexpr AbsoluteTolerance<int16_t> tolerance_qsymm16(1);
109 TensorInfo(TensorShape(100U, 100U, 38U), 1,
DataType::F32)})),
121 TensorInfo(TensorShape(5U, 100U*100U*9U), 1,
DataType::F32)})),
135 GenerateProposalsInfo(10.f, 10.f, 1.f),
136 GenerateProposalsInfo(10.f, 10.f, 1.f),
137 GenerateProposalsInfo(10.f, 10.f, 1.f),
138 GenerateProposalsInfo(10.f, 10.f, 1.f),
139 GenerateProposalsInfo(10.f, 10.f, 1.f)})),
141 scores, deltas, anchors, proposals, scores_out, num_valid_proposals, generate_proposals_info,
expected)
144 &deltas.clone()->set_is_resizable(
true),
145 &anchors.clone()->set_is_resizable(
true),
146 &proposals.clone()->set_is_resizable(
true),
147 &scores_out.clone()->set_is_resizable(
true),
148 &num_valid_proposals.clone()->set_is_resizable(
true),
154 template <
typename T>
162 const int values_per_roi = 4;
163 const int num_anchors = 3;
164 const int feature_height = 4;
165 const int feature_width = 3;
168 fill_tensor(anchors_expected, std::vector<float> { -26, -19, 87, 86,
210 NEComputeAllAnchors compute_anchors;
211 compute_anchors.configure(&anchors, &all_anchors,
ComputeAnchorsInfo(feature_width, feature_height, 1. / 16.0));
213 all_anchors.allocator()->allocate();
215 fill_tensor(Accessor(anchors), std::vector<float> { -26, -19, 87, 86,
220 compute_anchors.run();
221 validate(Accessor(all_anchors), anchors_expected);
228 const int values_per_roi = 4;
229 const int num_anchors = 2;
230 const int feature_height = 4;
231 const int feature_width = 5;
233 std::vector<float> scores_vector
235 5.055894435664012e-04f, 1.270304909820112e-03f, 2.492271113912067e-03f, 5.951663827809190e-03f,
236 7.846917156877404e-03f, 6.776275276294789e-03f, 6.761571012891965e-03f, 4.898292096237725e-03f,
237 6.044472332578605e-04f, 3.203334118759474e-03f, 2.947527908919908e-03f, 6.313238560015770e-03f,
238 7.931767757095738e-03f, 8.764345805102866e-03f, 7.325012199914913e-03f, 4.317069470446271e-03f,
239 2.372537409795522e-03f, 1.589227460352735e-03f, 7.419477503600818e-03f, 3.157690354133824e-05f,
240 1.125915135986472e-03f, 9.865363483872330e-03f, 2.429454743386769e-03f, 2.724460564167563e-03f,
241 7.670409838207963e-03f, 5.558891552328172e-03f, 7.876904873099614e-03f, 6.824746047239291e-03f,
242 7.023817548067892e-03f, 3.651314909238673e-04f, 6.720443709032501e-03f, 5.935615511606155e-03f,
243 2.837349642759774e-03f, 1.787235113610299e-03f, 4.538568889918262e-03f, 3.391510678188818e-03f,
244 7.328474239481874e-03f, 6.306967923936016e-03f, 8.102218904895860e-04f, 3.366646521610209e-03f
247 std::vector<float> bbx_vector
249 5.066650471856862e-03, -7.638671742936328e-03, 2.549596503988635e-03, -8.316416756423296e-03,
250 -2.397471917924575e-04, 7.370595187754891e-03, -2.771880178185262e-03, 3.958364873973579e-03,
251 4.493661094712284e-03, 2.016487051533088e-03, -5.893883038142033e-03, 7.570636080807809e-03,
252 -1.395511229386785e-03, 3.686686052704696e-03, -7.738166245767079e-03, -1.947306329828059e-03,
253 -9.299719716045681e-03, -3.476410493413708e-03, -2.390761190919604e-03, 4.359281254364210e-03,
254 -2.135251160164030e-04, 9.203299843371962e-03, 4.042322775006053e-03, -9.464271243910754e-03,
255 2.566239543229305e-03, -9.691093900220627e-03, -4.019283034310979e-03, 8.145470429508792e-03,
256 7.345087308315662e-04, 7.049642787384043e-03, -2.768492313674294e-03, 6.997160053405803e-03,
257 6.675346697112969e-03, 2.353293365652274e-03, -3.612002585241749e-04, 1.592076522068768e-03,
258 -8.354188900818149e-04, -5.232515333564140e-04, 6.946683728847089e-03, -8.469757407935994e-03,
259 -8.985324496496555e-03, 4.885832859017961e-03, -7.662967577576512e-03, 7.284124004335807e-03,
260 -5.812167510299458e-03, -5.760336800482398e-03, 6.040416930336549e-03, 5.861508595443691e-03,
261 -5.509243096133549e-04, -2.006142470055888e-03, -7.205925340416066e-03, -1.117459082969758e-03,
262 4.233247017623154e-03, 8.079257498201178e-03, 2.962639022639513e-03, 7.069474943472751e-03,
263 -8.562946284971293e-03, -8.228634642768271e-03, -6.116245322799971e-04, -7.213122000180859e-03,
264 1.693094399433209e-03, -4.287504459132290e-03, 8.740365683925144e-03, 3.751788160720638e-03,
265 7.006764222862830e-03, 9.676754678358187e-03, -6.458757235812945e-03, -4.486506575589758e-03,
266 -4.371087196816259e-03, 3.542166755953152e-03, -2.504808998699504e-03, 5.666601724512010e-03,
267 -3.691862724546129e-03, 3.689809719085287e-03, 9.079930264704458e-03, 6.365127787359476e-03,
268 2.881681788246101e-06, 9.991866069315165e-03, -1.104757466496565e-03, -2.668455405633477e-03,
269 -1.225748887087659e-03, 6.530536159094015e-03, 3.629468917975644e-03, 1.374426066950348e-03,
270 -2.404098881570632e-03, -4.791365049441602e-03, -2.970654027009094e-03, 7.807553690294366e-03,
271 -1.198321129505323e-03, -3.574885336949881e-03, -5.380848303732298e-03, 9.705151282165116e-03,
272 -1.005217683242201e-03, 9.178094036278405e-03, -5.615977269541644e-03, 5.333533158509859e-03,
273 -2.817116206168516e-03, 6.672609782000503e-03, 6.575769501651313e-03, 8.987596634989362e-03,
274 -1.283530791296188e-03, 1.687717120057778e-03, 3.242391851439037e-03, -7.312060454341677e-03,
275 4.735335326324270e-03, -6.832367028817463e-03, -5.414854835884652e-03, -9.352380213755996e-03,
276 -3.682662043703889e-03, -6.127508590419776e-04, -7.682256596819467e-03, 9.569532628790246e-03,
277 -1.572157284518933e-03, -6.023034366859191e-03, -5.110873282582924e-03, -8.697072236660256e-03,
278 -3.235150419663566e-03, -8.286320236471386e-03, -5.229472409112913e-03, 9.920785896115053e-03,
279 -2.478413362126123e-03, -9.261324796935007e-03, 1.718512310840434e-04, 3.015875488208480e-03,
280 -6.172932549255669e-03, -4.031715551985103e-03, -9.263878005853677e-03, -2.815310738453385e-03,
281 7.075307462133643e-03, 1.404611747938669e-03, -1.518548732533266e-03, -9.293430941655778e-03,
282 6.382186966633246e-03, 8.256835789169248e-03, 3.196907843506736e-03, 8.821615689753433e-03,
283 -7.661543424832439e-03, 1.636273081822326e-03, -8.792373335756125e-03, 2.958775812049877e-03,
284 -6.269300278071262e-03, 6.248285790856450e-03, -3.675414624536002e-03, -1.692616700318762e-03,
285 4.126007647815893e-03, -9.155291689759584e-03, -8.432616039924004e-03, 4.899980636213323e-03,
286 3.511535019681671e-03, -1.582745757177339e-03, -2.703657774917963e-03, 6.738168990840388e-03,
287 4.300455303937919e-03, 9.618312854781494e-03, 2.762142918402472e-03, -6.590025003382154e-03,
288 -2.071168373801788e-03, 8.613893943683627e-03, 9.411190295341036e-03, -6.129018930548372e-03
291 const std::vector<float> anchors_vector{ -26, -19, 87, 86, -81, -27, 58, 63 };
292 SimpleTensor<float> proposals_expected(TensorShape(5, 9),
DataType::F32);
295 0, 0, 0, 75.269, 64.4388,
296 0, 21.9579, 13.0535, 119, 99,
297 0, 38.303, 0, 119, 87.6447,
298 0, 0, 0, 119, 64.619,
299 0, 0, 20.7997, 74.0714, 99,
300 0, 0, 0, 91.8963, 79.3724,
301 0, 0, 4.42377, 58.1405, 95.1781,
302 0, 0, 13.4405, 104.799, 99,
303 0, 38.9066, 28.2434, 119, 99,
307 SimpleTensor<float> scores_expected(TensorShape(9),
DataType::F32);
321 TensorShape scores_shape = TensorShape(feature_width, feature_height, num_anchors);
322 TensorShape deltas_shape = TensorShape(feature_width, feature_height, values_per_roi * num_anchors);
331 Tensor anchors = create_tensor<Tensor>(TensorShape(values_per_roi, num_anchors),
data_type);
335 Tensor num_valid_proposals;
337 num_valid_proposals.allocator()->init(TensorInfo(TensorShape(1), 1,
DataType::U32));
339 NEGenerateProposalsLayer generate_proposals;
340 generate_proposals.configure(&scores, &bbox_deltas, &anchors, &proposals, &scores_out, &num_valid_proposals,
341 GenerateProposalsInfo(120, 100, 0.166667f, 1 / 16.0, 6000, 300, 0.7f, 16.0f));
344 scores.allocator()->allocate();
345 bbox_deltas.allocator()->allocate();
346 anchors.allocator()->allocate();
347 proposals.allocator()->allocate();
348 num_valid_proposals.allocator()->allocate();
349 scores_out.allocator()->allocate();
356 generate_proposals.run();
358 const uint32_t
N = *
reinterpret_cast<uint32_t *
>(num_valid_proposals.ptr_to_element(Coordinates(0, 0)));
362 NESlice select_proposals;
363 select_proposals.configure(&proposals, &proposals_final, Coordinates(0, 0), Coordinates(values_per_roi + 1,
N));
365 proposals_final.allocator()->allocate();
366 select_proposals.run();
370 NESlice select_scores;
371 select_scores.configure(&scores_out, &scores_final, Coordinates(0), Coordinates(
N));
372 scores_final.allocator()->allocate();
385 validate(Accessor(_target), _reference);
388 #ifdef __ARM_FEATURE_FP16_VECTOR_ARITHMETIC
394 validate(Accessor(_target), _reference);
397 #endif // __ARM_FEATURE_FP16_VECTOR_ARITHMETIC
401 template <typename T>
402 using NEComputeAllAnchorsQuantizedFixture = ComputeAllAnchorsQuantizedFixture<Tensor, Accessor, NEComputeAllAnchors, T>;
412 validate(Accessor(_target), _reference, tolerance_qsymm16);