24 #ifndef ARM_COMPUTE_HELPER_H 25 #define ARM_COMPUTE_HELPER_H 29 #if defined(ARM_COMPUTE_OPENCL_FP16_ENABLED) && defined(cl_khr_fp16) 30 #pragma OPENCL EXTENSION cl_khr_fp16 : enable 31 #endif // defined(ARM_COMPUTE_OPENCL_FP16_ENABLED) && defined(cl_khr_fp16) 33 #if defined(ARM_COMPUTE_OPENCL_DOT8_ENABLED) && defined(cl_arm_integer_dot_product_int8) 34 #pragma OPENCL EXTENSION cl_arm_integer_dot_product_int8 : enable 35 #endif // defined(ARM_COMPUTE_OPENCL_DOT8_ENABLED) && defined(cl_arm_integer_dot_product_int8) 37 #if defined(ARM_COMPUTE_OPENCL_DOT8_ACC_ENABLED) && defined(cl_arm_integer_dot_product_accumulate_int8) 38 #pragma OPENCL EXTENSION cl_arm_integer_dot_product_accumulate_int8 : enable 39 #endif // defined(ARM_COMPUTE_OPENCL_DOT8_ACC_ENABLED) && defined(cl_arm_integer_dot_product_accumulate_int8) 41 #if defined(ARM_COMPUTE_DEBUG_ENABLED) && defined(cl_arm_printf) 42 #pragma OPENCL EXTENSION cl_arm_printf : enable 43 #endif // defined(ARM_COMPUTE_DEBUG_ENABLED) && defined(cl_arm_printf) 45 #define GPU_ARCH_MIDGARD 0x100 46 #define GPU_ARCH_BIFROST 0x200 55 #define CONCAT(a, b) a##b 73 #define CLAMP(x, min_val, max_val) min(max(x, min_val), max_val) 84 #define REV2(x) ((x).s10) 85 #define REV3(x) ((x).s210) 86 #define REV4(x) ((x).s3210) 87 #define REV8(x) ((x).s76543210) 88 #define REV16(x) ((x).sFEDCBA9876543210) 100 #define REVERSE_STR(x, s) REV##s((x)) 101 #define REVERSE(x, s) REVERSE_STR(x, s) 112 #define ROT1_0(x) ((x)) 113 #define ROT1_1(x) ((x)) 115 #define ROT2_0(x) ((x)) 116 #define ROT2_1(x) ((x).s10) 117 #define ROT2_2(x) ((x)) 119 #define ROT3_0(x) ((x)) 120 #define ROT3_1(x) ((x).s201) 121 #define ROT3_2(x) ((x).s120) 122 #define ROT3_3(x) ((x)) 124 #define ROT4_0(x) ((x)) 125 #define ROT4_1(x) ((x).s3012) 126 #define ROT4_2(x) ((x).s2301) 127 #define ROT4_3(x) ((x).s1230) 128 #define ROT4_4(x) ((x)) 130 #define ROT8_0(x) ((x)) 131 #define ROT8_1(x) ((x).s70123456) 132 #define ROT8_2(x) ((x).s67012345) 133 #define ROT8_3(x) 
((x).s56701234) 134 #define ROT8_4(x) ((x).s45670123) 135 #define ROT8_5(x) ((x).s34567012) 136 #define ROT8_6(x) ((x).s23456701) 137 #define ROT8_7(x) ((x).s12345670) 138 #define ROT8_8(x) ((x)) 140 #define ROT16_0(x) ((x)) 141 #define ROT16_1(x) ((x).sF0123456789ABCDE) 142 #define ROT16_2(x) ((x).sEF0123456789ABCD) 143 #define ROT16_3(x) ((x).sDEF0123456789ABC) 144 #define ROT16_4(x) ((x).sCDEF0123456789AB) 145 #define ROT16_5(x) ((x).sBCDEF0123456789A) 146 #define ROT16_6(x) ((x).sABCDEF0123456789) 147 #define ROT16_7(x) ((x).s9ABCDEF012345678) 148 #define ROT16_8(x) ((x).s89ABCDEF01234567) 149 #define ROT16_9(x) ((x).s789ABCDEF0123456) 150 #define ROT16_10(x) ((x).s6789ABCDEF012345) 151 #define ROT16_11(x) ((x).s56789ABCDEF01234) 152 #define ROT16_12(x) ((x).s456789ABCDEF0123) 153 #define ROT16_13(x) ((x).s3456789ABCDEF012) 154 #define ROT16_14(x) ((x).s23456789ABCDEF01) 155 #define ROT16_15(x) ((x).s123456789ABCDEF0) 156 #define ROT16_16(x) ((x)) 169 #define ROTATE_STR(x, s, n) ROT##s##_##n(x) 170 #define ROTATE(x, s, n) ROTATE_STR(x, s, n) 181 #define V_OFFS1(dt) (dt##1)(0) 182 #define V_OFFS2(dt) (dt##2)(0, 1) 183 #define V_OFFS3(dt) (dt##3)(0, 1, 2) 184 #define V_OFFS4(dt) (dt##4)(0, 1, 2, 3) 185 #define V_OFFS8(dt) (dt##8)(0, 1, 2, 3, 4, 5, 6, 7) 186 #define V_OFFS16(dt) (dt##16)(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15) 198 #define VEC_OFFS_STR(dt, s) V_OFFS##s(dt) 199 #define VEC_OFFS(dt, s) VEC_OFFS_STR(dt, s) 202 #define VLOAD_STR(size) vload##size 203 #define VLOAD(size) VLOAD_STR(size) 220 #define VLOAD_PARTIAL_STR(size, load_size) vload_partial_##size##_##load_size 221 #define VLOAD_PARTIAL(size, load_size) VLOAD_PARTIAL_STR(size, load_size) 223 #define NO_LOAD(data, offs, ptr) \ 228 #define vload_partial_1_0 NO_LOAD 229 #define vload_partial_1_1 vload1 230 #define vload_partial_1_2 NO_LOAD 231 #define vload_partial_1_3 NO_LOAD 232 #define vload_partial_1_4 NO_LOAD 233 #define vload_partial_1_5 NO_LOAD 234 #define vload_partial_1_6 
NO_LOAD 235 #define vload_partial_1_7 NO_LOAD 236 #define vload_partial_1_8 NO_LOAD 237 #define vload_partial_1_9 NO_LOAD 238 #define vload_partial_1_10 NO_LOAD 239 #define vload_partial_1_11 NO_LOAD 240 #define vload_partial_1_12 NO_LOAD 241 #define vload_partial_1_13 NO_LOAD 242 #define vload_partial_1_14 NO_LOAD 243 #define vload_partial_1_15 NO_LOAD 244 #define vload_partial_1_16 NO_LOAD 246 #define vload_partial_2_0 NO_LOAD 247 #define vload_partial_2_1 vload_partial_1 248 #define vload_partial_2_2 vload_partial_2 249 #define vload_partial_2_3 NO_LOAD 250 #define vload_partial_2_4 NO_LOAD 251 #define vload_partial_2_5 NO_LOAD 252 #define vload_partial_2_6 NO_LOAD 253 #define vload_partial_2_7 NO_LOAD 254 #define vload_partial_2_8 NO_LOAD 255 #define vload_partial_2_9 NO_LOAD 256 #define vload_partial_2_10 NO_LOAD 257 #define vload_partial_2_11 NO_LOAD 258 #define vload_partial_2_12 NO_LOAD 259 #define vload_partial_2_13 NO_LOAD 260 #define vload_partial_2_14 NO_LOAD 261 #define vload_partial_2_15 NO_LOAD 262 #define vload_partial_2_16 NO_LOAD 264 #define vload_partial_3_0 NO_LOAD 265 #define vload_partial_3_1 vload_partial_1 266 #define vload_partial_3_2 vload_partial_2 267 #define vload_partial_3_3 vload_partial_3 268 #define vload_partial_3_4 NO_LOAD 269 #define vload_partial_3_5 NO_LOAD 270 #define vload_partial_3_6 NO_LOAD 271 #define vload_partial_3_7 NO_LOAD 272 #define vload_partial_3_8 NO_LOAD 273 #define vload_partial_3_9 NO_LOAD 274 #define vload_partial_3_10 NO_LOAD 275 #define vload_partial_3_11 NO_LOAD 276 #define vload_partial_3_12 NO_LOAD 277 #define vload_partial_3_13 NO_LOAD 278 #define vload_partial_3_14 NO_LOAD 279 #define vload_partial_3_15 NO_LOAD 280 #define vload_partial_3_16 NO_LOAD 282 #define vload_partial_4_0 NO_LOAD 283 #define vload_partial_4_1 vload_partial_1 284 #define vload_partial_4_2 vload_partial_2 285 #define vload_partial_4_3 vload_partial_3 286 #define vload_partial_4_4 vload_partial_4 287 #define vload_partial_4_5 
NO_LOAD 288 #define vload_partial_4_6 NO_LOAD 289 #define vload_partial_4_7 NO_LOAD 290 #define vload_partial_4_8 NO_LOAD 291 #define vload_partial_4_9 NO_LOAD 292 #define vload_partial_4_10 NO_LOAD 293 #define vload_partial_4_11 NO_LOAD 294 #define vload_partial_4_12 NO_LOAD 295 #define vload_partial_4_13 NO_LOAD 296 #define vload_partial_4_14 NO_LOAD 297 #define vload_partial_4_15 NO_LOAD 298 #define vload_partial_4_16 NO_LOAD 300 #define vload_partial_8_0 NO_LOAD 301 #define vload_partial_8_1 vload_partial_1 302 #define vload_partial_8_2 vload_partial_2 303 #define vload_partial_8_3 vload_partial_3 304 #define vload_partial_8_4 vload_partial_4 305 #define vload_partial_8_5 vload_partial_5 306 #define vload_partial_8_6 vload_partial_6 307 #define vload_partial_8_7 vload_partial_7 308 #define vload_partial_8_8 vload_partial_8 309 #define vload_partial_8_9 NO_LOAD 310 #define vload_partial_8_10 NO_LOAD 311 #define vload_partial_8_11 NO_LOAD 312 #define vload_partial_8_12 NO_LOAD 313 #define vload_partial_8_13 NO_LOAD 314 #define vload_partial_8_14 NO_LOAD 315 #define vload_partial_8_15 NO_LOAD 316 #define vload_partial_8_16 NO_LOAD 318 #define vload_partial_16_0 NO_LOAD 319 #define vload_partial_16_1 vload_partial_1 320 #define vload_partial_16_2 vload_partial_2 321 #define vload_partial_16_3 vload_partial_3 322 #define vload_partial_16_4 vload_partial_4 323 #define vload_partial_16_5 vload_partial_5 324 #define vload_partial_16_6 vload_partial_6 325 #define vload_partial_16_7 vload_partial_7 326 #define vload_partial_16_8 vload_partial_8 327 #define vload_partial_16_9 vload_partial_9 328 #define vload_partial_16_10 vload_partial_10 329 #define vload_partial_16_11 vload_partial_11 330 #define vload_partial_16_12 vload_partial_12 331 #define vload_partial_16_13 vload_partial_13 332 #define vload_partial_16_14 vload_partial_14 333 #define vload_partial_16_15 vload_partial_15 334 #define vload_partial_16_16 vload_partial_16 353 #define vload_partial_1(DATA, OFFSET, 
PTR) \ 354 DATA.s0 = vload1(OFFSET, PTR); 356 #define vload_partial_2(DATA, OFFSET, PTR) \ 357 DATA.s01 = vload2(OFFSET, PTR); 359 #define vload_partial_3(DATA, OFFSET, PTR) \ 360 DATA.s012 = vload3(OFFSET, PTR); 362 #define vload_partial_4(DATA, OFFSET, PTR) \ 363 DATA.s0123 = vload4(OFFSET, PTR); 365 #define vload_partial_5(DATA, OFFSET, PTR) \ 366 vload_partial_4(DATA.s0123, OFFSET, PTR); \ 367 DATA.s4 = vload1(OFFSET, PTR + 4); 369 #define vload_partial_6(DATA, OFFSET, PTR) \ 370 vload_partial_4(DATA.s0123, OFFSET, PTR); \ 371 vload_partial_2(DATA.s45, OFFSET, PTR + 4); 373 #define vload_partial_7(DATA, OFFSET, PTR) \ 374 vload_partial_4(DATA.s0123, OFFSET, PTR); \ 375 vload_partial_3(DATA.s456, OFFSET, PTR + 4); 377 #define vload_partial_8(DATA, OFFSET, PTR) \ 378 DATA.s01234567 = vload8(OFFSET, PTR); 380 #define vload_partial_9(DATA, OFFSET, PTR) \ 381 vload_partial_8(DATA.s01234567, OFFSET, PTR); \ 382 DATA.s8 = vload1(OFFSET, PTR + 8); 384 #define vload_partial_10(DATA, OFFSET, PTR) \ 385 vload_partial_8(DATA.s01234567, OFFSET, PTR); \ 386 vload_partial_2(DATA.s89, OFFSET, PTR + 8); 388 #define vload_partial_11(DATA, OFFSET, PTR) \ 389 vload_partial_8(DATA.s01234567, OFFSET, PTR); \ 390 vload_partial_3(DATA.s89A, OFFSET, PTR + 8); 392 #define vload_partial_12(DATA, OFFSET, PTR) \ 393 vload_partial_8(DATA.s01234567, OFFSET, PTR); \ 394 vload_partial_4(DATA.s89AB, OFFSET, PTR + 8); 396 #define vload_partial_13(DATA, OFFSET, PTR) \ 397 vload_partial_8(DATA.s01234567, OFFSET, PTR); \ 398 vload_partial_5(DATA.s89ABC, OFFSET, PTR + 8); 400 #define vload_partial_14(DATA, OFFSET, PTR) \ 401 vload_partial_8(DATA.s01234567, OFFSET, PTR); \ 402 vload_partial_6(DATA.s89ABCD, OFFSET, PTR + 8); 404 #define vload_partial_15(DATA, OFFSET, PTR) \ 405 vload_partial_8(DATA.s01234567, OFFSET, PTR); \ 406 vload_partial_7(DATA.s89ABCDE, OFFSET, PTR + 8); 408 #define vload_partial_16(DATA, OFFSET, PTR) \ 409 DATA = vload16(OFFSET, PTR); 413 #define PIXEL_UNIT4 1 414 #define 
PIXEL_UNIT8 2 415 #define PIXEL_UNIT16 4 426 #define CONVERT_VECTOR_SIZE_TO_PIXEL_UNIT_STR(vec_size) PIXEL_UNIT##vec_size 427 #define CONVERT_VECTOR_SIZE_TO_PIXEL_UNIT(vec_size) CONVERT_VECTOR_SIZE_TO_PIXEL_UNIT_STR(vec_size) 430 #define read_image2d_floatx1(img, x_coord, y_coord) (float4)(read_imagef(img, (int2)(x_coord, y_coord))); 431 #define read_image2d_floatx2(img, x_coord, y_coord) (float8)(read_imagef(img, (int2)(x_coord, y_coord)), read_imagef(img, (int2)(x_coord + 1, y_coord))); 432 #define read_image2d_floatx4(img, x_coord, y_coord) (float16)(read_imagef(img, (int2)(x_coord, y_coord)), read_imagef(img, (int2)(x_coord + 1, y_coord)), read_imagef(img, (int2)(x_coord + 2, y_coord)), read_imagef(img, (int2)(x_coord + 3, y_coord))); 434 #if defined(ARM_COMPUTE_OPENCL_FP16_ENABLED) && defined(cl_khr_fp16) 435 #define read_image2d_halfx1(img, x_coord, y_coord) (half4)(read_imageh(img, (int2)(x_coord, y_coord))); 436 #define read_image2d_halfx2(img, x_coord, y_coord) (half8)(read_imageh(img, (int2)(x_coord, y_coord)), read_imageh(img, (int2)(x_coord + 1, y_coord))); 437 #define read_image2d_halfx4(img, x_coord, y_coord) (half16)(read_imageh(img, (int2)(x_coord, y_coord)), read_imageh(img, (int2)(x_coord + 1, y_coord)), read_imageh(img, (int2)(x_coord + 2, y_coord)), read_imageh(img, (int2)(x_coord + 3, y_coord))); 438 #endif // defined(ARM_COMPUTE_OPENCL_FP16_ENABLED) && defined(cl_khr_fp16) 453 #define READ_IMAGE2D_STR(data_type, n0, img, x_coord, y_coord) read_image2d_##data_type##x##n0(img, x_coord, y_coord) 454 #define READ_IMAGE2D(data_type, n0, img, x_coord, y_coord) READ_IMAGE2D_STR(data_type, n0, img, x_coord, y_coord) 456 #define VSTORE_STR(size) vstore##size 457 #define VSTORE(size) VSTORE_STR(size) 464 #define ushort1 ushort 469 #define double1 double 471 #define vload1(OFFSET, PTR) *(OFFSET + PTR) 472 #define vstore1(DATA, OFFSET, PTR) *(OFFSET + PTR) = DATA 489 #define VSTORE_PARTIAL_STR(size, store_size) vstore_partial_##size##_##store_size 490 
#define VSTORE_PARTIAL(size, store_size) VSTORE_PARTIAL_STR(size, store_size) 492 #define NO_STORE(data, offs, ptr) \ 497 #define vstore_partial_1_0 NO_STORE 498 #define vstore_partial_1_1 vstore1 499 #define vstore_partial_1_2 NO_STORE 500 #define vstore_partial_1_3 NO_STORE 501 #define vstore_partial_1_4 NO_STORE 502 #define vstore_partial_1_5 NO_STORE 503 #define vstore_partial_1_6 NO_STORE 504 #define vstore_partial_1_7 NO_STORE 505 #define vstore_partial_1_8 NO_STORE 506 #define vstore_partial_1_9 NO_STORE 507 #define vstore_partial_1_10 NO_STORE 508 #define vstore_partial_1_11 NO_STORE 509 #define vstore_partial_1_12 NO_STORE 510 #define vstore_partial_1_13 NO_STORE 511 #define vstore_partial_1_14 NO_STORE 512 #define vstore_partial_1_15 NO_STORE 513 #define vstore_partial_1_16 NO_STORE 515 #define vstore_partial_2_0 NO_STORE 516 #define vstore_partial_2_1 vstore_partial_1 517 #define vstore_partial_2_2 vstore_partial_2 518 #define vstore_partial_2_3 NO_STORE 519 #define vstore_partial_2_4 NO_STORE 520 #define vstore_partial_2_5 NO_STORE 521 #define vstore_partial_2_6 NO_STORE 522 #define vstore_partial_2_7 NO_STORE 523 #define vstore_partial_2_8 NO_STORE 524 #define vstore_partial_2_9 NO_STORE 525 #define vstore_partial_2_10 NO_STORE 526 #define vstore_partial_2_11 NO_STORE 527 #define vstore_partial_2_12 NO_STORE 528 #define vstore_partial_2_13 NO_STORE 529 #define vstore_partial_2_14 NO_STORE 530 #define vstore_partial_2_15 NO_STORE 531 #define vstore_partial_2_16 NO_STORE 533 #define vstore_partial_3_0 NO_STORE 534 #define vstore_partial_3_1 vstore_partial_1 535 #define vstore_partial_3_2 vstore_partial_2 536 #define vstore_partial_3_3 vstore_partial_3 537 #define vstore_partial_3_4 NO_STORE 538 #define vstore_partial_3_5 NO_STORE 539 #define vstore_partial_3_6 NO_STORE 540 #define vstore_partial_3_7 NO_STORE 541 #define vstore_partial_3_8 NO_STORE 542 #define vstore_partial_3_9 NO_STORE 543 #define vstore_partial_3_10 NO_STORE 544 #define 
vstore_partial_3_11 NO_STORE 545 #define vstore_partial_3_12 NO_STORE 546 #define vstore_partial_3_13 NO_STORE 547 #define vstore_partial_3_14 NO_STORE 548 #define vstore_partial_3_15 NO_STORE 549 #define vstore_partial_3_16 NO_STORE 551 #define vstore_partial_4_0 NO_STORE 552 #define vstore_partial_4_1 vstore_partial_1 553 #define vstore_partial_4_2 vstore_partial_2 554 #define vstore_partial_4_3 vstore_partial_3 555 #define vstore_partial_4_4 vstore_partial_4 556 #define vstore_partial_4_5 NO_STORE 557 #define vstore_partial_4_6 NO_STORE 558 #define vstore_partial_4_7 NO_STORE 559 #define vstore_partial_4_8 NO_STORE 560 #define vstore_partial_4_9 NO_STORE 561 #define vstore_partial_4_10 NO_STORE 562 #define vstore_partial_4_11 NO_STORE 563 #define vstore_partial_4_12 NO_STORE 564 #define vstore_partial_4_13 NO_STORE 565 #define vstore_partial_4_14 NO_STORE 566 #define vstore_partial_4_15 NO_STORE 567 #define vstore_partial_4_16 NO_STORE 569 #define vstore_partial_8_0 NO_STORE 570 #define vstore_partial_8_1 vstore_partial_1 571 #define vstore_partial_8_2 vstore_partial_2 572 #define vstore_partial_8_3 vstore_partial_3 573 #define vstore_partial_8_4 vstore_partial_4 574 #define vstore_partial_8_5 vstore_partial_5 575 #define vstore_partial_8_6 vstore_partial_6 576 #define vstore_partial_8_7 vstore_partial_7 577 #define vstore_partial_8_8 vstore_partial_8 578 #define vstore_partial_8_9 NO_STORE 579 #define vstore_partial_8_10 NO_STORE 580 #define vstore_partial_8_11 NO_STORE 581 #define vstore_partial_8_12 NO_STORE 582 #define vstore_partial_8_13 NO_STORE 583 #define vstore_partial_8_14 NO_STORE 584 #define vstore_partial_8_15 NO_STORE 585 #define vstore_partial_8_16 NO_STORE 587 #define vstore_partial_16_0 NO_STORE 588 #define vstore_partial_16_1 vstore_partial_1 589 #define vstore_partial_16_2 vstore_partial_2 590 #define vstore_partial_16_3 vstore_partial_3 591 #define vstore_partial_16_4 vstore_partial_4 592 #define vstore_partial_16_5 vstore_partial_5 593 
#define vstore_partial_16_6 vstore_partial_6 594 #define vstore_partial_16_7 vstore_partial_7 595 #define vstore_partial_16_8 vstore_partial_8 596 #define vstore_partial_16_9 vstore_partial_9 597 #define vstore_partial_16_10 vstore_partial_10 598 #define vstore_partial_16_11 vstore_partial_11 599 #define vstore_partial_16_12 vstore_partial_12 600 #define vstore_partial_16_13 vstore_partial_13 601 #define vstore_partial_16_14 vstore_partial_14 602 #define vstore_partial_16_15 vstore_partial_15 603 #define vstore_partial_16_16 vstore_partial_16 622 #define vstore_partial_1(DATA, OFFSET, PTR) \ 623 vstore1(DATA.s0, OFFSET, PTR); 625 #define vstore_partial_2(DATA, OFFSET, PTR) \ 626 vstore2(DATA.s01, OFFSET, PTR); 628 #define vstore_partial_3(DATA, OFFSET, PTR) \ 629 vstore3(DATA.s012, OFFSET, PTR); 631 #define vstore_partial_4(DATA, OFFSET, PTR) \ 632 vstore4(DATA.s0123, OFFSET, PTR); 634 #define vstore_partial_5(DATA, OFFSET, PTR) \ 635 vstore_partial_4(DATA.s0123, OFFSET, PTR); \ 636 vstore1(DATA.s4, OFFSET, PTR + 4); 638 #define vstore_partial_6(DATA, OFFSET, PTR) \ 639 vstore_partial_4(DATA.s0123, OFFSET, PTR); \ 640 vstore_partial_2(DATA.s45, OFFSET, PTR + 4); 642 #define vstore_partial_7(DATA, OFFSET, PTR) \ 643 vstore_partial_4(DATA.s0123, OFFSET, PTR); \ 644 vstore_partial_3(DATA.s456, OFFSET, PTR + 4); 646 #define vstore_partial_8(DATA, OFFSET, PTR) \ 647 vstore8(DATA.s01234567, OFFSET, PTR); 649 #define vstore_partial_9(DATA, OFFSET, PTR) \ 650 vstore_partial_8(DATA.s01234567, OFFSET, PTR); \ 651 vstore1(DATA.s8, OFFSET, PTR + 8); 653 #define vstore_partial_10(DATA, OFFSET, PTR) \ 654 vstore_partial_8(DATA.s01234567, OFFSET, PTR); \ 655 vstore_partial_2(DATA.s89, OFFSET, PTR + 8); 657 #define vstore_partial_11(DATA, OFFSET, PTR) \ 658 vstore_partial_8(DATA.s01234567, OFFSET, PTR); \ 659 vstore_partial_3(DATA.s89a, OFFSET, PTR + 8); 661 #define vstore_partial_12(DATA, OFFSET, PTR) \ 662 vstore_partial_8(DATA.s01234567, OFFSET, PTR); \ 663 
vstore_partial_4(DATA.s89ab, OFFSET, PTR + 8); 665 #define vstore_partial_13(DATA, OFFSET, PTR) \ 666 vstore_partial_8(DATA.s01234567, OFFSET, PTR); \ 667 vstore_partial_5(DATA.s89abcdef, OFFSET, PTR + 8); 669 #define vstore_partial_14(DATA, OFFSET, PTR) \ 670 vstore_partial_8(DATA.s01234567, OFFSET, PTR); \ 671 vstore_partial_6(DATA.s89abcdef, OFFSET, PTR + 8); 673 #define vstore_partial_15(DATA, OFFSET, PTR) \ 674 vstore_partial_8(DATA.s01234567, OFFSET, PTR); \ 675 vstore_partial_7(DATA.s89abcdef, OFFSET, PTR + 8); 677 #define vstore_partial_16(DATA, OFFSET, PTR) \ 678 vstore16(DATA, OFFSET, PTR); 684 #define convert_float_sat convert_float 685 #define convert_float1_sat convert_float 686 #define convert_float2_sat convert_float2 687 #define convert_float3_sat convert_float3 688 #define convert_float4_sat convert_float4 689 #define convert_float8_sat convert_float8 690 #define convert_float16_sat convert_float16 691 #define convert_half_sat convert_float 692 #define convert_half1_sat convert_half 693 #define convert_half2_sat convert_half2 694 #define convert_half3_sat convert_half3 695 #define convert_half4_sat convert_half4 696 #define convert_half8_sat convert_half8 697 #define convert_half16_sat convert_half16 699 #define convert_float1 convert_float 700 #define convert_half1 convert_half 701 #define convert_char1 convert_char 702 #define convert_uchar1 convert_uchar 703 #define convert_short1 convert_short 704 #define convert_ushort1 convert_ushort 705 #define convert_int1 convert_int 706 #define convert_uint1 convert_uint 707 #define convert_long1 convert_long 708 #define convert_ulong1 convert_ulong 709 #define convert_double1 convert_double 711 #define convert_char1_sat convert_char_sat 712 #define convert_uchar1_sat convert_uchar_sat 713 #define convert_uchar2_sat convert_uchar2_sat 714 #define convert_uchar3_sat convert_uchar3_sat 715 #define convert_uchar4_sat convert_uchar4_sat 716 #define convert_uchar8_sat convert_uchar8_sat 717 #define 
convert_uchar16_sat convert_uchar16_sat 718 #define convert_short1_sat convert_short_sat 719 #define convert_ushort1_sat convert_ushort_sat 720 #define convert_int1_sat convert_int_sat 721 #define convert_uint1_sat convert_uint_sat 722 #define convert_long1_sat convert_long_sat 723 #define convert_ulong1_sat convert_ulong_sat 724 #define convert_double1_sat convert_double_sat 726 #define VEC_DATA_TYPE_STR(type, size) type##size 727 #define VEC_DATA_TYPE(type, size) VEC_DATA_TYPE_STR(type, size) 729 #define CONVERT_STR(x, type) (convert_##type((x))) 730 #define CONVERT(x, type) CONVERT_STR(x, type) 732 #define CONVERT_SAT_STR(x, type) (convert_##type##_sat((x))) 733 #define CONVERT_SAT(x, type) CONVERT_SAT_STR(x, type) 735 #define CONVERT_SAT_ROUND_STR(x, type, round) (convert_##type##_sat_##round((x))) 736 #define CONVERT_SAT_ROUND(x, type, round) CONVERT_SAT_ROUND_STR(x, type, round) 738 #define select_vec_dt_uchar(size) uchar##size 739 #define select_vec_dt_char(size) char##size 740 #define select_vec_dt_ushort(size) ushort##size 741 #define select_vec_dt_short(size) short##size 742 #define select_vec_dt_half(size) short##size 743 #define select_vec_dt_uint(size) uint##size 744 #define select_vec_dt_int(size) int##size 745 #define select_vec_dt_float(size) int##size 746 #define select_vec_dt_ulong(size) ulong##size 747 #define select_vec_dt_long(size) long##size 749 #define SELECT_VEC_DATA_TYPE_STR(type, size) select_vec_dt_##type(size) 750 #define SELECT_VEC_DATA_TYPE(type, size) SELECT_VEC_DATA_TYPE_STR(type, size) 751 #define SELECT_DATA_TYPE(type) SELECT_VEC_DATA_TYPE_STR(type, 1) 753 #define signed_int_vec_dt_uchar(size) char##size 754 #define signed_int_vec_dt_char(size) char##size 755 #define signed_int_vec_dt_ushort(size) short##size 756 #define signed_int_vec_dt_short(size) short##size 757 #define signed_int_vec_dt_half(size) short##size 758 #define signed_int_vec_dt_uint(size) int##size 759 #define signed_int_vec_dt_int(size) int##size 760 #define 
signed_int_vec_dt_float(size) int##size 761 #define signed_int_vec_dt_ulong(size) long##size 762 #define signed_int_vec_dt_long(size) long##size 764 #define SIGNED_INT_VEC_DATA_TYPE_STR(type, size) signed_int_vec_dt_##type(size) 765 #define SIGNED_INT_VEC_DATA_TYPE(type, size) SIGNED_INT_VEC_DATA_TYPE_STR(type, size) 766 #define SIGNED_INT_DATA_TYPE(type) SIGNED_INT_VEC_DATA_TYPE_STR(type, 1) 768 #define sum_reduce_1(x) (x) 769 #define sum_reduce_2(x) ((x).s0) + ((x).s1) 770 #define sum_reduce_3(x) sum_reduce_2((x).s01) + ((x).s2) 771 #define sum_reduce_4(x) sum_reduce_2((x).s01) + sum_reduce_2((x).s23) 772 #define sum_reduce_8(x) sum_reduce_4((x).s0123) + sum_reduce_4((x).s4567) 773 #define sum_reduce_16(x) sum_reduce_8((x).s01234567) + sum_reduce_8((x).s89ABCDEF) 775 #define SUM_REDUCE_STR(x, size) sum_reduce_##size(x) 776 #define SUM_REDUCE(x, size) SUM_REDUCE_STR(x, size) 778 #define prod_reduce_1(x) (x) 779 #define prod_reduce_2(x) ((x).s0) * ((x).s1) 780 #define prod_reduce_3(x) prod_reduce_2((x).s01) * ((x).s2) 781 #define prod_reduce_4(x) prod_reduce_2((x).s01) * prod_reduce_2((x).s23) 782 #define prod_reduce_8(x) prod_reduce_4((x).s0123) * prod_reduce_4((x).s4567) 783 #define prod_reduce_16(x) prod_reduce_8((x).s01234567) * prod_reduce_8((x).s89ABCDEF) 785 #define PROD_REDUCE_STR(x, size) prod_reduce_##size(x) 786 #define PROD_REDUCE(x, size) PROD_REDUCE_STR(x, size) 788 #define max_reduce_1(x) (x) 789 #define max_reduce_2(x) max(((x).s0), ((x).s1)) 790 #define max_reduce_3(x) max(max_reduce_2((x).s01), ((x).s2)) 791 #define max_reduce_4(x) max(max_reduce_2((x).s01), max_reduce_2((x).s23)) 792 #define max_reduce_8(x) max(max_reduce_4((x).s0123), max_reduce_4((x).s4567)) 793 #define max_reduce_16(x) max(max_reduce_8((x).s01234567), max_reduce_8((x).s89ABCDEF)) 795 #define MAX_REDUCE_STR(x, size) max_reduce_##size(x) 796 #define MAX_REDUCE(x, size) MAX_REDUCE_STR(x, size) 798 #define VECTOR_DECLARATION(name) \ 799 __global uchar *name##_ptr, \ 800 uint 
name##_stride_x, \ 801 uint name##_step_x, \ 802 uint name##_offset_first_element_in_bytes 804 #define IMAGE_DECLARATION(name) \ 805 __global uchar *name##_ptr, \ 806 uint name##_stride_x, \ 807 uint name##_step_x, \ 808 uint name##_stride_y, \ 809 uint name##_step_y, \ 810 uint name##_offset_first_element_in_bytes 812 #define TENSOR3D_DECLARATION(name) \ 813 __global uchar *name##_ptr, \ 814 uint name##_stride_x, \ 815 uint name##_step_x, \ 816 uint name##_stride_y, \ 817 uint name##_step_y, \ 818 uint name##_stride_z, \ 819 uint name##_step_z, \ 820 uint name##_offset_first_element_in_bytes 822 #define TENSOR4D_DECLARATION(name) \ 823 __global uchar *name##_ptr, \ 824 uint name##_stride_x, \ 825 uint name##_step_x, \ 826 uint name##_stride_y, \ 827 uint name##_step_y, \ 828 uint name##_stride_z, \ 829 uint name##_step_z, \ 830 uint name##_stride_w, \ 831 uint name##_step_w, \ 832 uint name##_offset_first_element_in_bytes 834 #define CONVERT_TO_VECTOR_STRUCT(name) \ 835 update_vector_workitem_ptr(name##_ptr, name##_offset_first_element_in_bytes, name##_stride_x, name##_step_x) 837 #define CONVERT_TO_VECTOR_STRUCT_NO_STEP(name) \ 838 update_vector_workitem_ptr(name##_ptr, name##_offset_first_element_in_bytes, name##_stride_x, 0) 840 #define CONVERT_TO_IMAGE_STRUCT(name) \ 841 update_image_workitem_ptr(name##_ptr, name##_offset_first_element_in_bytes, name##_stride_x, name##_step_x, name##_stride_y, name##_step_y) 843 #define CONVERT_TO_IMAGE_STRUCT_NO_STEP(name) \ 844 update_image_workitem_ptr(name##_ptr, name##_offset_first_element_in_bytes, name##_stride_x, 0, name##_stride_y, 0) 846 #define CONVERT_TENSOR3D_TO_IMAGE_STRUCT(name) \ 847 update_image_from_tensor3D_workitem_ptr(name##_ptr, name##_offset_first_element_in_bytes, name##_stride_x, name##_step_x, name##_stride_y, name##_step_y, name##_stride_z, name##_step_z) 849 #define CONVERT_TENSOR3D_TO_IMAGE_STRUCT_NO_STEP(name) \ 850 update_image_from_tensor3D_workitem_ptr(name##_ptr, 
name##_offset_first_element_in_bytes, name##_stride_x, 0, name##_stride_y, 0, name##_stride_z, name##_step_z) 852 #define CONVERT_TENSOR3D_TO_IMAGE_STRUCT(name) \ 853 update_image_from_tensor3D_workitem_ptr(name##_ptr, name##_offset_first_element_in_bytes, name##_stride_x, name##_step_x, name##_stride_y, name##_step_y, name##_stride_z, name##_step_z) 855 #define CONVERT_TO_TENSOR3D_STRUCT(name) \ 856 update_tensor3D_workitem_ptr(name##_ptr, name##_offset_first_element_in_bytes, name##_stride_x, name##_step_x, name##_stride_y, name##_step_y, \ 857 name##_stride_z, name##_step_z) 859 #define CONVERT_TO_TENSOR3D_STRUCT_NO_STEP(name) \ 860 update_tensor3D_workitem_ptr(name##_ptr, name##_offset_first_element_in_bytes, name##_stride_x, 0, name##_stride_y, 0, name##_stride_z, 0) 862 #define CONVERT_TO_TENSOR4D_STRUCT(name, mod_size) \ 863 update_tensor4D_workitem_ptr(name##_ptr, name##_offset_first_element_in_bytes, name##_stride_x, name##_step_x, name##_stride_y, name##_step_y, \ 864 name##_stride_z, name##_step_z, name##_stride_w, name##_step_w, mod_size) 866 #define CONVERT_TO_TENSOR4D_STRUCT_NO_STEP(name, mod_size) \ 867 update_tensor4D_workitem_ptr(name##_ptr, name##_offset_first_element_in_bytes, name##_stride_x, 0, name##_stride_y, 0, name##_stride_z, 0, name##_stride_w, 0, mod_size) 869 #define CONVERT_TO_TENSOR3D_STRUCT_NO_UPDATE_PTR(name) \ 870 tensor3D_ptr_no_update(name##_ptr, name##_offset_first_element_in_bytes, name##_stride_x, name##_step_x, name##_stride_y, name##_step_y, \ 871 name##_stride_z, name##_step_z) 1002 .stride_y = stride_y,
1003 .stride_z = stride_z
1029 .stride_y = stride_y,
1030 .stride_z = stride_z
1044 .stride_y = stride_y,
1045 .stride_z = stride_z,
1046 .stride_w = stride_w
1049 tensor.
ptr += tensor.
offset_first_element_in_bytes + get_global_id(0) * step_x + get_global_id(1) * step_y + (get_global_id(2) % mod_size) * step_z + (get_global_id(2) / mod_size) * step_w;
1109 uint num_elements = width * height;
1111 const uint z = index / num_elements;
1113 index %= num_elements;
1115 const uint y = index / width;
1119 const uint x = index;
Structure to hold Vector information.
__global uchar * offset(const Image *img, int x, int y)
Get the pointer position of an Image.
Image update_image_workitem_ptr(__global uchar *ptr, uint offset_first_element_in_bytes, uint stride_x, uint step_x, uint stride_y, uint step_y)
Wrap image information into an Image structure, and make the pointer point at this workitem's data...
Tensor3D update_tensor3D_workitem_ptr(__global uchar *ptr, uint offset_first_element_in_bytes, uint stride_x, uint step_x, uint stride_y, uint step_y, uint stride_z, uint step_z)
Wrap 3D tensor information into a tensor structure, and make the pointer point at this workitem's da...
__global uchar * ptr
Pointer to the starting position of the buffer.
int stride_z
Stride of the image in Z dimension (in bytes)
struct Image Image
Structure to hold Image information.
struct Tensor3D Tensor3D
Structure to hold 3D tensor information.
__global const uchar * tensor3D_index2ptr(const Tensor3D *tensor, uint width, uint height, uint depth, uint index)
Get the offset for a given linear index of a Tensor3D.
int stride_x
Stride of the image in X dimension (in bytes)
struct Tensor4D Tensor4D
Structure to hold 4D tensor information.
int offset_first_element_in_bytes
The offset of the first element in the source image.
Structure to hold 3D tensor information.
Structure to hold 4D tensor information.
int stride_w
Stride of the image in W dimension (in bytes)
__global const uchar * tensor4D_offset(const Tensor4D *tensor, int x, int y, int z, int w)
Get the pointer position of a Tensor4D.
int stride_x
Stride of the image in X dimension (in bytes)
__global uchar * ptr
Pointer to the starting position of the buffer.
Image update_image_from_tensor3D_workitem_ptr(__global uchar *ptr, uint offset_first_element_in_bytes, uint stride_x, uint step_x, uint stride_y, uint step_y, uint stride_z, uint step_z)
Wrap 3D tensor information into an image structure, and make the pointer point at this workitem's dat...
int stride_y
Stride of the image in Y dimension (in bytes)
Structure to hold Image information.
int offset_first_element_in_bytes
The offset of the first element in the source image.
int offset_first_element_in_bytes
The offset of the first element in the source image.
__global uchar * ptr
Pointer to the starting position of the buffer.
__global const uchar * vector_offset(const Vector *vec, int x)
Get the pointer position of a Vector.
struct Vector Vector
Structure to hold Vector information.
Vector update_vector_workitem_ptr(__global uchar *ptr, uint offset_first_element_in_bytes, uint stride_x, uint step_x)
Wrap vector information into a Vector structure, and make the pointer point at this workitem's data...
__global uchar * ptr
Pointer to the starting position of the buffer.
int stride_x
Stride of the image in X dimension (in bytes)
int stride_y
Stride of the image in Y dimension (in bytes)
int stride_z
Stride of the image in Z dimension (in bytes)
int offset_first_element_in_bytes
The offset of the first element in the source image.
Tensor4D update_tensor4D_workitem_ptr(__global uchar *ptr, uint offset_first_element_in_bytes, uint stride_x, uint step_x, uint stride_y, uint step_y, uint stride_z, uint step_z, uint stride_w, uint step_w, uint mod_size)
int stride_y
Stride of the image in Y dimension (in bytes)
Tensor3D tensor3D_ptr_no_update(__global uchar *ptr, uint offset_first_element_in_bytes, uint stride_x, uint step_x, uint stride_y, uint step_y, uint stride_z, uint step_z)
Wrap 3D tensor information into a tensor structure.
__global const uchar * tensor3D_offset(const Tensor3D *tensor, int x, int y, int z)
Get the pointer position of a Tensor3D.
int stride_x
Stride of the image in X dimension (in bytes)