/* valgrind.h --- public interface to Valgrind's client-request mechanism
   and to its function wrapping / redirection machinery. */

0073 #ifndef __VALGRIND_H
0074 #define __VALGRIND_H
0075
0076
0077
0078
0079
0080
0081
0082
0083
0084
0085
0086
0087
0088
0089
0090
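/* Version of this header.  Client code can test these two macros to cope
   with additions made to the interface between Valgrind releases. */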
0091 #define __VALGRIND_MAJOR__ 3
0092 #define __VALGRIND_MINOR__ 25
0093
0094
0095 #include <stdarg.h>
0096
0097
0098
0099
0100
0101
0102
0103
0104
0105
0106
0107
0108
0109
0110
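/* Work out which platform we are being compiled for, and define exactly
   one PLAT_* macro for it.  Everything is #undef'd first so that a stale
   definition cannot leak in from elsewhere. */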
0111 #undef PLAT_x86_darwin
0112 #undef PLAT_amd64_darwin
0113 #undef PLAT_x86_freebsd
0114 #undef PLAT_amd64_freebsd
0115 #undef PLAT_arm64_freebsd
0116 #undef PLAT_x86_win32
0117 #undef PLAT_amd64_win64
0118 #undef PLAT_x86_linux
0119 #undef PLAT_amd64_linux
0120 #undef PLAT_ppc32_linux
0121 #undef PLAT_ppc64be_linux
0122 #undef PLAT_ppc64le_linux
0123 #undef PLAT_arm_linux
0124 #undef PLAT_arm64_linux
0125 #undef PLAT_s390x_linux
0126 #undef PLAT_mips32_linux
0127 #undef PLAT_mips64_linux
0128 #undef PLAT_nanomips_linux
0129 #undef PLAT_riscv64_linux
0130 #undef PLAT_x86_solaris
0131 #undef PLAT_amd64_solaris
0132
0133
0134 #if defined(__APPLE__) && defined(__i386__)
0135 # define PLAT_x86_darwin 1
0136 #elif defined(__APPLE__) && defined(__x86_64__)
0137 # define PLAT_amd64_darwin 1
0138 #elif defined(__FreeBSD__) && defined(__i386__)
0139 # define PLAT_x86_freebsd 1
0140 #elif defined(__FreeBSD__) && defined(__amd64__)
0141 # define PLAT_amd64_freebsd 1
0142 #elif defined(__FreeBSD__) && defined(__aarch64__) && !defined(__arm__)
0143 # define PLAT_arm64_freebsd 1
0144 #elif (defined(__MINGW32__) && defined(__i386__)) \
0145 || defined(__CYGWIN32__) \
0146 || (defined(_WIN32) && defined(_M_IX86))
0147 # define PLAT_x86_win32 1
0148 #elif (defined(__MINGW32__) && defined(__x86_64__)) \
0149 || (defined(_WIN32) && defined(_M_X64))
0150
0151 # define PLAT_amd64_win64 1
0152 #elif defined(__linux__) && defined(__i386__)
0153 # define PLAT_x86_linux 1
0154 #elif defined(__linux__) && defined(__x86_64__) && !defined(__ILP32__)
0155 # define PLAT_amd64_linux 1
0156 #elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
0157 # define PLAT_ppc32_linux 1
0158 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
0159
0160 # define PLAT_ppc64be_linux 1
0161 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
0162
0163 # define PLAT_ppc64le_linux 1
0164 #elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
0165 # define PLAT_arm_linux 1
0166 #elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
0167 # define PLAT_arm64_linux 1
0168 #elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
0169 # define PLAT_s390x_linux 1
0170 #elif defined(__linux__) && defined(__mips__) && (__mips==64)
0171 # define PLAT_mips64_linux 1
0172 #elif defined(__linux__) && defined(__mips__) && (__mips==32)
0173 # define PLAT_mips32_linux 1
0174 #elif defined(__linux__) && defined(__nanomips__)
0175 # define PLAT_nanomips_linux 1
0176 #elif defined(__linux__) && defined(__riscv) && (__riscv_xlen == 64)
0177 # define PLAT_riscv64_linux 1
0178 #elif defined(__sun) && defined(__i386__)
0179 # define PLAT_x86_solaris 1
0180 #elif defined(__sun) && defined(__x86_64__)
0181 # define PLAT_amd64_solaris 1
0182 #else
0183
0184
0185 # if !defined(NVALGRIND)
0186 # define NVALGRIND 1
0187 # endif
0188 #endif
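
/* If none of the cases above matched, this platform is unsupported and
   NVALGRIND gets defined, which makes every client request below compile
   down to its supplied default value. */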
0189
0190
0191
0192
0193
0194
0195
0196
0197
0198
0199
0200
0201
0202
0203
0204
0205
0206
0207
0208
0209 #define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default, \
0210 _zzq_request, _zzq_arg1, _zzq_arg2, \
0211 _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0212 do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default), \
0213 (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
0214 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
0215
0216 #define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1, \
0217 _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0218 do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
0219 (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
0220 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
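
/* A minimal usage sketch (illustration only, not part of the original
   header).  Tools normally provide their own wrapper macros around these;
   VG_USERREQ__EXAMPLE below is a hypothetical request code chosen purely
   for demonstration.

      #include "valgrind.h"

      #define VG_USERREQ__EXAMPLE  0x1001   // hypothetical request code

      static long notify_tool(void* addr, unsigned long len)
      {
         // Evaluates to the default (0) when the program is not running
         // under Valgrind, so the call is harmless in a normal build.
         return VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__EXAMPLE,
                                                 addr, len, 0, 0, 0);
      }
*/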
0221
0222 #if defined(NVALGRIND)
0223
0224
0225
0226
0227 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0228 _zzq_default, _zzq_request, \
0229 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0230 (_zzq_default)
0231
0232 #else
0233
0234
0235
0236
0237
0238
0239
0240
0241
0242
0243
0244
0245
0246
0247
0248
0249
0250
0251
0252
0253
0254
0255
0256
0257
0258
0259
0260
0261
0262
0263
0264
0265
0266
0267
0268
0269
0270
0271 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
0272 || (defined(PLAT_x86_win32) && defined(__GNUC__)) \
0273 || defined(PLAT_x86_solaris) || defined(PLAT_x86_freebsd)
0274
0275 typedef
0276 struct {
0277 unsigned int nraddr;
0278 }
0279 OrigFn;
0280
0281 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0282 "roll $3, %%edi ; roll $13, %%edi\n\t" \
0283 "roll $29, %%edi ; roll $19, %%edi\n\t"
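
/* The preamble is a sequence of rotates whose net effect on %edi is nil
   (3+13+29+19 = 64 bits, i.e. two full 32-bit rotations), so on a real CPU
   it is a no-op.  Valgrind's translator recognises the sequence and treats
   the instruction that follows it (e.g. "xchgl %ebx,%ebx") as a marker for
   a client request.  The other platform blocks below use the same trick
   with their own instruction encodings. */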
0284
0285 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0286 _zzq_default, _zzq_request, \
0287 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0288 __extension__ \
0289 ({volatile unsigned int _zzq_args[6]; \
0290 volatile unsigned int _zzq_result; \
0291 _zzq_args[0] = (unsigned int)(_zzq_request); \
0292 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
0293 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
0294 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
0295 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
0296 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
0297 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0298 \
0299 "xchgl %%ebx,%%ebx" \
0300 : "=d" (_zzq_result) \
0301 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
0302 : "cc", "memory" \
0303 ); \
0304 _zzq_result; \
0305 })
0306
0307 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0308 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0309 volatile unsigned int __addr; \
0310 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0311 \
0312 "xchgl %%ecx,%%ecx" \
0313 : "=a" (__addr) \
0314 : \
0315 : "cc", "memory" \
0316 ); \
0317 _zzq_orig->nraddr = __addr; \
0318 }
0319
0320 #define VALGRIND_CALL_NOREDIR_EAX \
0321 __SPECIAL_INSTRUCTION_PREAMBLE \
0322 \
0323 "xchgl %%edx,%%edx\n\t"
0324
0325 #define VALGRIND_VEX_INJECT_IR() \
0326 do { \
0327 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0328 "xchgl %%edi,%%edi\n\t" \
0329 : : : "cc", "memory" \
0330 ); \
0331 } while (0)
0332
0333 #endif
0334
0335
0336
0337
0338 #if defined(PLAT_x86_win32) && !defined(__GNUC__)
0339
0340 typedef
0341 struct {
0342 unsigned int nraddr;
0343 }
0344 OrigFn;
0345
0346 #if defined(_MSC_VER)
0347
0348 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0349 __asm rol edi, 3 __asm rol edi, 13 \
0350 __asm rol edi, 29 __asm rol edi, 19
0351
0352 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0353 _zzq_default, _zzq_request, \
0354 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0355 valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
0356 (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
0357 (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
0358 (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))
0359
0360 static __inline uintptr_t
0361 valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
0362 uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
0363 uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
0364 uintptr_t _zzq_arg5)
0365 {
0366 volatile uintptr_t _zzq_args[6];
0367 volatile unsigned int _zzq_result;
0368 _zzq_args[0] = (uintptr_t)(_zzq_request);
0369 _zzq_args[1] = (uintptr_t)(_zzq_arg1);
0370 _zzq_args[2] = (uintptr_t)(_zzq_arg2);
0371 _zzq_args[3] = (uintptr_t)(_zzq_arg3);
0372 _zzq_args[4] = (uintptr_t)(_zzq_arg4);
0373 _zzq_args[5] = (uintptr_t)(_zzq_arg5);
0374 __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
0375 __SPECIAL_INSTRUCTION_PREAMBLE
0376
0377 __asm xchg ebx,ebx
0378 __asm mov _zzq_result, edx
0379 }
0380 return _zzq_result;
0381 }
0382
0383 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0384 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0385 volatile unsigned int __addr; \
0386 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
0387 \
0388 __asm xchg ecx,ecx \
0389 __asm mov __addr, eax \
0390 } \
0391 _zzq_orig->nraddr = __addr; \
0392 }
0393
0394 #define VALGRIND_CALL_NOREDIR_EAX ERROR
0395
0396 #define VALGRIND_VEX_INJECT_IR() \
0397 do { \
0398 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
0399 __asm xchg edi,edi \
0400 } \
0401 } while (0)
0402
0403 #else
0404 #error Unsupported compiler.
0405 #endif
0406
0407 #endif
0408
0409
0410
0411 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
0412 || defined(PLAT_amd64_solaris) \
0413 || defined(PLAT_amd64_freebsd) \
0414 || (defined(PLAT_amd64_win64) && defined(__GNUC__))
0415
0416 typedef
0417 struct {
0418 unsigned long int nraddr;
0419 }
0420 OrigFn;
0421
0422 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0423 "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
0424 "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
0425
0426 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0427 _zzq_default, _zzq_request, \
0428 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0429 __extension__ \
0430 ({ volatile unsigned long int _zzq_args[6]; \
0431 volatile unsigned long int _zzq_result; \
0432 _zzq_args[0] = (unsigned long int)(_zzq_request); \
0433 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
0434 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
0435 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
0436 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
0437 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
0438 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0439 \
0440 "xchgq %%rbx,%%rbx" \
0441 : "=d" (_zzq_result) \
0442 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
0443 : "cc", "memory" \
0444 ); \
0445 _zzq_result; \
0446 })
0447
0448 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0449 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0450 volatile unsigned long int __addr; \
0451 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0452 \
0453 "xchgq %%rcx,%%rcx" \
0454 : "=a" (__addr) \
0455 : \
0456 : "cc", "memory" \
0457 ); \
0458 _zzq_orig->nraddr = __addr; \
0459 }
0460
0461 #define VALGRIND_CALL_NOREDIR_RAX \
0462 __SPECIAL_INSTRUCTION_PREAMBLE \
0463 \
0464 "xchgq %%rdx,%%rdx\n\t"
0465
0466 #define VALGRIND_VEX_INJECT_IR() \
0467 do { \
0468 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0469 "xchgq %%rdi,%%rdi\n\t" \
0470 : : : "cc", "memory" \
0471 ); \
0472 } while (0)
0473
0474 #endif
0475
0476
0477
0478 #if defined(PLAT_amd64_win64) && !defined(__GNUC__)
0479
0480 #error Unsupported compiler.
0481
0482 #endif
0483
0484
0485
0486 #if defined(PLAT_ppc32_linux)
0487
0488 typedef
0489 struct {
0490 unsigned int nraddr;
0491 }
0492 OrigFn;
0493
0494 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0495 "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
0496 "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"
0497
0498 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0499 _zzq_default, _zzq_request, \
0500 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0501 \
0502 __extension__ \
0503 ({ unsigned int _zzq_args[6]; \
0504 unsigned int _zzq_result; \
0505 unsigned int* _zzq_ptr; \
0506 _zzq_args[0] = (unsigned int)(_zzq_request); \
0507 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
0508 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
0509 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
0510 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
0511 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
0512 _zzq_ptr = _zzq_args; \
0513 __asm__ volatile("mr 3,%1\n\t" \
0514 "mr 4,%2\n\t" \
0515 __SPECIAL_INSTRUCTION_PREAMBLE \
0516 \
0517 "or 1,1,1\n\t" \
0518 "mr %0,3" \
0519 : "=b" (_zzq_result) \
0520 : "b" (_zzq_default), "b" (_zzq_ptr) \
0521 : "cc", "memory", "r3", "r4"); \
0522 _zzq_result; \
0523 })
0524
0525 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0526 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0527 unsigned int __addr; \
0528 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0529 \
0530 "or 2,2,2\n\t" \
0531 "mr %0,3" \
0532 : "=b" (__addr) \
0533 : \
0534 : "cc", "memory", "r3" \
0535 ); \
0536 _zzq_orig->nraddr = __addr; \
0537 }
0538
0539 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
0540 __SPECIAL_INSTRUCTION_PREAMBLE \
0541 \
0542 "or 3,3,3\n\t"
0543
0544 #define VALGRIND_VEX_INJECT_IR() \
0545 do { \
0546 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0547 "or 5,5,5\n\t" \
0548 ); \
0549 } while (0)
0550
0551 #endif
0552
0553
0554
0555 #if defined(PLAT_ppc64be_linux)
0556
0557 typedef
0558 struct {
0559 unsigned long int nraddr;
0560 unsigned long int r2;
0561 }
0562 OrigFn;
0563
0564 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0565 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
0566 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
0567
0568 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0569 _zzq_default, _zzq_request, \
0570 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0571 \
0572 __extension__ \
0573 ({ unsigned long int _zzq_args[6]; \
0574 unsigned long int _zzq_result; \
0575 unsigned long int* _zzq_ptr; \
0576 _zzq_args[0] = (unsigned long int)(_zzq_request); \
0577 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
0578 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
0579 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
0580 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
0581 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
0582 _zzq_ptr = _zzq_args; \
0583 __asm__ volatile("mr 3,%1\n\t" \
0584 "mr 4,%2\n\t" \
0585 __SPECIAL_INSTRUCTION_PREAMBLE \
0586 \
0587 "or 1,1,1\n\t" \
0588 "mr %0,3" \
0589 : "=b" (_zzq_result) \
0590 : "b" (_zzq_default), "b" (_zzq_ptr) \
0591 : "cc", "memory", "r3", "r4"); \
0592 _zzq_result; \
0593 })
0594
0595 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0596 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0597 unsigned long int __addr; \
0598 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0599 \
0600 "or 2,2,2\n\t" \
0601 "mr %0,3" \
0602 : "=b" (__addr) \
0603 : \
0604 : "cc", "memory", "r3" \
0605 ); \
0606 _zzq_orig->nraddr = __addr; \
0607 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0608 \
0609 "or 4,4,4\n\t" \
0610 "mr %0,3" \
0611 : "=b" (__addr) \
0612 : \
0613 : "cc", "memory", "r3" \
0614 ); \
0615 _zzq_orig->r2 = __addr; \
0616 }
0617
0618 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
0619 __SPECIAL_INSTRUCTION_PREAMBLE \
0620 \
0621 "or 3,3,3\n\t"
0622
0623 #define VALGRIND_VEX_INJECT_IR() \
0624 do { \
0625 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0626 "or 5,5,5\n\t" \
0627 ); \
0628 } while (0)
0629
0630 #endif
0631
0632 #if defined(PLAT_ppc64le_linux)
0633
0634 typedef
0635 struct {
0636 unsigned long int nraddr;
0637 unsigned long int r2;
0638 }
0639 OrigFn;
0640
0641 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0642 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
0643 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
0644
0645 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0646 _zzq_default, _zzq_request, \
0647 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0648 \
0649 __extension__ \
0650 ({ unsigned long int _zzq_args[6]; \
0651 unsigned long int _zzq_result; \
0652 unsigned long int* _zzq_ptr; \
0653 _zzq_args[0] = (unsigned long int)(_zzq_request); \
0654 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
0655 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
0656 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
0657 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
0658 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
0659 _zzq_ptr = _zzq_args; \
0660 __asm__ volatile("mr 3,%1\n\t" \
0661 "mr 4,%2\n\t" \
0662 __SPECIAL_INSTRUCTION_PREAMBLE \
0663 \
0664 "or 1,1,1\n\t" \
0665 "mr %0,3" \
0666 : "=b" (_zzq_result) \
0667 : "b" (_zzq_default), "b" (_zzq_ptr) \
0668 : "cc", "memory", "r3", "r4"); \
0669 _zzq_result; \
0670 })
0671
0672 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0673 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0674 unsigned long int __addr; \
0675 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0676 \
0677 "or 2,2,2\n\t" \
0678 "mr %0,3" \
0679 : "=b" (__addr) \
0680 : \
0681 : "cc", "memory", "r3" \
0682 ); \
0683 _zzq_orig->nraddr = __addr; \
0684 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0685 \
0686 "or 4,4,4\n\t" \
0687 "mr %0,3" \
0688 : "=b" (__addr) \
0689 : \
0690 : "cc", "memory", "r3" \
0691 ); \
0692 _zzq_orig->r2 = __addr; \
0693 }
0694
0695 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
0696 __SPECIAL_INSTRUCTION_PREAMBLE \
0697 \
0698 "or 3,3,3\n\t"
0699
0700 #define VALGRIND_VEX_INJECT_IR() \
0701 do { \
0702 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0703 "or 5,5,5\n\t" \
0704 ); \
0705 } while (0)
0706
0707 #endif
0708
0709
0710
0711 #if defined(PLAT_arm_linux)
0712
0713 typedef
0714 struct {
0715 unsigned int nraddr;
0716 }
0717 OrigFn;
0718
0719 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0720 "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
0721 "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
0722
0723 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0724 _zzq_default, _zzq_request, \
0725 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0726 \
0727 __extension__ \
0728 ({volatile unsigned int _zzq_args[6]; \
0729 volatile unsigned int _zzq_result; \
0730 _zzq_args[0] = (unsigned int)(_zzq_request); \
0731 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
0732 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
0733 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
0734 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
0735 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
0736 __asm__ volatile("mov r3, %1\n\t" \
0737 "mov r4, %2\n\t" \
0738 __SPECIAL_INSTRUCTION_PREAMBLE \
0739 \
0740 "orr r10, r10, r10\n\t" \
0741 "mov %0, r3" \
0742 : "=r" (_zzq_result) \
0743 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
0744 : "cc","memory", "r3", "r4"); \
0745 _zzq_result; \
0746 })
0747
0748 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0749 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0750 unsigned int __addr; \
0751 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0752 \
0753 "orr r11, r11, r11\n\t" \
0754 "mov %0, r3" \
0755 : "=r" (__addr) \
0756 : \
0757 : "cc", "memory", "r3" \
0758 ); \
0759 _zzq_orig->nraddr = __addr; \
0760 }
0761
0762 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
0763 __SPECIAL_INSTRUCTION_PREAMBLE \
0764 \
0765 "orr r12, r12, r12\n\t"
0766
0767 #define VALGRIND_VEX_INJECT_IR() \
0768 do { \
0769 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0770 "orr r9, r9, r9\n\t" \
0771 : : : "cc", "memory" \
0772 ); \
0773 } while (0)
0774
0775 #endif
0776
0777
0778
0779 #if defined(PLAT_arm64_linux) || defined(PLAT_arm64_freebsd)
0780
0781 typedef
0782 struct {
0783 unsigned long int nraddr;
0784 }
0785 OrigFn;
0786
0787 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0788 "ror x12, x12, #3 ; ror x12, x12, #13 \n\t" \
0789 "ror x12, x12, #51 ; ror x12, x12, #61 \n\t"
0790
0791 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0792 _zzq_default, _zzq_request, \
0793 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0794 \
0795 __extension__ \
0796 ({volatile unsigned long int _zzq_args[6]; \
0797 volatile unsigned long int _zzq_result; \
0798 _zzq_args[0] = (unsigned long int)(_zzq_request); \
0799 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
0800 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
0801 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
0802 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
0803 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
0804 __asm__ volatile("mov x3, %1\n\t" \
0805 "mov x4, %2\n\t" \
0806 __SPECIAL_INSTRUCTION_PREAMBLE \
0807 \
0808 "orr x10, x10, x10\n\t" \
0809 "mov %0, x3" \
0810 : "=r" (_zzq_result) \
0811 : "r" ((unsigned long int)(_zzq_default)), \
0812 "r" (&_zzq_args[0]) \
0813 : "cc","memory", "x3", "x4"); \
0814 _zzq_result; \
0815 })
0816
0817 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0818 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0819 unsigned long int __addr; \
0820 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0821 \
0822 "orr x11, x11, x11\n\t" \
0823 "mov %0, x3" \
0824 : "=r" (__addr) \
0825 : \
0826 : "cc", "memory", "x3" \
0827 ); \
0828 _zzq_orig->nraddr = __addr; \
0829 }
0830
0831 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
0832 __SPECIAL_INSTRUCTION_PREAMBLE \
0833 \
0834 "orr x12, x12, x12\n\t"
0835
0836 #define VALGRIND_VEX_INJECT_IR() \
0837 do { \
0838 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0839 "orr x9, x9, x9\n\t" \
0840 : : : "cc", "memory" \
0841 ); \
0842 } while (0)
0843
0844 #endif
0845
0846
0847
0848 #if defined(PLAT_s390x_linux)
0849
0850 typedef
0851 struct {
0852 unsigned long int nraddr;
0853 }
0854 OrigFn;
0855
0856
0857
0858
0859
0860 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0861 "lr 15,15\n\t" \
0862 "lr 1,1\n\t" \
0863 "lr 2,2\n\t" \
0864 "lr 3,3\n\t"
0865
0866 #define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
0867 #define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
0868 #define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
0869 #define __VEX_INJECT_IR_CODE "lr 5,5\n\t"
0870
0871 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0872 _zzq_default, _zzq_request, \
0873 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0874 __extension__ \
0875 ({volatile unsigned long int _zzq_args[6]; \
0876 volatile unsigned long int _zzq_result; \
0877 _zzq_args[0] = (unsigned long int)(_zzq_request); \
0878 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
0879 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
0880 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
0881 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
0882 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
0883 __asm__ volatile( \
0884 "lgr 2,%1\n\t" \
0885 \
0886 "lgr 3,%2\n\t" \
0887 __SPECIAL_INSTRUCTION_PREAMBLE \
0888 __CLIENT_REQUEST_CODE \
0889 \
0890 "lgr %0, 3\n\t" \
0891 : "=d" (_zzq_result) \
0892 : "a" (&_zzq_args[0]), \
0893 "0" ((unsigned long int)_zzq_default) \
0894 : "cc", "2", "3", "memory" \
0895 ); \
0896 _zzq_result; \
0897 })
0898
0899 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0900 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0901 volatile unsigned long int __addr; \
0902 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0903 __GET_NR_CONTEXT_CODE \
0904 "lgr %0, 3\n\t" \
0905 : "=a" (__addr) \
0906 : \
0907 : "cc", "3", "memory" \
0908 ); \
0909 _zzq_orig->nraddr = __addr; \
0910 }
0911
0912 #define VALGRIND_CALL_NOREDIR_R1 \
0913 __SPECIAL_INSTRUCTION_PREAMBLE \
0914 __CALL_NO_REDIR_CODE
0915
0916 #define VALGRIND_VEX_INJECT_IR() \
0917 do { \
0918 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0919 __VEX_INJECT_IR_CODE); \
0920 } while (0)
0921
0922 #endif
0923
0924
0925
0926 #if defined(PLAT_mips32_linux)
0927
0928 typedef
0929 struct {
0930 unsigned int nraddr;
0931 }
0932 OrigFn;
0933
0934
0935
0936
0937
0938 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0939 "srl $0, $0, 13\n\t" \
0940 "srl $0, $0, 29\n\t" \
0941 "srl $0, $0, 3\n\t" \
0942 "srl $0, $0, 19\n\t"
0943
0944 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0945 _zzq_default, _zzq_request, \
0946 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0947 __extension__ \
0948 ({ volatile unsigned int _zzq_args[6]; \
0949 volatile unsigned int _zzq_result; \
0950 _zzq_args[0] = (unsigned int)(_zzq_request); \
0951 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
0952 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
0953 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
0954 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
0955 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
0956 __asm__ volatile("move $11, %1\n\t" \
0957 "move $12, %2\n\t" \
0958 __SPECIAL_INSTRUCTION_PREAMBLE \
0959 \
0960 "or $13, $13, $13\n\t" \
0961 "move %0, $11\n\t" \
0962 : "=r" (_zzq_result) \
0963 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
0964 : "$11", "$12", "memory"); \
0965 _zzq_result; \
0966 })
0967
0968 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0969 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0970 volatile unsigned int __addr; \
0971 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0972 \
0973 "or $14, $14, $14\n\t" \
0974 "move %0, $11" \
0975 : "=r" (__addr) \
0976 : \
0977 : "$11" \
0978 ); \
0979 _zzq_orig->nraddr = __addr; \
0980 }
0981
0982 #define VALGRIND_CALL_NOREDIR_T9 \
0983 __SPECIAL_INSTRUCTION_PREAMBLE \
0984 \
0985 "or $15, $15, $15\n\t"
0986
0987 #define VALGRIND_VEX_INJECT_IR() \
0988 do { \
0989 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0990 "or $11, $11, $11\n\t" \
0991 ); \
0992 } while (0)
0993
0994
0995 #endif
0996
0997
0998
0999 #if defined(PLAT_mips64_linux)
1000
1001 typedef
1002 struct {
1003 unsigned long nraddr;
1004 }
1005 OrigFn;
1006
1007
1008
1009
1010
1011 #define __SPECIAL_INSTRUCTION_PREAMBLE \
1012 "dsll $0,$0, 3 ; dsll $0,$0,13\n\t" \
1013 "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
1014
1015 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
1016 _zzq_default, _zzq_request, \
1017 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
1018 __extension__ \
1019 ({ volatile unsigned long int _zzq_args[6]; \
1020 volatile unsigned long int _zzq_result; \
1021 _zzq_args[0] = (unsigned long int)(_zzq_request); \
1022 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
1023 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
1024 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
1025 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
1026 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
1027 __asm__ volatile("move $11, %1\n\t" \
1028 "move $12, %2\n\t" \
1029 __SPECIAL_INSTRUCTION_PREAMBLE \
1030 \
1031 "or $13, $13, $13\n\t" \
1032 "move %0, $11\n\t" \
1033 : "=r" (_zzq_result) \
1034 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
1035 : "$11", "$12", "memory"); \
1036 _zzq_result; \
1037 })
1038
1039 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
1040 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
1041 volatile unsigned long int __addr; \
1042 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1043 \
1044 "or $14, $14, $14\n\t" \
1045 "move %0, $11" \
1046 : "=r" (__addr) \
1047 : \
1048 : "$11"); \
1049 _zzq_orig->nraddr = __addr; \
1050 }
1051
1052 #define VALGRIND_CALL_NOREDIR_T9 \
1053 __SPECIAL_INSTRUCTION_PREAMBLE \
1054 \
1055 "or $15, $15, $15\n\t"
1056
1057 #define VALGRIND_VEX_INJECT_IR() \
1058 do { \
1059 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1060 "or $11, $11, $11\n\t" \
1061 ); \
1062 } while (0)
1063
1064 #endif
1065
1066 #if defined(PLAT_nanomips_linux)
1067
1068 typedef
1069 struct {
1070 unsigned int nraddr;
1071 }
1072 OrigFn;
1073
1074
1075
1076
1077
1078
1079
1080 #define __SPECIAL_INSTRUCTION_PREAMBLE "srl[32] $zero, $zero, 13 \n\t" \
1081 "srl[32] $zero, $zero, 29 \n\t" \
1082 "srl[32] $zero, $zero, 3 \n\t" \
1083 "srl[32] $zero, $zero, 19 \n\t"
1084
1085 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
1086 _zzq_default, _zzq_request, \
1087 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
1088 __extension__ \
1089 ({ volatile unsigned int _zzq_args[6]; \
1090 volatile unsigned int _zzq_result; \
1091 _zzq_args[0] = (unsigned int)(_zzq_request); \
1092 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
1093 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
1094 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
1095 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
1096 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
1097 __asm__ volatile("move $a7, %1\n\t" \
1098 "move $t0, %2\n\t" \
1099 __SPECIAL_INSTRUCTION_PREAMBLE \
1100 \
1101 "or[32] $t0, $t0, $t0\n\t" \
1102 "move %0, $a7\n\t" \
1103 : "=r" (_zzq_result) \
1104 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
1105 : "$a7", "$t0", "memory"); \
1106 _zzq_result; \
1107 })
1108
1109 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
1110 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
1111 volatile unsigned long int __addr; \
1112 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1113 \
1114 "or[32] $t1, $t1, $t1\n\t" \
1115 "move %0, $a7" \
1116 : "=r" (__addr) \
1117 : \
1118 : "$a7"); \
1119 _zzq_orig->nraddr = __addr; \
1120 }
1121
1122 #define VALGRIND_CALL_NOREDIR_T9 \
1123 __SPECIAL_INSTRUCTION_PREAMBLE \
1124 \
1125 "or[32] $t2, $t2, $t2\n\t"
1126
1127 #define VALGRIND_VEX_INJECT_IR() \
1128 do { \
1129 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1130 "or[32] $t3, $t3, $t3\n\t" \
1131 ); \
1132 } while (0)
1133
1134 #endif
1135
1136
1137
1138 #if defined(PLAT_riscv64_linux)
1139
1140 typedef
1141 struct {
1142 unsigned long int nraddr;
1143 }
1144 OrigFn;
1145
1146 #define __SPECIAL_INSTRUCTION_PREAMBLE \
1147 ".option push\n\t" \
1148 ".option norvc\n\t" \
1149 "srli zero, zero, 3\n\t" \
1150 "srli zero, zero, 13\n\t" \
1151 "srli zero, zero, 51\n\t" \
1152 "srli zero, zero, 61\n\t"
1153
1154 #define __SPECIAL_INSTRUCTION_POSTAMBLE \
1155 ".option pop\n\t" \
1156
1157 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
1158 _zzq_default, _zzq_request, \
1159 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
1160 \
1161 __extension__ \
1162 ({volatile unsigned long int _zzq_args[6]; \
1163 volatile unsigned long int _zzq_result; \
1164 _zzq_args[0] = (unsigned long int)(_zzq_request); \
1165 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
1166 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
1167 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
1168 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
1169 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
1170 __asm__ volatile("mv a3, %1\n\t" \
1171 "mv a4, %2\n\t" \
1172 __SPECIAL_INSTRUCTION_PREAMBLE \
1173 \
1174 "or a0, a0, a0\n\t" \
1175 __SPECIAL_INSTRUCTION_POSTAMBLE \
1176 "mv %0, a3" \
1177 : "=r" (_zzq_result) \
1178 : "r" ((unsigned long int)(_zzq_default)), \
1179 "r" (&_zzq_args[0]) \
1180 : "memory", "a3", "a4"); \
1181 _zzq_result; \
1182 })
1183
1184 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
1185 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
1186 unsigned long int __addr; \
1187 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1188 \
1189 "or a1, a1, a1\n\t" \
1190 __SPECIAL_INSTRUCTION_POSTAMBLE \
1191 "mv %0, a3" \
1192 : "=r" (__addr) \
1193 : \
1194 : "memory", "a3" \
1195 ); \
1196 _zzq_orig->nraddr = __addr; \
1197 }
1198
1199 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0 \
1200 __SPECIAL_INSTRUCTION_PREAMBLE \
1201 \
1202 "or a2, a2, a2\n\t" \
1203 __SPECIAL_INSTRUCTION_POSTAMBLE
1204
1205 #define VALGRIND_VEX_INJECT_IR() \
1206 do { \
1207 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1208 "or a3, a3, a3\n\t" \
1209 __SPECIAL_INSTRUCTION_POSTAMBLE \
1210 : : : "memory" \
1211 ); \
1212 } while (0)
1213
1214 #endif
1215
1216
1217
1218 #endif  /* NVALGRIND */
1219
1220
1221
1222
1223
1224
1225
1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252 #define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd
1253
1254 #define I_WRAP_SONAME_FNNAME_ZU(soname,fnname) \
1255 VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)
1256
1257 #define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname) \
1258 VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
1259
1260
1261
1262
1263
1264 #define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
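
/* A function-wrapping sketch (illustration only).  The soname part of the
   wrapper's name uses Valgrind's Z-encoding, in which "Zd" stands for '.',
   so "libpthreadZdsoZd0" means "libpthread.so.0".  CALL_FN_W_W is defined
   per-platform further down in this file.

      #include <stdio.h>
      #include <pthread.h>
      #include "valgrind.h"

      // Wrapper for pthread_mutex_lock from libpthread.so.0.
      int I_WRAP_SONAME_FNNAME_ZU(libpthreadZdsoZd0, pthread_mutex_lock)
            (pthread_mutex_t* mutex)
      {
         int    ret;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);        // fetch the original function
         printf("mutex_lock wrapper: before\n");
         CALL_FN_W_W(ret, fn, mutex);     // call it without redirection
         printf("mutex_lock wrapper: after\n");
         return ret;
      }
*/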
1265
1266
1267
1268
1269
1270
1271
1272 #define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname) \
1273 VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)
1274
1275 #define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname) \
1276 VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1277
1278
1279
1280
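/* Convenience forms that ignore the return value: each CALL_FN_v_* simply
   forwards to the corresponding CALL_FN_W_* with a dummy result variable. */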
1281 #define CALL_FN_v_v(fnptr) \
1282 do { volatile unsigned long _junk; \
1283 CALL_FN_W_v(_junk,fnptr); } while (0)
1284
1285 #define CALL_FN_v_W(fnptr, arg1) \
1286 do { volatile unsigned long _junk; \
1287 CALL_FN_W_W(_junk,fnptr,arg1); } while (0)
1288
1289 #define CALL_FN_v_WW(fnptr, arg1,arg2) \
1290 do { volatile unsigned long _junk; \
1291 CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)
1292
1293 #define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3) \
1294 do { volatile unsigned long _junk; \
1295 CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)
1296
1297 #define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4) \
1298 do { volatile unsigned long _junk; \
1299 CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)
1300
1301 #define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5) \
1302 do { volatile unsigned long _junk; \
1303 CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)
1304
1305 #define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6) \
1306 do { volatile unsigned long _junk; \
1307 CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)
1308
1309 #define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7) \
1310 do { volatile unsigned long _junk; \
1311 CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1312
1313
1314
1315 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
1316 || defined(PLAT_x86_solaris) || defined(PLAT_x86_freebsd)
1317
1318
1319
1320 #define __CALLER_SAVED_REGS "ecx", "edx"
1321
1322
1323
1324
1325
1326
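/* Keep %esp 16-byte aligned across the call: the original stack pointer is
   stashed in %edi (listed as clobbered in every block below) and put back
   afterwards by VALGRIND_RESTORE_STACK. */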
1327 #define VALGRIND_ALIGN_STACK \
1328 "movl %%esp,%%edi\n\t" \
1329 "andl $0xfffffff0,%%esp\n\t"
1330 #define VALGRIND_RESTORE_STACK \
1331 "movl %%edi,%%esp\n\t"
1332
1333
1334
1335
1336 #define CALL_FN_W_v(lval, orig) \
1337 do { \
1338 volatile OrigFn _orig = (orig); \
1339 volatile unsigned long _argvec[1]; \
1340 volatile unsigned long _res; \
1341 _argvec[0] = (unsigned long)_orig.nraddr; \
1342 __asm__ volatile( \
1343 VALGRIND_ALIGN_STACK \
1344 "movl (%%eax), %%eax\n\t" \
1345 VALGRIND_CALL_NOREDIR_EAX \
1346 VALGRIND_RESTORE_STACK \
1347 : "=a" (_res) \
1348 : "a" (&_argvec[0]) \
1349 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1350 ); \
1351 lval = (__typeof__(lval)) _res; \
1352 } while (0)
1353
1354 #define CALL_FN_W_W(lval, orig, arg1) \
1355 do { \
1356 volatile OrigFn _orig = (orig); \
1357 volatile unsigned long _argvec[2]; \
1358 volatile unsigned long _res; \
1359 _argvec[0] = (unsigned long)_orig.nraddr; \
1360 _argvec[1] = (unsigned long)(arg1); \
1361 __asm__ volatile( \
1362 VALGRIND_ALIGN_STACK \
1363 "subl $12, %%esp\n\t" \
1364 "pushl 4(%%eax)\n\t" \
1365 "movl (%%eax), %%eax\n\t" \
1366 VALGRIND_CALL_NOREDIR_EAX \
1367 VALGRIND_RESTORE_STACK \
1368 : "=a" (_res) \
1369 : "a" (&_argvec[0]) \
1370 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1371 ); \
1372 lval = (__typeof__(lval)) _res; \
1373 } while (0)
1374
1375 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1376 do { \
1377 volatile OrigFn _orig = (orig); \
1378 volatile unsigned long _argvec[3]; \
1379 volatile unsigned long _res; \
1380 _argvec[0] = (unsigned long)_orig.nraddr; \
1381 _argvec[1] = (unsigned long)(arg1); \
1382 _argvec[2] = (unsigned long)(arg2); \
1383 __asm__ volatile( \
1384 VALGRIND_ALIGN_STACK \
1385 "subl $8, %%esp\n\t" \
1386 "pushl 8(%%eax)\n\t" \
1387 "pushl 4(%%eax)\n\t" \
1388 "movl (%%eax), %%eax\n\t" \
1389 VALGRIND_CALL_NOREDIR_EAX \
1390 VALGRIND_RESTORE_STACK \
1391 : "=a" (_res) \
1392 : "a" (&_argvec[0]) \
1393 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1394 ); \
1395 lval = (__typeof__(lval)) _res; \
1396 } while (0)
1397
1398 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1399 do { \
1400 volatile OrigFn _orig = (orig); \
1401 volatile unsigned long _argvec[4]; \
1402 volatile unsigned long _res; \
1403 _argvec[0] = (unsigned long)_orig.nraddr; \
1404 _argvec[1] = (unsigned long)(arg1); \
1405 _argvec[2] = (unsigned long)(arg2); \
1406 _argvec[3] = (unsigned long)(arg3); \
1407 __asm__ volatile( \
1408 VALGRIND_ALIGN_STACK \
1409 "subl $4, %%esp\n\t" \
1410 "pushl 12(%%eax)\n\t" \
1411 "pushl 8(%%eax)\n\t" \
1412 "pushl 4(%%eax)\n\t" \
1413 "movl (%%eax), %%eax\n\t" \
1414 VALGRIND_CALL_NOREDIR_EAX \
1415 VALGRIND_RESTORE_STACK \
1416 : "=a" (_res) \
1417 : "a" (&_argvec[0]) \
1418 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1419 ); \
1420 lval = (__typeof__(lval)) _res; \
1421 } while (0)
1422
1423 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1424 do { \
1425 volatile OrigFn _orig = (orig); \
1426 volatile unsigned long _argvec[5]; \
1427 volatile unsigned long _res; \
1428 _argvec[0] = (unsigned long)_orig.nraddr; \
1429 _argvec[1] = (unsigned long)(arg1); \
1430 _argvec[2] = (unsigned long)(arg2); \
1431 _argvec[3] = (unsigned long)(arg3); \
1432 _argvec[4] = (unsigned long)(arg4); \
1433 __asm__ volatile( \
1434 VALGRIND_ALIGN_STACK \
1435 "pushl 16(%%eax)\n\t" \
1436 "pushl 12(%%eax)\n\t" \
1437 "pushl 8(%%eax)\n\t" \
1438 "pushl 4(%%eax)\n\t" \
1439 "movl (%%eax), %%eax\n\t" \
1440 VALGRIND_CALL_NOREDIR_EAX \
1441 VALGRIND_RESTORE_STACK \
1442 : "=a" (_res) \
1443 : "a" (&_argvec[0]) \
1444 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1445 ); \
1446 lval = (__typeof__(lval)) _res; \
1447 } while (0)
1448
1449 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1450 do { \
1451 volatile OrigFn _orig = (orig); \
1452 volatile unsigned long _argvec[6]; \
1453 volatile unsigned long _res; \
1454 _argvec[0] = (unsigned long)_orig.nraddr; \
1455 _argvec[1] = (unsigned long)(arg1); \
1456 _argvec[2] = (unsigned long)(arg2); \
1457 _argvec[3] = (unsigned long)(arg3); \
1458 _argvec[4] = (unsigned long)(arg4); \
1459 _argvec[5] = (unsigned long)(arg5); \
1460 __asm__ volatile( \
1461 VALGRIND_ALIGN_STACK \
1462 "subl $12, %%esp\n\t" \
1463 "pushl 20(%%eax)\n\t" \
1464 "pushl 16(%%eax)\n\t" \
1465 "pushl 12(%%eax)\n\t" \
1466 "pushl 8(%%eax)\n\t" \
1467 "pushl 4(%%eax)\n\t" \
1468 "movl (%%eax), %%eax\n\t" \
1469 VALGRIND_CALL_NOREDIR_EAX \
1470 VALGRIND_RESTORE_STACK \
1471 : "=a" (_res) \
1472 : "a" (&_argvec[0]) \
1473 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1474 ); \
1475 lval = (__typeof__(lval)) _res; \
1476 } while (0)
1477
1478 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1479 do { \
1480 volatile OrigFn _orig = (orig); \
1481 volatile unsigned long _argvec[7]; \
1482 volatile unsigned long _res; \
1483 _argvec[0] = (unsigned long)_orig.nraddr; \
1484 _argvec[1] = (unsigned long)(arg1); \
1485 _argvec[2] = (unsigned long)(arg2); \
1486 _argvec[3] = (unsigned long)(arg3); \
1487 _argvec[4] = (unsigned long)(arg4); \
1488 _argvec[5] = (unsigned long)(arg5); \
1489 _argvec[6] = (unsigned long)(arg6); \
1490 __asm__ volatile( \
1491 VALGRIND_ALIGN_STACK \
1492 "subl $8, %%esp\n\t" \
1493 "pushl 24(%%eax)\n\t" \
1494 "pushl 20(%%eax)\n\t" \
1495 "pushl 16(%%eax)\n\t" \
1496 "pushl 12(%%eax)\n\t" \
1497 "pushl 8(%%eax)\n\t" \
1498 "pushl 4(%%eax)\n\t" \
1499 "movl (%%eax), %%eax\n\t" \
1500 VALGRIND_CALL_NOREDIR_EAX \
1501 VALGRIND_RESTORE_STACK \
1502 : "=a" (_res) \
1503 : "a" (&_argvec[0]) \
1504 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1505 ); \
1506 lval = (__typeof__(lval)) _res; \
1507 } while (0)
1508
1509 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1510 arg7) \
1511 do { \
1512 volatile OrigFn _orig = (orig); \
1513 volatile unsigned long _argvec[8]; \
1514 volatile unsigned long _res; \
1515 _argvec[0] = (unsigned long)_orig.nraddr; \
1516 _argvec[1] = (unsigned long)(arg1); \
1517 _argvec[2] = (unsigned long)(arg2); \
1518 _argvec[3] = (unsigned long)(arg3); \
1519 _argvec[4] = (unsigned long)(arg4); \
1520 _argvec[5] = (unsigned long)(arg5); \
1521 _argvec[6] = (unsigned long)(arg6); \
1522 _argvec[7] = (unsigned long)(arg7); \
1523 __asm__ volatile( \
1524 VALGRIND_ALIGN_STACK \
1525 "subl $4, %%esp\n\t" \
1526 "pushl 28(%%eax)\n\t" \
1527 "pushl 24(%%eax)\n\t" \
1528 "pushl 20(%%eax)\n\t" \
1529 "pushl 16(%%eax)\n\t" \
1530 "pushl 12(%%eax)\n\t" \
1531 "pushl 8(%%eax)\n\t" \
1532 "pushl 4(%%eax)\n\t" \
1533 "movl (%%eax), %%eax\n\t" \
1534 VALGRIND_CALL_NOREDIR_EAX \
1535 VALGRIND_RESTORE_STACK \
1536 : "=a" (_res) \
1537 : "a" (&_argvec[0]) \
1538 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1539 ); \
1540 lval = (__typeof__(lval)) _res; \
1541 } while (0)
1542
1543 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1544 arg7,arg8) \
1545 do { \
1546 volatile OrigFn _orig = (orig); \
1547 volatile unsigned long _argvec[9]; \
1548 volatile unsigned long _res; \
1549 _argvec[0] = (unsigned long)_orig.nraddr; \
1550 _argvec[1] = (unsigned long)(arg1); \
1551 _argvec[2] = (unsigned long)(arg2); \
1552 _argvec[3] = (unsigned long)(arg3); \
1553 _argvec[4] = (unsigned long)(arg4); \
1554 _argvec[5] = (unsigned long)(arg5); \
1555 _argvec[6] = (unsigned long)(arg6); \
1556 _argvec[7] = (unsigned long)(arg7); \
1557 _argvec[8] = (unsigned long)(arg8); \
1558 __asm__ volatile( \
1559 VALGRIND_ALIGN_STACK \
1560 "pushl 32(%%eax)\n\t" \
1561 "pushl 28(%%eax)\n\t" \
1562 "pushl 24(%%eax)\n\t" \
1563 "pushl 20(%%eax)\n\t" \
1564 "pushl 16(%%eax)\n\t" \
1565 "pushl 12(%%eax)\n\t" \
1566 "pushl 8(%%eax)\n\t" \
1567 "pushl 4(%%eax)\n\t" \
1568 "movl (%%eax), %%eax\n\t" \
1569 VALGRIND_CALL_NOREDIR_EAX \
1570 VALGRIND_RESTORE_STACK \
1571 : "=a" (_res) \
1572 : "a" (&_argvec[0]) \
1573 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1574 ); \
1575 lval = (__typeof__(lval)) _res; \
1576 } while (0)
1577
1578 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1579 arg7,arg8,arg9) \
1580 do { \
1581 volatile OrigFn _orig = (orig); \
1582 volatile unsigned long _argvec[10]; \
1583 volatile unsigned long _res; \
1584 _argvec[0] = (unsigned long)_orig.nraddr; \
1585 _argvec[1] = (unsigned long)(arg1); \
1586 _argvec[2] = (unsigned long)(arg2); \
1587 _argvec[3] = (unsigned long)(arg3); \
1588 _argvec[4] = (unsigned long)(arg4); \
1589 _argvec[5] = (unsigned long)(arg5); \
1590 _argvec[6] = (unsigned long)(arg6); \
1591 _argvec[7] = (unsigned long)(arg7); \
1592 _argvec[8] = (unsigned long)(arg8); \
1593 _argvec[9] = (unsigned long)(arg9); \
1594 __asm__ volatile( \
1595 VALGRIND_ALIGN_STACK \
1596 "subl $12, %%esp\n\t" \
1597 "pushl 36(%%eax)\n\t" \
1598 "pushl 32(%%eax)\n\t" \
1599 "pushl 28(%%eax)\n\t" \
1600 "pushl 24(%%eax)\n\t" \
1601 "pushl 20(%%eax)\n\t" \
1602 "pushl 16(%%eax)\n\t" \
1603 "pushl 12(%%eax)\n\t" \
1604 "pushl 8(%%eax)\n\t" \
1605 "pushl 4(%%eax)\n\t" \
1606 "movl (%%eax), %%eax\n\t" \
1607 VALGRIND_CALL_NOREDIR_EAX \
1608 VALGRIND_RESTORE_STACK \
1609 : "=a" (_res) \
1610 : "a" (&_argvec[0]) \
1611 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1612 ); \
1613 lval = (__typeof__(lval)) _res; \
1614 } while (0)
1615
1616 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1617 arg7,arg8,arg9,arg10) \
1618 do { \
1619 volatile OrigFn _orig = (orig); \
1620 volatile unsigned long _argvec[11]; \
1621 volatile unsigned long _res; \
1622 _argvec[0] = (unsigned long)_orig.nraddr; \
1623 _argvec[1] = (unsigned long)(arg1); \
1624 _argvec[2] = (unsigned long)(arg2); \
1625 _argvec[3] = (unsigned long)(arg3); \
1626 _argvec[4] = (unsigned long)(arg4); \
1627 _argvec[5] = (unsigned long)(arg5); \
1628 _argvec[6] = (unsigned long)(arg6); \
1629 _argvec[7] = (unsigned long)(arg7); \
1630 _argvec[8] = (unsigned long)(arg8); \
1631 _argvec[9] = (unsigned long)(arg9); \
1632 _argvec[10] = (unsigned long)(arg10); \
1633 __asm__ volatile( \
1634 VALGRIND_ALIGN_STACK \
1635 "subl $8, %%esp\n\t" \
1636 "pushl 40(%%eax)\n\t" \
1637 "pushl 36(%%eax)\n\t" \
1638 "pushl 32(%%eax)\n\t" \
1639 "pushl 28(%%eax)\n\t" \
1640 "pushl 24(%%eax)\n\t" \
1641 "pushl 20(%%eax)\n\t" \
1642 "pushl 16(%%eax)\n\t" \
1643 "pushl 12(%%eax)\n\t" \
1644 "pushl 8(%%eax)\n\t" \
1645 "pushl 4(%%eax)\n\t" \
1646 "movl (%%eax), %%eax\n\t" \
1647 VALGRIND_CALL_NOREDIR_EAX \
1648 VALGRIND_RESTORE_STACK \
1649 : "=a" (_res) \
1650 : "a" (&_argvec[0]) \
1651 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1652 ); \
1653 lval = (__typeof__(lval)) _res; \
1654 } while (0)
1655
1656 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1657 arg6,arg7,arg8,arg9,arg10, \
1658 arg11) \
1659 do { \
1660 volatile OrigFn _orig = (orig); \
1661 volatile unsigned long _argvec[12]; \
1662 volatile unsigned long _res; \
1663 _argvec[0] = (unsigned long)_orig.nraddr; \
1664 _argvec[1] = (unsigned long)(arg1); \
1665 _argvec[2] = (unsigned long)(arg2); \
1666 _argvec[3] = (unsigned long)(arg3); \
1667 _argvec[4] = (unsigned long)(arg4); \
1668 _argvec[5] = (unsigned long)(arg5); \
1669 _argvec[6] = (unsigned long)(arg6); \
1670 _argvec[7] = (unsigned long)(arg7); \
1671 _argvec[8] = (unsigned long)(arg8); \
1672 _argvec[9] = (unsigned long)(arg9); \
1673 _argvec[10] = (unsigned long)(arg10); \
1674 _argvec[11] = (unsigned long)(arg11); \
1675 __asm__ volatile( \
1676 VALGRIND_ALIGN_STACK \
1677 "subl $4, %%esp\n\t" \
1678 "pushl 44(%%eax)\n\t" \
1679 "pushl 40(%%eax)\n\t" \
1680 "pushl 36(%%eax)\n\t" \
1681 "pushl 32(%%eax)\n\t" \
1682 "pushl 28(%%eax)\n\t" \
1683 "pushl 24(%%eax)\n\t" \
1684 "pushl 20(%%eax)\n\t" \
1685 "pushl 16(%%eax)\n\t" \
1686 "pushl 12(%%eax)\n\t" \
1687 "pushl 8(%%eax)\n\t" \
1688 "pushl 4(%%eax)\n\t" \
1689 "movl (%%eax), %%eax\n\t" \
1690 VALGRIND_CALL_NOREDIR_EAX \
1691 VALGRIND_RESTORE_STACK \
1692 : "=a" (_res) \
1693 : "a" (&_argvec[0]) \
1694 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1695 ); \
1696 lval = (__typeof__(lval)) _res; \
1697 } while (0)
1698
1699 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1700 arg6,arg7,arg8,arg9,arg10, \
1701 arg11,arg12) \
1702 do { \
1703 volatile OrigFn _orig = (orig); \
1704 volatile unsigned long _argvec[13]; \
1705 volatile unsigned long _res; \
1706 _argvec[0] = (unsigned long)_orig.nraddr; \
1707 _argvec[1] = (unsigned long)(arg1); \
1708 _argvec[2] = (unsigned long)(arg2); \
1709 _argvec[3] = (unsigned long)(arg3); \
1710 _argvec[4] = (unsigned long)(arg4); \
1711 _argvec[5] = (unsigned long)(arg5); \
1712 _argvec[6] = (unsigned long)(arg6); \
1713 _argvec[7] = (unsigned long)(arg7); \
1714 _argvec[8] = (unsigned long)(arg8); \
1715 _argvec[9] = (unsigned long)(arg9); \
1716 _argvec[10] = (unsigned long)(arg10); \
1717 _argvec[11] = (unsigned long)(arg11); \
1718 _argvec[12] = (unsigned long)(arg12); \
1719 __asm__ volatile( \
1720 VALGRIND_ALIGN_STACK \
1721 "pushl 48(%%eax)\n\t" \
1722 "pushl 44(%%eax)\n\t" \
1723 "pushl 40(%%eax)\n\t" \
1724 "pushl 36(%%eax)\n\t" \
1725 "pushl 32(%%eax)\n\t" \
1726 "pushl 28(%%eax)\n\t" \
1727 "pushl 24(%%eax)\n\t" \
1728 "pushl 20(%%eax)\n\t" \
1729 "pushl 16(%%eax)\n\t" \
1730 "pushl 12(%%eax)\n\t" \
1731 "pushl 8(%%eax)\n\t" \
1732 "pushl 4(%%eax)\n\t" \
1733 "movl (%%eax), %%eax\n\t" \
1734 VALGRIND_CALL_NOREDIR_EAX \
1735 VALGRIND_RESTORE_STACK \
1736 : "=a" (_res) \
1737 : "a" (&_argvec[0]) \
1738 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1739 ); \
1740 lval = (__typeof__(lval)) _res; \
1741 } while (0)
1742
1743 #endif  /* PLAT_x86_linux || PLAT_x86_darwin || PLAT_x86_solaris || PLAT_x86_freebsd */
1744
1745
1746
1747 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
1748 || defined(PLAT_amd64_solaris) || defined(PLAT_amd64_freebsd)
1749
1750
1751
1752
1753 #define __CALLER_SAVED_REGS "rcx", "rdx", "rsi", \
1754 "rdi", "r8", "r9", "r10", "r11"
1755
1756
1757
1758
1759
1760
1761
1762
1763
1764
1765
1766
1767
1768
1769
1770
1771
1772
1773
1774
1775
1776
1777
1778
1779
1780
1781
1782
1783
1784
1785
1786
1787
1788
1789
1790
1791
1792
1793
1794
1795
1796
1797
1798
1799
1800
1801
1802
1803
1804
1805
1806
1807
1808
1809
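/* When the compiler emits DWARF CFI, these macros temporarily point %rbp at
   the frame's CFA (obtained via __builtin_dwarf_cfa), saving the old %rbp
   in %r15, and describe that to the unwinder with .cfi directives, so that
   stack traces through the wrapped call remain usable. */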
1810 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
1811 # define __FRAME_POINTER \
1812 ,"r"(__builtin_dwarf_cfa())
1813 # define VALGRIND_CFI_PROLOGUE \
1814 "movq %%rbp, %%r15\n\t" \
1815 "movq %2, %%rbp\n\t" \
1816 ".cfi_remember_state\n\t" \
1817 ".cfi_def_cfa rbp, 0\n\t"
1818 # define VALGRIND_CFI_EPILOGUE \
1819 "movq %%r15, %%rbp\n\t" \
1820 ".cfi_restore_state\n\t"
1821 #else
1822 # define __FRAME_POINTER
1823 # define VALGRIND_CFI_PROLOGUE
1824 # define VALGRIND_CFI_EPILOGUE
1825 #endif
1826
1827
1828
1829
1830
1831
1832 #define VALGRIND_ALIGN_STACK \
1833 "movq %%rsp,%%r14\n\t" \
1834 "andq $0xfffffffffffffff0,%%rsp\n\t"
1835 #define VALGRIND_RESTORE_STACK \
1836 "movq %%r14,%%rsp\n\t"
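
/* Each amd64 CALL_FN_* below drops %rsp by 128 (or 136, to keep 16-byte
   alignment when an odd number of arguments is pushed) before pushing
   anything, so that the wrapper's own 128-byte red zone, which the compiler
   may be using for locals, is not overwritten by stack-passed arguments. */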
1837
1838
1839
1840
1841
1842
1843
1844
1845
1846
1847
1848
1849
1850
1851
1852
1853
1854
1855
1856
1857
1858
1859
1860
1861
1862 #define CALL_FN_W_v(lval, orig) \
1863 do { \
1864 volatile OrigFn _orig = (orig); \
1865 volatile unsigned long _argvec[1]; \
1866 volatile unsigned long _res; \
1867 _argvec[0] = (unsigned long)_orig.nraddr; \
1868 __asm__ volatile( \
1869 VALGRIND_CFI_PROLOGUE \
1870 VALGRIND_ALIGN_STACK \
1871 "subq $128,%%rsp\n\t" \
1872 "movq (%%rax), %%rax\n\t" \
1873 VALGRIND_CALL_NOREDIR_RAX \
1874 VALGRIND_RESTORE_STACK \
1875 VALGRIND_CFI_EPILOGUE \
1876 : "=a" (_res) \
1877 : "a" (&_argvec[0]) __FRAME_POINTER \
1878 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1879 ); \
1880 lval = (__typeof__(lval)) _res; \
1881 } while (0)
1882
1883 #define CALL_FN_W_W(lval, orig, arg1) \
1884 do { \
1885 volatile OrigFn _orig = (orig); \
1886 volatile unsigned long _argvec[2]; \
1887 volatile unsigned long _res; \
1888 _argvec[0] = (unsigned long)_orig.nraddr; \
1889 _argvec[1] = (unsigned long)(arg1); \
1890 __asm__ volatile( \
1891 VALGRIND_CFI_PROLOGUE \
1892 VALGRIND_ALIGN_STACK \
1893 "subq $128,%%rsp\n\t" \
1894 "movq 8(%%rax), %%rdi\n\t" \
1895 "movq (%%rax), %%rax\n\t" \
1896 VALGRIND_CALL_NOREDIR_RAX \
1897 VALGRIND_RESTORE_STACK \
1898 VALGRIND_CFI_EPILOGUE \
1899 : "=a" (_res) \
1900 : "a" (&_argvec[0]) __FRAME_POINTER \
1901 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1902 ); \
1903 lval = (__typeof__(lval)) _res; \
1904 } while (0)
1905
1906 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1907 do { \
1908 volatile OrigFn _orig = (orig); \
1909 volatile unsigned long _argvec[3]; \
1910 volatile unsigned long _res; \
1911 _argvec[0] = (unsigned long)_orig.nraddr; \
1912 _argvec[1] = (unsigned long)(arg1); \
1913 _argvec[2] = (unsigned long)(arg2); \
1914 __asm__ volatile( \
1915 VALGRIND_CFI_PROLOGUE \
1916 VALGRIND_ALIGN_STACK \
1917 "subq $128,%%rsp\n\t" \
1918 "movq 16(%%rax), %%rsi\n\t" \
1919 "movq 8(%%rax), %%rdi\n\t" \
1920 "movq (%%rax), %%rax\n\t" \
1921 VALGRIND_CALL_NOREDIR_RAX \
1922 VALGRIND_RESTORE_STACK \
1923 VALGRIND_CFI_EPILOGUE \
1924 : "=a" (_res) \
1925 : "a" (&_argvec[0]) __FRAME_POINTER \
1926 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1927 ); \
1928 lval = (__typeof__(lval)) _res; \
1929 } while (0)
1930
1931 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1932 do { \
1933 volatile OrigFn _orig = (orig); \
1934 volatile unsigned long _argvec[4]; \
1935 volatile unsigned long _res; \
1936 _argvec[0] = (unsigned long)_orig.nraddr; \
1937 _argvec[1] = (unsigned long)(arg1); \
1938 _argvec[2] = (unsigned long)(arg2); \
1939 _argvec[3] = (unsigned long)(arg3); \
1940 __asm__ volatile( \
1941 VALGRIND_CFI_PROLOGUE \
1942 VALGRIND_ALIGN_STACK \
1943 "subq $128,%%rsp\n\t" \
1944 "movq 24(%%rax), %%rdx\n\t" \
1945 "movq 16(%%rax), %%rsi\n\t" \
1946 "movq 8(%%rax), %%rdi\n\t" \
1947 "movq (%%rax), %%rax\n\t" \
1948 VALGRIND_CALL_NOREDIR_RAX \
1949 VALGRIND_RESTORE_STACK \
1950 VALGRIND_CFI_EPILOGUE \
1951 : "=a" (_res) \
1952 : "a" (&_argvec[0]) __FRAME_POINTER \
1953 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1954 ); \
1955 lval = (__typeof__(lval)) _res; \
1956 } while (0)
1957
1958 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1959 do { \
1960 volatile OrigFn _orig = (orig); \
1961 volatile unsigned long _argvec[5]; \
1962 volatile unsigned long _res; \
1963 _argvec[0] = (unsigned long)_orig.nraddr; \
1964 _argvec[1] = (unsigned long)(arg1); \
1965 _argvec[2] = (unsigned long)(arg2); \
1966 _argvec[3] = (unsigned long)(arg3); \
1967 _argvec[4] = (unsigned long)(arg4); \
1968 __asm__ volatile( \
1969 VALGRIND_CFI_PROLOGUE \
1970 VALGRIND_ALIGN_STACK \
1971 "subq $128,%%rsp\n\t" \
1972 "movq 32(%%rax), %%rcx\n\t" \
1973 "movq 24(%%rax), %%rdx\n\t" \
1974 "movq 16(%%rax), %%rsi\n\t" \
1975 "movq 8(%%rax), %%rdi\n\t" \
1976 "movq (%%rax), %%rax\n\t" \
1977 VALGRIND_CALL_NOREDIR_RAX \
1978 VALGRIND_RESTORE_STACK \
1979 VALGRIND_CFI_EPILOGUE \
1980 : "=a" (_res) \
1981 : "a" (&_argvec[0]) __FRAME_POINTER \
1982 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1983 ); \
1984 lval = (__typeof__(lval)) _res; \
1985 } while (0)
1986
1987 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1988 do { \
1989 volatile OrigFn _orig = (orig); \
1990 volatile unsigned long _argvec[6]; \
1991 volatile unsigned long _res; \
1992 _argvec[0] = (unsigned long)_orig.nraddr; \
1993 _argvec[1] = (unsigned long)(arg1); \
1994 _argvec[2] = (unsigned long)(arg2); \
1995 _argvec[3] = (unsigned long)(arg3); \
1996 _argvec[4] = (unsigned long)(arg4); \
1997 _argvec[5] = (unsigned long)(arg5); \
1998 __asm__ volatile( \
1999 VALGRIND_CFI_PROLOGUE \
2000 VALGRIND_ALIGN_STACK \
2001 "subq $128,%%rsp\n\t" \
2002 "movq 40(%%rax), %%r8\n\t" \
2003 "movq 32(%%rax), %%rcx\n\t" \
2004 "movq 24(%%rax), %%rdx\n\t" \
2005 "movq 16(%%rax), %%rsi\n\t" \
2006 "movq 8(%%rax), %%rdi\n\t" \
2007 "movq (%%rax), %%rax\n\t" \
2008 VALGRIND_CALL_NOREDIR_RAX \
2009 VALGRIND_RESTORE_STACK \
2010 VALGRIND_CFI_EPILOGUE \
2011 : "=a" (_res) \
2012 : "a" (&_argvec[0]) __FRAME_POINTER \
2013 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2014 ); \
2015 lval = (__typeof__(lval)) _res; \
2016 } while (0)
2017
2018 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2019 do { \
2020 volatile OrigFn _orig = (orig); \
2021 volatile unsigned long _argvec[7]; \
2022 volatile unsigned long _res; \
2023 _argvec[0] = (unsigned long)_orig.nraddr; \
2024 _argvec[1] = (unsigned long)(arg1); \
2025 _argvec[2] = (unsigned long)(arg2); \
2026 _argvec[3] = (unsigned long)(arg3); \
2027 _argvec[4] = (unsigned long)(arg4); \
2028 _argvec[5] = (unsigned long)(arg5); \
2029 _argvec[6] = (unsigned long)(arg6); \
2030 __asm__ volatile( \
2031 VALGRIND_CFI_PROLOGUE \
2032 VALGRIND_ALIGN_STACK \
2033 "subq $128,%%rsp\n\t" \
2034 "movq 48(%%rax), %%r9\n\t" \
2035 "movq 40(%%rax), %%r8\n\t" \
2036 "movq 32(%%rax), %%rcx\n\t" \
2037 "movq 24(%%rax), %%rdx\n\t" \
2038 "movq 16(%%rax), %%rsi\n\t" \
2039 "movq 8(%%rax), %%rdi\n\t" \
2040 "movq (%%rax), %%rax\n\t" \
2041 VALGRIND_CALL_NOREDIR_RAX \
2042 VALGRIND_RESTORE_STACK \
2043 VALGRIND_CFI_EPILOGUE \
2044 : "=a" (_res) \
2045 : "a" (&_argvec[0]) __FRAME_POINTER \
2046 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2047 ); \
2048 lval = (__typeof__(lval)) _res; \
2049 } while (0)
2050
2051 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2052 arg7) \
2053 do { \
2054 volatile OrigFn _orig = (orig); \
2055 volatile unsigned long _argvec[8]; \
2056 volatile unsigned long _res; \
2057 _argvec[0] = (unsigned long)_orig.nraddr; \
2058 _argvec[1] = (unsigned long)(arg1); \
2059 _argvec[2] = (unsigned long)(arg2); \
2060 _argvec[3] = (unsigned long)(arg3); \
2061 _argvec[4] = (unsigned long)(arg4); \
2062 _argvec[5] = (unsigned long)(arg5); \
2063 _argvec[6] = (unsigned long)(arg6); \
2064 _argvec[7] = (unsigned long)(arg7); \
2065 __asm__ volatile( \
2066 VALGRIND_CFI_PROLOGUE \
2067 VALGRIND_ALIGN_STACK \
2068 "subq $136,%%rsp\n\t" \
2069 "pushq 56(%%rax)\n\t" \
2070 "movq 48(%%rax), %%r9\n\t" \
2071 "movq 40(%%rax), %%r8\n\t" \
2072 "movq 32(%%rax), %%rcx\n\t" \
2073 "movq 24(%%rax), %%rdx\n\t" \
2074 "movq 16(%%rax), %%rsi\n\t" \
2075 "movq 8(%%rax), %%rdi\n\t" \
2076 "movq (%%rax), %%rax\n\t" \
2077 VALGRIND_CALL_NOREDIR_RAX \
2078 VALGRIND_RESTORE_STACK \
2079 VALGRIND_CFI_EPILOGUE \
2080 : "=a" (_res) \
2081 : "a" (&_argvec[0]) __FRAME_POINTER \
2082 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2083 ); \
2084 lval = (__typeof__(lval)) _res; \
2085 } while (0)
2086
2087 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2088 arg7,arg8) \
2089 do { \
2090 volatile OrigFn _orig = (orig); \
2091 volatile unsigned long _argvec[9]; \
2092 volatile unsigned long _res; \
2093 _argvec[0] = (unsigned long)_orig.nraddr; \
2094 _argvec[1] = (unsigned long)(arg1); \
2095 _argvec[2] = (unsigned long)(arg2); \
2096 _argvec[3] = (unsigned long)(arg3); \
2097 _argvec[4] = (unsigned long)(arg4); \
2098 _argvec[5] = (unsigned long)(arg5); \
2099 _argvec[6] = (unsigned long)(arg6); \
2100 _argvec[7] = (unsigned long)(arg7); \
2101 _argvec[8] = (unsigned long)(arg8); \
2102 __asm__ volatile( \
2103 VALGRIND_CFI_PROLOGUE \
2104 VALGRIND_ALIGN_STACK \
2105 "subq $128,%%rsp\n\t" \
2106 "pushq 64(%%rax)\n\t" \
2107 "pushq 56(%%rax)\n\t" \
2108 "movq 48(%%rax), %%r9\n\t" \
2109 "movq 40(%%rax), %%r8\n\t" \
2110 "movq 32(%%rax), %%rcx\n\t" \
2111 "movq 24(%%rax), %%rdx\n\t" \
2112 "movq 16(%%rax), %%rsi\n\t" \
2113 "movq 8(%%rax), %%rdi\n\t" \
2114 "movq (%%rax), %%rax\n\t" \
2115 VALGRIND_CALL_NOREDIR_RAX \
2116 VALGRIND_RESTORE_STACK \
2117 VALGRIND_CFI_EPILOGUE \
2118 : "=a" (_res) \
2119 : "a" (&_argvec[0]) __FRAME_POINTER \
2120 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2121 ); \
2122 lval = (__typeof__(lval)) _res; \
2123 } while (0)
2124
2125 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2126 arg7,arg8,arg9) \
2127 do { \
2128 volatile OrigFn _orig = (orig); \
2129 volatile unsigned long _argvec[10]; \
2130 volatile unsigned long _res; \
2131 _argvec[0] = (unsigned long)_orig.nraddr; \
2132 _argvec[1] = (unsigned long)(arg1); \
2133 _argvec[2] = (unsigned long)(arg2); \
2134 _argvec[3] = (unsigned long)(arg3); \
2135 _argvec[4] = (unsigned long)(arg4); \
2136 _argvec[5] = (unsigned long)(arg5); \
2137 _argvec[6] = (unsigned long)(arg6); \
2138 _argvec[7] = (unsigned long)(arg7); \
2139 _argvec[8] = (unsigned long)(arg8); \
2140 _argvec[9] = (unsigned long)(arg9); \
2141 __asm__ volatile( \
2142 VALGRIND_CFI_PROLOGUE \
2143 VALGRIND_ALIGN_STACK \
2144 "subq $136,%%rsp\n\t" \
2145 "pushq 72(%%rax)\n\t" \
2146 "pushq 64(%%rax)\n\t" \
2147 "pushq 56(%%rax)\n\t" \
2148 "movq 48(%%rax), %%r9\n\t" \
2149 "movq 40(%%rax), %%r8\n\t" \
2150 "movq 32(%%rax), %%rcx\n\t" \
2151 "movq 24(%%rax), %%rdx\n\t" \
2152 "movq 16(%%rax), %%rsi\n\t" \
2153 "movq 8(%%rax), %%rdi\n\t" \
2154 "movq (%%rax), %%rax\n\t" \
2155 VALGRIND_CALL_NOREDIR_RAX \
2156 VALGRIND_RESTORE_STACK \
2157 VALGRIND_CFI_EPILOGUE \
2158 : "=a" (_res) \
2159 : "a" (&_argvec[0]) __FRAME_POINTER \
2160 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2161 ); \
2162 lval = (__typeof__(lval)) _res; \
2163 } while (0)
2164
2165 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2166 arg7,arg8,arg9,arg10) \
2167 do { \
2168 volatile OrigFn _orig = (orig); \
2169 volatile unsigned long _argvec[11]; \
2170 volatile unsigned long _res; \
2171 _argvec[0] = (unsigned long)_orig.nraddr; \
2172 _argvec[1] = (unsigned long)(arg1); \
2173 _argvec[2] = (unsigned long)(arg2); \
2174 _argvec[3] = (unsigned long)(arg3); \
2175 _argvec[4] = (unsigned long)(arg4); \
2176 _argvec[5] = (unsigned long)(arg5); \
2177 _argvec[6] = (unsigned long)(arg6); \
2178 _argvec[7] = (unsigned long)(arg7); \
2179 _argvec[8] = (unsigned long)(arg8); \
2180 _argvec[9] = (unsigned long)(arg9); \
2181 _argvec[10] = (unsigned long)(arg10); \
2182 __asm__ volatile( \
2183 VALGRIND_CFI_PROLOGUE \
2184 VALGRIND_ALIGN_STACK \
2185 "subq $128,%%rsp\n\t" \
2186 "pushq 80(%%rax)\n\t" \
2187 "pushq 72(%%rax)\n\t" \
2188 "pushq 64(%%rax)\n\t" \
2189 "pushq 56(%%rax)\n\t" \
2190 "movq 48(%%rax), %%r9\n\t" \
2191 "movq 40(%%rax), %%r8\n\t" \
2192 "movq 32(%%rax), %%rcx\n\t" \
2193 "movq 24(%%rax), %%rdx\n\t" \
2194 "movq 16(%%rax), %%rsi\n\t" \
2195 "movq 8(%%rax), %%rdi\n\t" \
2196 "movq (%%rax), %%rax\n\t" \
2197 VALGRIND_CALL_NOREDIR_RAX \
2198 VALGRIND_RESTORE_STACK \
2199 VALGRIND_CFI_EPILOGUE \
2200 : "=a" (_res) \
2201 : "a" (&_argvec[0]) __FRAME_POINTER \
2202 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2203 ); \
2204 lval = (__typeof__(lval)) _res; \
2205 } while (0)
2206
2207 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2208 arg7,arg8,arg9,arg10,arg11) \
2209 do { \
2210 volatile OrigFn _orig = (orig); \
2211 volatile unsigned long _argvec[12]; \
2212 volatile unsigned long _res; \
2213 _argvec[0] = (unsigned long)_orig.nraddr; \
2214 _argvec[1] = (unsigned long)(arg1); \
2215 _argvec[2] = (unsigned long)(arg2); \
2216 _argvec[3] = (unsigned long)(arg3); \
2217 _argvec[4] = (unsigned long)(arg4); \
2218 _argvec[5] = (unsigned long)(arg5); \
2219 _argvec[6] = (unsigned long)(arg6); \
2220 _argvec[7] = (unsigned long)(arg7); \
2221 _argvec[8] = (unsigned long)(arg8); \
2222 _argvec[9] = (unsigned long)(arg9); \
2223 _argvec[10] = (unsigned long)(arg10); \
2224 _argvec[11] = (unsigned long)(arg11); \
2225 __asm__ volatile( \
2226 VALGRIND_CFI_PROLOGUE \
2227 VALGRIND_ALIGN_STACK \
2228 "subq $136,%%rsp\n\t" \
2229 "pushq 88(%%rax)\n\t" \
2230 "pushq 80(%%rax)\n\t" \
2231 "pushq 72(%%rax)\n\t" \
2232 "pushq 64(%%rax)\n\t" \
2233 "pushq 56(%%rax)\n\t" \
2234 "movq 48(%%rax), %%r9\n\t" \
2235 "movq 40(%%rax), %%r8\n\t" \
2236 "movq 32(%%rax), %%rcx\n\t" \
2237 "movq 24(%%rax), %%rdx\n\t" \
2238 "movq 16(%%rax), %%rsi\n\t" \
2239 "movq 8(%%rax), %%rdi\n\t" \
2240 "movq (%%rax), %%rax\n\t" \
2241 VALGRIND_CALL_NOREDIR_RAX \
2242 VALGRIND_RESTORE_STACK \
2243 VALGRIND_CFI_EPILOGUE \
2244 : "=a" (_res) \
2245 : "a" (&_argvec[0]) __FRAME_POINTER \
2246 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2247 ); \
2248 lval = (__typeof__(lval)) _res; \
2249 } while (0)
2250
2251 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2252 arg7,arg8,arg9,arg10,arg11,arg12) \
2253 do { \
2254 volatile OrigFn _orig = (orig); \
2255 volatile unsigned long _argvec[13]; \
2256 volatile unsigned long _res; \
2257 _argvec[0] = (unsigned long)_orig.nraddr; \
2258 _argvec[1] = (unsigned long)(arg1); \
2259 _argvec[2] = (unsigned long)(arg2); \
2260 _argvec[3] = (unsigned long)(arg3); \
2261 _argvec[4] = (unsigned long)(arg4); \
2262 _argvec[5] = (unsigned long)(arg5); \
2263 _argvec[6] = (unsigned long)(arg6); \
2264 _argvec[7] = (unsigned long)(arg7); \
2265 _argvec[8] = (unsigned long)(arg8); \
2266 _argvec[9] = (unsigned long)(arg9); \
2267 _argvec[10] = (unsigned long)(arg10); \
2268 _argvec[11] = (unsigned long)(arg11); \
2269 _argvec[12] = (unsigned long)(arg12); \
2270 __asm__ volatile( \
2271 VALGRIND_CFI_PROLOGUE \
2272 VALGRIND_ALIGN_STACK \
2273 "subq $128,%%rsp\n\t" \
2274 "pushq 96(%%rax)\n\t" \
2275 "pushq 88(%%rax)\n\t" \
2276 "pushq 80(%%rax)\n\t" \
2277 "pushq 72(%%rax)\n\t" \
2278 "pushq 64(%%rax)\n\t" \
2279 "pushq 56(%%rax)\n\t" \
2280 "movq 48(%%rax), %%r9\n\t" \
2281 "movq 40(%%rax), %%r8\n\t" \
2282 "movq 32(%%rax), %%rcx\n\t" \
2283 "movq 24(%%rax), %%rdx\n\t" \
2284 "movq 16(%%rax), %%rsi\n\t" \
2285 "movq 8(%%rax), %%rdi\n\t" \
2286 "movq (%%rax), %%rax\n\t" \
2287 VALGRIND_CALL_NOREDIR_RAX \
2288 VALGRIND_RESTORE_STACK \
2289 VALGRIND_CFI_EPILOGUE \
2290 : "=a" (_res) \
2291 : "a" (&_argvec[0]) __FRAME_POINTER \
2292 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2293 ); \
2294 lval = (__typeof__(lval)) _res; \
2295 } while (0)
2296
2297 #endif
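/* Sketch of how the CALL_FN_W_* family is typically used, independent of
   platform.  This is an illustrative example only: "foo" is a hypothetical
   function with signature int foo(int, int); the wrapping macros
   I_WRAP_SONAME_FNNAME_ZU and VALGRIND_GET_ORIG_FN, and the OrigFn type,
   are defined elsewhere in this header.

      int I_WRAP_SONAME_FNNAME_ZU(NONE, foo)(int x, int y)
      {
         int    r;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);    // capture the wrapped function
         CALL_FN_W_WW(r, fn, x, y);   // call it with redirection suppressed,
                                      // passing two word-sized arguments
         return r;
      }
*/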
2298
2299
2300
2301 #if defined(PLAT_ppc32_linux)
2302
2303
2304
2305
2306
2307
2308
2309
2310
2311
2312
2313
2314
2315
2316
2317
2318
2319
2320
2321
2322
2323
2324
2325
2326
2327 #define __CALLER_SAVED_REGS \
2328 "lr", "ctr", "xer", \
2329 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2330 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2331 "r11", "r12", "r13"
2332
2333
2334
2335
2336
2337
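/* The two macros below keep the stack pointer 16-byte aligned across the
   hidden call: "rlwinm 1,1,0,0,27" clears the low four bits of r1 (rounding
   it down to a 16-byte boundary) after the original value has been parked
   in the non-volatile register r28, which is why r28 appears in the clobber
   list of every CALL_FN_* macro in this section. */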
2338 #define VALGRIND_ALIGN_STACK \
2339 "mr 28,1\n\t" \
2340 "rlwinm 1,1,0,0,27\n\t"
2341 #define VALGRIND_RESTORE_STACK \
2342 "mr 1,28\n\t"
2343
2344
2345
2346
2347 #define CALL_FN_W_v(lval, orig) \
2348 do { \
2349 volatile OrigFn _orig = (orig); \
2350 volatile unsigned long _argvec[1]; \
2351 volatile unsigned long _res; \
2352 _argvec[0] = (unsigned long)_orig.nraddr; \
2353 __asm__ volatile( \
2354 VALGRIND_ALIGN_STACK \
2355 "mr 11,%1\n\t" \
2356 "lwz 11,0(11)\n\t" \
2357 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2358 VALGRIND_RESTORE_STACK \
2359 "mr %0,3" \
2360 : "=r" (_res) \
2361 : "r" (&_argvec[0]) \
2362 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2363 ); \
2364 lval = (__typeof__(lval)) _res; \
2365 } while (0)
2366
2367 #define CALL_FN_W_W(lval, orig, arg1) \
2368 do { \
2369 volatile OrigFn _orig = (orig); \
2370 volatile unsigned long _argvec[2]; \
2371 volatile unsigned long _res; \
2372 _argvec[0] = (unsigned long)_orig.nraddr; \
2373 _argvec[1] = (unsigned long)arg1; \
2374 __asm__ volatile( \
2375 VALGRIND_ALIGN_STACK \
2376 "mr 11,%1\n\t" \
2377 "lwz 3,4(11)\n\t" \
2378 "lwz 11,0(11)\n\t" \
2379 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2380 VALGRIND_RESTORE_STACK \
2381 "mr %0,3" \
2382 : "=r" (_res) \
2383 : "r" (&_argvec[0]) \
2384 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2385 ); \
2386 lval = (__typeof__(lval)) _res; \
2387 } while (0)
2388
2389 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2390 do { \
2391 volatile OrigFn _orig = (orig); \
2392 volatile unsigned long _argvec[3]; \
2393 volatile unsigned long _res; \
2394 _argvec[0] = (unsigned long)_orig.nraddr; \
2395 _argvec[1] = (unsigned long)arg1; \
2396 _argvec[2] = (unsigned long)arg2; \
2397 __asm__ volatile( \
2398 VALGRIND_ALIGN_STACK \
2399 "mr 11,%1\n\t" \
2400 "lwz 3,4(11)\n\t" \
2401 "lwz 4,8(11)\n\t" \
2402 "lwz 11,0(11)\n\t" \
2403 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2404 VALGRIND_RESTORE_STACK \
2405 "mr %0,3" \
2406 : "=r" (_res) \
2407 : "r" (&_argvec[0]) \
2408 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2409 ); \
2410 lval = (__typeof__(lval)) _res; \
2411 } while (0)
2412
2413 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2414 do { \
2415 volatile OrigFn _orig = (orig); \
2416 volatile unsigned long _argvec[4]; \
2417 volatile unsigned long _res; \
2418 _argvec[0] = (unsigned long)_orig.nraddr; \
2419 _argvec[1] = (unsigned long)arg1; \
2420 _argvec[2] = (unsigned long)arg2; \
2421 _argvec[3] = (unsigned long)arg3; \
2422 __asm__ volatile( \
2423 VALGRIND_ALIGN_STACK \
2424 "mr 11,%1\n\t" \
2425 "lwz 3,4(11)\n\t" \
2426 "lwz 4,8(11)\n\t" \
2427 "lwz 5,12(11)\n\t" \
2428 "lwz 11,0(11)\n\t" \
2429 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2430 VALGRIND_RESTORE_STACK \
2431 "mr %0,3" \
2432 : "=r" (_res) \
2433 : "r" (&_argvec[0]) \
2434 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2435 ); \
2436 lval = (__typeof__(lval)) _res; \
2437 } while (0)
2438
2439 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2440 do { \
2441 volatile OrigFn _orig = (orig); \
2442 volatile unsigned long _argvec[5]; \
2443 volatile unsigned long _res; \
2444 _argvec[0] = (unsigned long)_orig.nraddr; \
2445 _argvec[1] = (unsigned long)arg1; \
2446 _argvec[2] = (unsigned long)arg2; \
2447 _argvec[3] = (unsigned long)arg3; \
2448 _argvec[4] = (unsigned long)arg4; \
2449 __asm__ volatile( \
2450 VALGRIND_ALIGN_STACK \
2451 "mr 11,%1\n\t" \
2452 "lwz 3,4(11)\n\t" \
2453 "lwz 4,8(11)\n\t" \
2454 "lwz 5,12(11)\n\t" \
2455 "lwz 6,16(11)\n\t" \
2456 "lwz 11,0(11)\n\t" \
2457 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2458 VALGRIND_RESTORE_STACK \
2459 "mr %0,3" \
2460 : "=r" (_res) \
2461 : "r" (&_argvec[0]) \
2462 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2463 ); \
2464 lval = (__typeof__(lval)) _res; \
2465 } while (0)
2466
2467 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2468 do { \
2469 volatile OrigFn _orig = (orig); \
2470 volatile unsigned long _argvec[6]; \
2471 volatile unsigned long _res; \
2472 _argvec[0] = (unsigned long)_orig.nraddr; \
2473 _argvec[1] = (unsigned long)arg1; \
2474 _argvec[2] = (unsigned long)arg2; \
2475 _argvec[3] = (unsigned long)arg3; \
2476 _argvec[4] = (unsigned long)arg4; \
2477 _argvec[5] = (unsigned long)arg5; \
2478 __asm__ volatile( \
2479 VALGRIND_ALIGN_STACK \
2480 "mr 11,%1\n\t" \
2481 "lwz 3,4(11)\n\t" \
2482 "lwz 4,8(11)\n\t" \
2483 "lwz 5,12(11)\n\t" \
2484 "lwz 6,16(11)\n\t" \
2485 "lwz 7,20(11)\n\t" \
2486 "lwz 11,0(11)\n\t" \
2487 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2488 VALGRIND_RESTORE_STACK \
2489 "mr %0,3" \
2490 : "=r" (_res) \
2491 : "r" (&_argvec[0]) \
2492 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2493 ); \
2494 lval = (__typeof__(lval)) _res; \
2495 } while (0)
2496
2497 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2498 do { \
2499 volatile OrigFn _orig = (orig); \
2500 volatile unsigned long _argvec[7]; \
2501 volatile unsigned long _res; \
2502 _argvec[0] = (unsigned long)_orig.nraddr; \
2503 _argvec[1] = (unsigned long)arg1; \
2504 _argvec[2] = (unsigned long)arg2; \
2505 _argvec[3] = (unsigned long)arg3; \
2506 _argvec[4] = (unsigned long)arg4; \
2507 _argvec[5] = (unsigned long)arg5; \
2508 _argvec[6] = (unsigned long)arg6; \
2509 __asm__ volatile( \
2510 VALGRIND_ALIGN_STACK \
2511 "mr 11,%1\n\t" \
2512 "lwz 3,4(11)\n\t" \
2513 "lwz 4,8(11)\n\t" \
2514 "lwz 5,12(11)\n\t" \
2515 "lwz 6,16(11)\n\t" \
2516 "lwz 7,20(11)\n\t" \
2517 "lwz 8,24(11)\n\t" \
2518 "lwz 11,0(11)\n\t" \
2519 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2520 VALGRIND_RESTORE_STACK \
2521 "mr %0,3" \
2522 : "=r" (_res) \
2523 : "r" (&_argvec[0]) \
2524 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2525 ); \
2526 lval = (__typeof__(lval)) _res; \
2527 } while (0)
2528
2529 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2530 arg7) \
2531 do { \
2532 volatile OrigFn _orig = (orig); \
2533 volatile unsigned long _argvec[8]; \
2534 volatile unsigned long _res; \
2535 _argvec[0] = (unsigned long)_orig.nraddr; \
2536 _argvec[1] = (unsigned long)arg1; \
2537 _argvec[2] = (unsigned long)arg2; \
2538 _argvec[3] = (unsigned long)arg3; \
2539 _argvec[4] = (unsigned long)arg4; \
2540 _argvec[5] = (unsigned long)arg5; \
2541 _argvec[6] = (unsigned long)arg6; \
2542 _argvec[7] = (unsigned long)arg7; \
2543 __asm__ volatile( \
2544 VALGRIND_ALIGN_STACK \
2545 "mr 11,%1\n\t" \
2546 "lwz 3,4(11)\n\t" \
2547 "lwz 4,8(11)\n\t" \
2548 "lwz 5,12(11)\n\t" \
2549 "lwz 6,16(11)\n\t" \
2550 "lwz 7,20(11)\n\t" \
2551 "lwz 8,24(11)\n\t" \
2552 "lwz 9,28(11)\n\t" \
2553 "lwz 11,0(11)\n\t" \
2554 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2555 VALGRIND_RESTORE_STACK \
2556 "mr %0,3" \
2557 : "=r" (_res) \
2558 : "r" (&_argvec[0]) \
2559 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2560 ); \
2561 lval = (__typeof__(lval)) _res; \
2562 } while (0)
2563
2564 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2565 arg7,arg8) \
2566 do { \
2567 volatile OrigFn _orig = (orig); \
2568 volatile unsigned long _argvec[9]; \
2569 volatile unsigned long _res; \
2570 _argvec[0] = (unsigned long)_orig.nraddr; \
2571 _argvec[1] = (unsigned long)arg1; \
2572 _argvec[2] = (unsigned long)arg2; \
2573 _argvec[3] = (unsigned long)arg3; \
2574 _argvec[4] = (unsigned long)arg4; \
2575 _argvec[5] = (unsigned long)arg5; \
2576 _argvec[6] = (unsigned long)arg6; \
2577 _argvec[7] = (unsigned long)arg7; \
2578 _argvec[8] = (unsigned long)arg8; \
2579 __asm__ volatile( \
2580 VALGRIND_ALIGN_STACK \
2581 "mr 11,%1\n\t" \
2582 "lwz 3,4(11)\n\t" \
2583 "lwz 4,8(11)\n\t" \
2584 "lwz 5,12(11)\n\t" \
2585 "lwz 6,16(11)\n\t" \
2586 "lwz 7,20(11)\n\t" \
2587 "lwz 8,24(11)\n\t" \
2588 "lwz 9,28(11)\n\t" \
2589 "lwz 10,32(11)\n\t" \
2590 "lwz 11,0(11)\n\t" \
2591 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2592 VALGRIND_RESTORE_STACK \
2593 "mr %0,3" \
2594 : "=r" (_res) \
2595 : "r" (&_argvec[0]) \
2596 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2597 ); \
2598 lval = (__typeof__(lval)) _res; \
2599 } while (0)
2600
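/* For nine or more arguments, r3..r10 only cover the first eight, so the
   remaining words are spilled into a small temporary frame: under the SysV
   PPC32 ABI the parameter words start at 8(r1), and the "addi 1,1,-16" /
   "addi 1,1,-32" adjustments below are multiples of 16, so the alignment
   established above is preserved. */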
2601 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2602 arg7,arg8,arg9) \
2603 do { \
2604 volatile OrigFn _orig = (orig); \
2605 volatile unsigned long _argvec[10]; \
2606 volatile unsigned long _res; \
2607 _argvec[0] = (unsigned long)_orig.nraddr; \
2608 _argvec[1] = (unsigned long)arg1; \
2609 _argvec[2] = (unsigned long)arg2; \
2610 _argvec[3] = (unsigned long)arg3; \
2611 _argvec[4] = (unsigned long)arg4; \
2612 _argvec[5] = (unsigned long)arg5; \
2613 _argvec[6] = (unsigned long)arg6; \
2614 _argvec[7] = (unsigned long)arg7; \
2615 _argvec[8] = (unsigned long)arg8; \
2616 _argvec[9] = (unsigned long)arg9; \
2617 __asm__ volatile( \
2618 VALGRIND_ALIGN_STACK \
2619 "mr 11,%1\n\t" \
2620 "addi 1,1,-16\n\t" \
2621 \
2622 "lwz 3,36(11)\n\t" \
2623 "stw 3,8(1)\n\t" \
2624 \
2625 "lwz 3,4(11)\n\t" \
2626 "lwz 4,8(11)\n\t" \
2627 "lwz 5,12(11)\n\t" \
2628 "lwz 6,16(11)\n\t" \
2629 "lwz 7,20(11)\n\t" \
2630 "lwz 8,24(11)\n\t" \
2631 "lwz 9,28(11)\n\t" \
2632 "lwz 10,32(11)\n\t" \
2633 "lwz 11,0(11)\n\t" \
2634 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2635 VALGRIND_RESTORE_STACK \
2636 "mr %0,3" \
2637 : "=r" (_res) \
2638 : "r" (&_argvec[0]) \
2639 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2640 ); \
2641 lval = (__typeof__(lval)) _res; \
2642 } while (0)
2643
2644 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2645 arg7,arg8,arg9,arg10) \
2646 do { \
2647 volatile OrigFn _orig = (orig); \
2648 volatile unsigned long _argvec[11]; \
2649 volatile unsigned long _res; \
2650 _argvec[0] = (unsigned long)_orig.nraddr; \
2651 _argvec[1] = (unsigned long)arg1; \
2652 _argvec[2] = (unsigned long)arg2; \
2653 _argvec[3] = (unsigned long)arg3; \
2654 _argvec[4] = (unsigned long)arg4; \
2655 _argvec[5] = (unsigned long)arg5; \
2656 _argvec[6] = (unsigned long)arg6; \
2657 _argvec[7] = (unsigned long)arg7; \
2658 _argvec[8] = (unsigned long)arg8; \
2659 _argvec[9] = (unsigned long)arg9; \
2660 _argvec[10] = (unsigned long)arg10; \
2661 __asm__ volatile( \
2662 VALGRIND_ALIGN_STACK \
2663 "mr 11,%1\n\t" \
2664 "addi 1,1,-16\n\t" \
2665 \
2666 "lwz 3,40(11)\n\t" \
2667 "stw 3,12(1)\n\t" \
2668 \
2669 "lwz 3,36(11)\n\t" \
2670 "stw 3,8(1)\n\t" \
2671 \
2672 "lwz 3,4(11)\n\t" \
2673 "lwz 4,8(11)\n\t" \
2674 "lwz 5,12(11)\n\t" \
2675 "lwz 6,16(11)\n\t" \
2676 "lwz 7,20(11)\n\t" \
2677 "lwz 8,24(11)\n\t" \
2678 "lwz 9,28(11)\n\t" \
2679 "lwz 10,32(11)\n\t" \
2680 "lwz 11,0(11)\n\t" \
2681 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2682 VALGRIND_RESTORE_STACK \
2683 "mr %0,3" \
2684 : "=r" (_res) \
2685 : "r" (&_argvec[0]) \
2686 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2687 ); \
2688 lval = (__typeof__(lval)) _res; \
2689 } while (0)
2690
2691 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2692 arg7,arg8,arg9,arg10,arg11) \
2693 do { \
2694 volatile OrigFn _orig = (orig); \
2695 volatile unsigned long _argvec[12]; \
2696 volatile unsigned long _res; \
2697 _argvec[0] = (unsigned long)_orig.nraddr; \
2698 _argvec[1] = (unsigned long)arg1; \
2699 _argvec[2] = (unsigned long)arg2; \
2700 _argvec[3] = (unsigned long)arg3; \
2701 _argvec[4] = (unsigned long)arg4; \
2702 _argvec[5] = (unsigned long)arg5; \
2703 _argvec[6] = (unsigned long)arg6; \
2704 _argvec[7] = (unsigned long)arg7; \
2705 _argvec[8] = (unsigned long)arg8; \
2706 _argvec[9] = (unsigned long)arg9; \
2707 _argvec[10] = (unsigned long)arg10; \
2708 _argvec[11] = (unsigned long)arg11; \
2709 __asm__ volatile( \
2710 VALGRIND_ALIGN_STACK \
2711 "mr 11,%1\n\t" \
2712 "addi 1,1,-32\n\t" \
2713 \
2714 "lwz 3,44(11)\n\t" \
2715 "stw 3,16(1)\n\t" \
2716 \
2717 "lwz 3,40(11)\n\t" \
2718 "stw 3,12(1)\n\t" \
2719 \
2720 "lwz 3,36(11)\n\t" \
2721 "stw 3,8(1)\n\t" \
2722 \
2723 "lwz 3,4(11)\n\t" \
2724 "lwz 4,8(11)\n\t" \
2725 "lwz 5,12(11)\n\t" \
2726 "lwz 6,16(11)\n\t" \
2727 "lwz 7,20(11)\n\t" \
2728 "lwz 8,24(11)\n\t" \
2729 "lwz 9,28(11)\n\t" \
2730 "lwz 10,32(11)\n\t" \
2731 "lwz 11,0(11)\n\t" \
2732 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2733 VALGRIND_RESTORE_STACK \
2734 "mr %0,3" \
2735 : "=r" (_res) \
2736 : "r" (&_argvec[0]) \
2737 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2738 ); \
2739 lval = (__typeof__(lval)) _res; \
2740 } while (0)
2741
2742 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2743 arg7,arg8,arg9,arg10,arg11,arg12) \
2744 do { \
2745 volatile OrigFn _orig = (orig); \
2746 volatile unsigned long _argvec[13]; \
2747 volatile unsigned long _res; \
2748 _argvec[0] = (unsigned long)_orig.nraddr; \
2749 _argvec[1] = (unsigned long)arg1; \
2750 _argvec[2] = (unsigned long)arg2; \
2751 _argvec[3] = (unsigned long)arg3; \
2752 _argvec[4] = (unsigned long)arg4; \
2753 _argvec[5] = (unsigned long)arg5; \
2754 _argvec[6] = (unsigned long)arg6; \
2755 _argvec[7] = (unsigned long)arg7; \
2756 _argvec[8] = (unsigned long)arg8; \
2757 _argvec[9] = (unsigned long)arg9; \
2758 _argvec[10] = (unsigned long)arg10; \
2759 _argvec[11] = (unsigned long)arg11; \
2760 _argvec[12] = (unsigned long)arg12; \
2761 __asm__ volatile( \
2762 VALGRIND_ALIGN_STACK \
2763 "mr 11,%1\n\t" \
2764 "addi 1,1,-32\n\t" \
2765 \
2766 "lwz 3,48(11)\n\t" \
2767 "stw 3,20(1)\n\t" \
2768 \
2769 "lwz 3,44(11)\n\t" \
2770 "stw 3,16(1)\n\t" \
2771 \
2772 "lwz 3,40(11)\n\t" \
2773 "stw 3,12(1)\n\t" \
2774 \
2775 "lwz 3,36(11)\n\t" \
2776 "stw 3,8(1)\n\t" \
2777 \
2778 "lwz 3,4(11)\n\t" \
2779 "lwz 4,8(11)\n\t" \
2780 "lwz 5,12(11)\n\t" \
2781 "lwz 6,16(11)\n\t" \
2782 "lwz 7,20(11)\n\t" \
2783 "lwz 8,24(11)\n\t" \
2784 "lwz 9,28(11)\n\t" \
2785 "lwz 10,32(11)\n\t" \
2786 "lwz 11,0(11)\n\t" \
2787 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2788 VALGRIND_RESTORE_STACK \
2789 "mr %0,3" \
2790 : "=r" (_res) \
2791 : "r" (&_argvec[0]) \
2792 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2793 ); \
2794 lval = (__typeof__(lval)) _res; \
2795 } while (0)
2796
2797 #endif
2798
2799
2800
2801 #if defined(PLAT_ppc64be_linux)
2802
2803
2804
2805
2806 #define __CALLER_SAVED_REGS \
2807 "lr", "ctr", "xer", \
2808 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2809 "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2810 "r11", "r12", "r13"
2811
2812
2813
2814
2815
2816
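/* As in the 32-bit case, the stack pointer is rounded down to a 16-byte
   boundary ("rldicr 1,1,0,59" clears the low four bits of r1), with the
   original value saved in the non-volatile r28 and restored afterwards. */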
2817 #define VALGRIND_ALIGN_STACK \
2818 "mr 28,1\n\t" \
2819 "rldicr 1,1,0,59\n\t"
2820 #define VALGRIND_RESTORE_STACK \
2821 "mr 1,28\n\t"
2822
2823
2824
2825
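/* In the macros below, _argvec[1] holds the wrapped function's TOC pointer
   (_orig.r2) and _argvec[2] its entry address; the asm receives a pointer
   to _argvec[2] in %1.  "std 2,-16(11)" saves the caller's TOC into
   _argvec[0], "ld 2,-8(11)" installs the callee's TOC before the hidden
   call, and the later "ld 2,-16(11)" restores the caller's TOC. */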
2826 #define CALL_FN_W_v(lval, orig) \
2827 do { \
2828 volatile OrigFn _orig = (orig); \
2829 volatile unsigned long _argvec[3+0]; \
2830 volatile unsigned long _res; \
2831 \
2832 _argvec[1] = (unsigned long)_orig.r2; \
2833 _argvec[2] = (unsigned long)_orig.nraddr; \
2834 __asm__ volatile( \
2835 VALGRIND_ALIGN_STACK \
2836 "mr 11,%1\n\t" \
2837 "std 2,-16(11)\n\t" \
2838 "ld 2,-8(11)\n\t" \
2839 "ld 11, 0(11)\n\t" \
2840 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2841 "mr 11,%1\n\t" \
2842 "mr %0,3\n\t" \
2843 "ld 2,-16(11)\n\t" \
2844 VALGRIND_RESTORE_STACK \
2845 : "=r" (_res) \
2846 : "r" (&_argvec[2]) \
2847 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2848 ); \
2849 lval = (__typeof__(lval)) _res; \
2850 } while (0)
2851
2852 #define CALL_FN_W_W(lval, orig, arg1) \
2853 do { \
2854 volatile OrigFn _orig = (orig); \
2855 volatile unsigned long _argvec[3+1]; \
2856 volatile unsigned long _res; \
2857 \
2858 _argvec[1] = (unsigned long)_orig.r2; \
2859 _argvec[2] = (unsigned long)_orig.nraddr; \
2860 _argvec[2+1] = (unsigned long)arg1; \
2861 __asm__ volatile( \
2862 VALGRIND_ALIGN_STACK \
2863 "mr 11,%1\n\t" \
2864 "std 2,-16(11)\n\t" \
2865 "ld 2,-8(11)\n\t" \
2866 "ld 3, 8(11)\n\t" \
2867 "ld 11, 0(11)\n\t" \
2868 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2869 "mr 11,%1\n\t" \
2870 "mr %0,3\n\t" \
2871 "ld 2,-16(11)\n\t" \
2872 VALGRIND_RESTORE_STACK \
2873 : "=r" (_res) \
2874 : "r" (&_argvec[2]) \
2875 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2876 ); \
2877 lval = (__typeof__(lval)) _res; \
2878 } while (0)
2879
2880 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2881 do { \
2882 volatile OrigFn _orig = (orig); \
2883 volatile unsigned long _argvec[3+2]; \
2884 volatile unsigned long _res; \
2885 \
2886 _argvec[1] = (unsigned long)_orig.r2; \
2887 _argvec[2] = (unsigned long)_orig.nraddr; \
2888 _argvec[2+1] = (unsigned long)arg1; \
2889 _argvec[2+2] = (unsigned long)arg2; \
2890 __asm__ volatile( \
2891 VALGRIND_ALIGN_STACK \
2892 "mr 11,%1\n\t" \
2893 "std 2,-16(11)\n\t" \
2894 "ld 2,-8(11)\n\t" \
2895 "ld 3, 8(11)\n\t" \
2896 "ld 4, 16(11)\n\t" \
2897 "ld 11, 0(11)\n\t" \
2898 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2899 "mr 11,%1\n\t" \
2900 "mr %0,3\n\t" \
2901 "ld 2,-16(11)\n\t" \
2902 VALGRIND_RESTORE_STACK \
2903 : "=r" (_res) \
2904 : "r" (&_argvec[2]) \
2905 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2906 ); \
2907 lval = (__typeof__(lval)) _res; \
2908 } while (0)
2909
2910 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2911 do { \
2912 volatile OrigFn _orig = (orig); \
2913 volatile unsigned long _argvec[3+3]; \
2914 volatile unsigned long _res; \
2915 \
2916 _argvec[1] = (unsigned long)_orig.r2; \
2917 _argvec[2] = (unsigned long)_orig.nraddr; \
2918 _argvec[2+1] = (unsigned long)arg1; \
2919 _argvec[2+2] = (unsigned long)arg2; \
2920 _argvec[2+3] = (unsigned long)arg3; \
2921 __asm__ volatile( \
2922 VALGRIND_ALIGN_STACK \
2923 "mr 11,%1\n\t" \
2924 "std 2,-16(11)\n\t" \
2925 "ld 2,-8(11)\n\t" \
2926 "ld 3, 8(11)\n\t" \
2927 "ld 4, 16(11)\n\t" \
2928 "ld 5, 24(11)\n\t" \
2929 "ld 11, 0(11)\n\t" \
2930 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2931 "mr 11,%1\n\t" \
2932 "mr %0,3\n\t" \
2933 "ld 2,-16(11)\n\t" \
2934 VALGRIND_RESTORE_STACK \
2935 : "=r" (_res) \
2936 : "r" (&_argvec[2]) \
2937 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2938 ); \
2939 lval = (__typeof__(lval)) _res; \
2940 } while (0)
2941
2942 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2943 do { \
2944 volatile OrigFn _orig = (orig); \
2945 volatile unsigned long _argvec[3+4]; \
2946 volatile unsigned long _res; \
2947 \
2948 _argvec[1] = (unsigned long)_orig.r2; \
2949 _argvec[2] = (unsigned long)_orig.nraddr; \
2950 _argvec[2+1] = (unsigned long)arg1; \
2951 _argvec[2+2] = (unsigned long)arg2; \
2952 _argvec[2+3] = (unsigned long)arg3; \
2953 _argvec[2+4] = (unsigned long)arg4; \
2954 __asm__ volatile( \
2955 VALGRIND_ALIGN_STACK \
2956 "mr 11,%1\n\t" \
2957 "std 2,-16(11)\n\t" \
2958 "ld 2,-8(11)\n\t" \
2959 "ld 3, 8(11)\n\t" \
2960 "ld 4, 16(11)\n\t" \
2961 "ld 5, 24(11)\n\t" \
2962 "ld 6, 32(11)\n\t" \
2963 "ld 11, 0(11)\n\t" \
2964 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2965 "mr 11,%1\n\t" \
2966 "mr %0,3\n\t" \
2967 "ld 2,-16(11)\n\t" \
2968 VALGRIND_RESTORE_STACK \
2969 : "=r" (_res) \
2970 : "r" (&_argvec[2]) \
2971 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2972 ); \
2973 lval = (__typeof__(lval)) _res; \
2974 } while (0)
2975
2976 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2977 do { \
2978 volatile OrigFn _orig = (orig); \
2979 volatile unsigned long _argvec[3+5]; \
2980 volatile unsigned long _res; \
2981 \
2982 _argvec[1] = (unsigned long)_orig.r2; \
2983 _argvec[2] = (unsigned long)_orig.nraddr; \
2984 _argvec[2+1] = (unsigned long)arg1; \
2985 _argvec[2+2] = (unsigned long)arg2; \
2986 _argvec[2+3] = (unsigned long)arg3; \
2987 _argvec[2+4] = (unsigned long)arg4; \
2988 _argvec[2+5] = (unsigned long)arg5; \
2989 __asm__ volatile( \
2990 VALGRIND_ALIGN_STACK \
2991 "mr 11,%1\n\t" \
2992 "std 2,-16(11)\n\t" \
2993 "ld 2,-8(11)\n\t" \
2994 "ld 3, 8(11)\n\t" \
2995 "ld 4, 16(11)\n\t" \
2996 "ld 5, 24(11)\n\t" \
2997 "ld 6, 32(11)\n\t" \
2998 "ld 7, 40(11)\n\t" \
2999 "ld 11, 0(11)\n\t" \
3000 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3001 "mr 11,%1\n\t" \
3002 "mr %0,3\n\t" \
3003 "ld 2,-16(11)\n\t" \
3004 VALGRIND_RESTORE_STACK \
3005 : "=r" (_res) \
3006 : "r" (&_argvec[2]) \
3007 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3008 ); \
3009 lval = (__typeof__(lval)) _res; \
3010 } while (0)
3011
3012 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3013 do { \
3014 volatile OrigFn _orig = (orig); \
3015 volatile unsigned long _argvec[3+6]; \
3016 volatile unsigned long _res; \
3017 \
3018 _argvec[1] = (unsigned long)_orig.r2; \
3019 _argvec[2] = (unsigned long)_orig.nraddr; \
3020 _argvec[2+1] = (unsigned long)arg1; \
3021 _argvec[2+2] = (unsigned long)arg2; \
3022 _argvec[2+3] = (unsigned long)arg3; \
3023 _argvec[2+4] = (unsigned long)arg4; \
3024 _argvec[2+5] = (unsigned long)arg5; \
3025 _argvec[2+6] = (unsigned long)arg6; \
3026 __asm__ volatile( \
3027 VALGRIND_ALIGN_STACK \
3028 "mr 11,%1\n\t" \
3029 "std 2,-16(11)\n\t" \
3030 "ld 2,-8(11)\n\t" \
3031 "ld 3, 8(11)\n\t" \
3032 "ld 4, 16(11)\n\t" \
3033 "ld 5, 24(11)\n\t" \
3034 "ld 6, 32(11)\n\t" \
3035 "ld 7, 40(11)\n\t" \
3036 "ld 8, 48(11)\n\t" \
3037 "ld 11, 0(11)\n\t" \
3038 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3039 "mr 11,%1\n\t" \
3040 "mr %0,3\n\t" \
3041 "ld 2,-16(11)\n\t" \
3042 VALGRIND_RESTORE_STACK \
3043 : "=r" (_res) \
3044 : "r" (&_argvec[2]) \
3045 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3046 ); \
3047 lval = (__typeof__(lval)) _res; \
3048 } while (0)
3049
3050 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3051 arg7) \
3052 do { \
3053 volatile OrigFn _orig = (orig); \
3054 volatile unsigned long _argvec[3+7]; \
3055 volatile unsigned long _res; \
3056 \
3057 _argvec[1] = (unsigned long)_orig.r2; \
3058 _argvec[2] = (unsigned long)_orig.nraddr; \
3059 _argvec[2+1] = (unsigned long)arg1; \
3060 _argvec[2+2] = (unsigned long)arg2; \
3061 _argvec[2+3] = (unsigned long)arg3; \
3062 _argvec[2+4] = (unsigned long)arg4; \
3063 _argvec[2+5] = (unsigned long)arg5; \
3064 _argvec[2+6] = (unsigned long)arg6; \
3065 _argvec[2+7] = (unsigned long)arg7; \
3066 __asm__ volatile( \
3067 VALGRIND_ALIGN_STACK \
3068 "mr 11,%1\n\t" \
3069 "std 2,-16(11)\n\t" \
3070 "ld 2,-8(11)\n\t" \
3071 "ld 3, 8(11)\n\t" \
3072 "ld 4, 16(11)\n\t" \
3073 "ld 5, 24(11)\n\t" \
3074 "ld 6, 32(11)\n\t" \
3075 "ld 7, 40(11)\n\t" \
3076 "ld 8, 48(11)\n\t" \
3077 "ld 9, 56(11)\n\t" \
3078 "ld 11, 0(11)\n\t" \
3079 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3080 "mr 11,%1\n\t" \
3081 "mr %0,3\n\t" \
3082 "ld 2,-16(11)\n\t" \
3083 VALGRIND_RESTORE_STACK \
3084 : "=r" (_res) \
3085 : "r" (&_argvec[2]) \
3086 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3087 ); \
3088 lval = (__typeof__(lval)) _res; \
3089 } while (0)
3090
3091 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3092 arg7,arg8) \
3093 do { \
3094 volatile OrigFn _orig = (orig); \
3095 volatile unsigned long _argvec[3+8]; \
3096 volatile unsigned long _res; \
3097 \
3098 _argvec[1] = (unsigned long)_orig.r2; \
3099 _argvec[2] = (unsigned long)_orig.nraddr; \
3100 _argvec[2+1] = (unsigned long)arg1; \
3101 _argvec[2+2] = (unsigned long)arg2; \
3102 _argvec[2+3] = (unsigned long)arg3; \
3103 _argvec[2+4] = (unsigned long)arg4; \
3104 _argvec[2+5] = (unsigned long)arg5; \
3105 _argvec[2+6] = (unsigned long)arg6; \
3106 _argvec[2+7] = (unsigned long)arg7; \
3107 _argvec[2+8] = (unsigned long)arg8; \
3108 __asm__ volatile( \
3109 VALGRIND_ALIGN_STACK \
3110 "mr 11,%1\n\t" \
3111 "std 2,-16(11)\n\t" \
3112 "ld 2,-8(11)\n\t" \
3113 "ld 3, 8(11)\n\t" \
3114 "ld 4, 16(11)\n\t" \
3115 "ld 5, 24(11)\n\t" \
3116 "ld 6, 32(11)\n\t" \
3117 "ld 7, 40(11)\n\t" \
3118 "ld 8, 48(11)\n\t" \
3119 "ld 9, 56(11)\n\t" \
3120 "ld 10, 64(11)\n\t" \
3121 "ld 11, 0(11)\n\t" \
3122 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3123 "mr 11,%1\n\t" \
3124 "mr %0,3\n\t" \
3125 "ld 2,-16(11)\n\t" \
3126 VALGRIND_RESTORE_STACK \
3127 : "=r" (_res) \
3128 : "r" (&_argvec[2]) \
3129 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3130 ); \
3131 lval = (__typeof__(lval)) _res; \
3132 } while (0)
3133
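/* With more than eight arguments, the extra words go into the parameter
   save area of a temporary frame.  Under the ELFv1 ABI that area starts at
   offset 48 from r1, so the ninth argument lands at 48 + 8*8 = 112(1); the
   "addi 1,1,-128" / "addi 1,1,-144" adjustments are multiples of 16 and
   leave room for the 48-byte frame header plus the spilled words. */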
3134 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3135 arg7,arg8,arg9) \
3136 do { \
3137 volatile OrigFn _orig = (orig); \
3138 volatile unsigned long _argvec[3+9]; \
3139 volatile unsigned long _res; \
3140 \
3141 _argvec[1] = (unsigned long)_orig.r2; \
3142 _argvec[2] = (unsigned long)_orig.nraddr; \
3143 _argvec[2+1] = (unsigned long)arg1; \
3144 _argvec[2+2] = (unsigned long)arg2; \
3145 _argvec[2+3] = (unsigned long)arg3; \
3146 _argvec[2+4] = (unsigned long)arg4; \
3147 _argvec[2+5] = (unsigned long)arg5; \
3148 _argvec[2+6] = (unsigned long)arg6; \
3149 _argvec[2+7] = (unsigned long)arg7; \
3150 _argvec[2+8] = (unsigned long)arg8; \
3151 _argvec[2+9] = (unsigned long)arg9; \
3152 __asm__ volatile( \
3153 VALGRIND_ALIGN_STACK \
3154 "mr 11,%1\n\t" \
3155 "std 2,-16(11)\n\t" \
3156 "ld 2,-8(11)\n\t" \
3157 "addi 1,1,-128\n\t" \
3158 \
3159 "ld 3,72(11)\n\t" \
3160 "std 3,112(1)\n\t" \
3161 \
3162 "ld 3, 8(11)\n\t" \
3163 "ld 4, 16(11)\n\t" \
3164 "ld 5, 24(11)\n\t" \
3165 "ld 6, 32(11)\n\t" \
3166 "ld 7, 40(11)\n\t" \
3167 "ld 8, 48(11)\n\t" \
3168 "ld 9, 56(11)\n\t" \
3169 "ld 10, 64(11)\n\t" \
3170 "ld 11, 0(11)\n\t" \
3171 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3172 "mr 11,%1\n\t" \
3173 "mr %0,3\n\t" \
3174 "ld 2,-16(11)\n\t" \
3175 VALGRIND_RESTORE_STACK \
3176 : "=r" (_res) \
3177 : "r" (&_argvec[2]) \
3178 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3179 ); \
3180 lval = (__typeof__(lval)) _res; \
3181 } while (0)
3182
3183 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3184 arg7,arg8,arg9,arg10) \
3185 do { \
3186 volatile OrigFn _orig = (orig); \
3187 volatile unsigned long _argvec[3+10]; \
3188 volatile unsigned long _res; \
3189 \
3190 _argvec[1] = (unsigned long)_orig.r2; \
3191 _argvec[2] = (unsigned long)_orig.nraddr; \
3192 _argvec[2+1] = (unsigned long)arg1; \
3193 _argvec[2+2] = (unsigned long)arg2; \
3194 _argvec[2+3] = (unsigned long)arg3; \
3195 _argvec[2+4] = (unsigned long)arg4; \
3196 _argvec[2+5] = (unsigned long)arg5; \
3197 _argvec[2+6] = (unsigned long)arg6; \
3198 _argvec[2+7] = (unsigned long)arg7; \
3199 _argvec[2+8] = (unsigned long)arg8; \
3200 _argvec[2+9] = (unsigned long)arg9; \
3201 _argvec[2+10] = (unsigned long)arg10; \
3202 __asm__ volatile( \
3203 VALGRIND_ALIGN_STACK \
3204 "mr 11,%1\n\t" \
3205 "std 2,-16(11)\n\t" \
3206 "ld 2,-8(11)\n\t" \
3207 "addi 1,1,-128\n\t" \
3208 \
3209 "ld 3,80(11)\n\t" \
3210 "std 3,120(1)\n\t" \
3211 \
3212 "ld 3,72(11)\n\t" \
3213 "std 3,112(1)\n\t" \
3214 \
3215 "ld 3, 8(11)\n\t" \
3216 "ld 4, 16(11)\n\t" \
3217 "ld 5, 24(11)\n\t" \
3218 "ld 6, 32(11)\n\t" \
3219 "ld 7, 40(11)\n\t" \
3220 "ld 8, 48(11)\n\t" \
3221 "ld 9, 56(11)\n\t" \
3222 "ld 10, 64(11)\n\t" \
3223 "ld 11, 0(11)\n\t" \
3224 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3225 "mr 11,%1\n\t" \
3226 "mr %0,3\n\t" \
3227 "ld 2,-16(11)\n\t" \
3228 VALGRIND_RESTORE_STACK \
3229 : "=r" (_res) \
3230 : "r" (&_argvec[2]) \
3231 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3232 ); \
3233 lval = (__typeof__(lval)) _res; \
3234 } while (0)
3235
3236 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3237 arg7,arg8,arg9,arg10,arg11) \
3238 do { \
3239 volatile OrigFn _orig = (orig); \
3240 volatile unsigned long _argvec[3+11]; \
3241 volatile unsigned long _res; \
3242 \
3243 _argvec[1] = (unsigned long)_orig.r2; \
3244 _argvec[2] = (unsigned long)_orig.nraddr; \
3245 _argvec[2+1] = (unsigned long)arg1; \
3246 _argvec[2+2] = (unsigned long)arg2; \
3247 _argvec[2+3] = (unsigned long)arg3; \
3248 _argvec[2+4] = (unsigned long)arg4; \
3249 _argvec[2+5] = (unsigned long)arg5; \
3250 _argvec[2+6] = (unsigned long)arg6; \
3251 _argvec[2+7] = (unsigned long)arg7; \
3252 _argvec[2+8] = (unsigned long)arg8; \
3253 _argvec[2+9] = (unsigned long)arg9; \
3254 _argvec[2+10] = (unsigned long)arg10; \
3255 _argvec[2+11] = (unsigned long)arg11; \
3256 __asm__ volatile( \
3257 VALGRIND_ALIGN_STACK \
3258 "mr 11,%1\n\t" \
3259 "std 2,-16(11)\n\t" \
3260 "ld 2,-8(11)\n\t" \
3261 "addi 1,1,-144\n\t" \
3262 \
3263 "ld 3,88(11)\n\t" \
3264 "std 3,128(1)\n\t" \
3265 \
3266 "ld 3,80(11)\n\t" \
3267 "std 3,120(1)\n\t" \
3268 \
3269 "ld 3,72(11)\n\t" \
3270 "std 3,112(1)\n\t" \
3271 \
3272 "ld 3, 8(11)\n\t" \
3273 "ld 4, 16(11)\n\t" \
3274 "ld 5, 24(11)\n\t" \
3275 "ld 6, 32(11)\n\t" \
3276 "ld 7, 40(11)\n\t" \
3277 "ld 8, 48(11)\n\t" \
3278 "ld 9, 56(11)\n\t" \
3279 "ld 10, 64(11)\n\t" \
3280 "ld 11, 0(11)\n\t" \
3281 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3282 "mr 11,%1\n\t" \
3283 "mr %0,3\n\t" \
3284 "ld 2,-16(11)\n\t" \
3285 VALGRIND_RESTORE_STACK \
3286 : "=r" (_res) \
3287 : "r" (&_argvec[2]) \
3288 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3289 ); \
3290 lval = (__typeof__(lval)) _res; \
3291 } while (0)
3292
3293 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3294 arg7,arg8,arg9,arg10,arg11,arg12) \
3295 do { \
3296 volatile OrigFn _orig = (orig); \
3297 volatile unsigned long _argvec[3+12]; \
3298 volatile unsigned long _res; \
3299 \
3300 _argvec[1] = (unsigned long)_orig.r2; \
3301 _argvec[2] = (unsigned long)_orig.nraddr; \
3302 _argvec[2+1] = (unsigned long)arg1; \
3303 _argvec[2+2] = (unsigned long)arg2; \
3304 _argvec[2+3] = (unsigned long)arg3; \
3305 _argvec[2+4] = (unsigned long)arg4; \
3306 _argvec[2+5] = (unsigned long)arg5; \
3307 _argvec[2+6] = (unsigned long)arg6; \
3308 _argvec[2+7] = (unsigned long)arg7; \
3309 _argvec[2+8] = (unsigned long)arg8; \
3310 _argvec[2+9] = (unsigned long)arg9; \
3311 _argvec[2+10] = (unsigned long)arg10; \
3312 _argvec[2+11] = (unsigned long)arg11; \
3313 _argvec[2+12] = (unsigned long)arg12; \
3314 __asm__ volatile( \
3315 VALGRIND_ALIGN_STACK \
3316 "mr 11,%1\n\t" \
3317 "std 2,-16(11)\n\t" \
3318 "ld 2,-8(11)\n\t" \
3319 "addi 1,1,-144\n\t" \
3320 \
3321 "ld 3,96(11)\n\t" \
3322 "std 3,136(1)\n\t" \
3323 \
3324 "ld 3,88(11)\n\t" \
3325 "std 3,128(1)\n\t" \
3326 \
3327 "ld 3,80(11)\n\t" \
3328 "std 3,120(1)\n\t" \
3329 \
3330 "ld 3,72(11)\n\t" \
3331 "std 3,112(1)\n\t" \
3332 \
3333 "ld 3, 8(11)\n\t" \
3334 "ld 4, 16(11)\n\t" \
3335 "ld 5, 24(11)\n\t" \
3336 "ld 6, 32(11)\n\t" \
3337 "ld 7, 40(11)\n\t" \
3338 "ld 8, 48(11)\n\t" \
3339 "ld 9, 56(11)\n\t" \
3340 "ld 10, 64(11)\n\t" \
3341 "ld 11, 0(11)\n\t" \
3342 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3343 "mr 11,%1\n\t" \
3344 "mr %0,3\n\t" \
3345 "ld 2,-16(11)\n\t" \
3346 VALGRIND_RESTORE_STACK \
3347 : "=r" (_res) \
3348 : "r" (&_argvec[2]) \
3349 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3350 ); \
3351 lval = (__typeof__(lval)) _res; \
3352 } while (0)
3353
3354 #endif
3355
3356
3357 #if defined(PLAT_ppc64le_linux)
3358
3359
3360
3361
3362 #define __CALLER_SAVED_REGS \
3363 "lr", "ctr", "xer", \
3364 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
3365 "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
3366 "r11", "r12", "r13"
3367
3368
3369
3370
3371
3372
3373 #define VALGRIND_ALIGN_STACK \
3374 "mr 28,1\n\t" \
3375 "rldicr 1,1,0,59\n\t"
3376 #define VALGRIND_RESTORE_STACK \
3377 "mr 1,28\n\t"
3378
3379
3380
3381
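/* The little-endian (ELFv2) variants mirror the ELFv1 ones above, with one
   difference visible below: the branch target is placed in r12 rather than
   r11, because ELFv2 callees expect their own entry address in r12 at the
   global entry point (they derive their TOC from it), and the TOC
   save/restore therefore uses r12-relative addressing. */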
3382 #define CALL_FN_W_v(lval, orig) \
3383 do { \
3384 volatile OrigFn _orig = (orig); \
3385 volatile unsigned long _argvec[3+0]; \
3386 volatile unsigned long _res; \
3387 \
3388 _argvec[1] = (unsigned long)_orig.r2; \
3389 _argvec[2] = (unsigned long)_orig.nraddr; \
3390 __asm__ volatile( \
3391 VALGRIND_ALIGN_STACK \
3392 "mr 12,%1\n\t" \
3393 "std 2,-16(12)\n\t" \
3394 "ld 2,-8(12)\n\t" \
3395 "ld 12, 0(12)\n\t" \
3396 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3397 "mr 12,%1\n\t" \
3398 "mr %0,3\n\t" \
3399 "ld 2,-16(12)\n\t" \
3400 VALGRIND_RESTORE_STACK \
3401 : "=r" (_res) \
3402 : "r" (&_argvec[2]) \
3403 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3404 ); \
3405 lval = (__typeof__(lval)) _res; \
3406 } while (0)
3407
3408 #define CALL_FN_W_W(lval, orig, arg1) \
3409 do { \
3410 volatile OrigFn _orig = (orig); \
3411 volatile unsigned long _argvec[3+1]; \
3412 volatile unsigned long _res; \
3413 \
3414 _argvec[1] = (unsigned long)_orig.r2; \
3415 _argvec[2] = (unsigned long)_orig.nraddr; \
3416 _argvec[2+1] = (unsigned long)arg1; \
3417 __asm__ volatile( \
3418 VALGRIND_ALIGN_STACK \
3419 "mr 12,%1\n\t" \
3420 "std 2,-16(12)\n\t" \
3421 "ld 2,-8(12)\n\t" \
3422 "ld 3, 8(12)\n\t" \
3423 "ld 12, 0(12)\n\t" \
3424 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3425 "mr 12,%1\n\t" \
3426 "mr %0,3\n\t" \
3427 "ld 2,-16(12)\n\t" \
3428 VALGRIND_RESTORE_STACK \
3429 : "=r" (_res) \
3430 : "r" (&_argvec[2]) \
3431 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3432 ); \
3433 lval = (__typeof__(lval)) _res; \
3434 } while (0)
3435
3436 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3437 do { \
3438 volatile OrigFn _orig = (orig); \
3439 volatile unsigned long _argvec[3+2]; \
3440 volatile unsigned long _res; \
3441 \
3442 _argvec[1] = (unsigned long)_orig.r2; \
3443 _argvec[2] = (unsigned long)_orig.nraddr; \
3444 _argvec[2+1] = (unsigned long)arg1; \
3445 _argvec[2+2] = (unsigned long)arg2; \
3446 __asm__ volatile( \
3447 VALGRIND_ALIGN_STACK \
3448 "mr 12,%1\n\t" \
3449 "std 2,-16(12)\n\t" \
3450 "ld 2,-8(12)\n\t" \
3451 "ld 3, 8(12)\n\t" \
3452 "ld 4, 16(12)\n\t" \
3453 "ld 12, 0(12)\n\t" \
3454 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3455 "mr 12,%1\n\t" \
3456 "mr %0,3\n\t" \
3457 "ld 2,-16(12)\n\t" \
3458 VALGRIND_RESTORE_STACK \
3459 : "=r" (_res) \
3460 : "r" (&_argvec[2]) \
3461 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3462 ); \
3463 lval = (__typeof__(lval)) _res; \
3464 } while (0)
3465
3466 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3467 do { \
3468 volatile OrigFn _orig = (orig); \
3469 volatile unsigned long _argvec[3+3]; \
3470 volatile unsigned long _res; \
3471 \
3472 _argvec[1] = (unsigned long)_orig.r2; \
3473 _argvec[2] = (unsigned long)_orig.nraddr; \
3474 _argvec[2+1] = (unsigned long)arg1; \
3475 _argvec[2+2] = (unsigned long)arg2; \
3476 _argvec[2+3] = (unsigned long)arg3; \
3477 __asm__ volatile( \
3478 VALGRIND_ALIGN_STACK \
3479 "mr 12,%1\n\t" \
3480 "std 2,-16(12)\n\t" \
3481 "ld 2,-8(12)\n\t" \
3482 "ld 3, 8(12)\n\t" \
3483 "ld 4, 16(12)\n\t" \
3484 "ld 5, 24(12)\n\t" \
3485 "ld 12, 0(12)\n\t" \
3486 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3487 "mr 12,%1\n\t" \
3488 "mr %0,3\n\t" \
3489 "ld 2,-16(12)\n\t" \
3490 VALGRIND_RESTORE_STACK \
3491 : "=r" (_res) \
3492 : "r" (&_argvec[2]) \
3493 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3494 ); \
3495 lval = (__typeof__(lval)) _res; \
3496 } while (0)
3497
3498 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3499 do { \
3500 volatile OrigFn _orig = (orig); \
3501 volatile unsigned long _argvec[3+4]; \
3502 volatile unsigned long _res; \
3503 \
3504 _argvec[1] = (unsigned long)_orig.r2; \
3505 _argvec[2] = (unsigned long)_orig.nraddr; \
3506 _argvec[2+1] = (unsigned long)arg1; \
3507 _argvec[2+2] = (unsigned long)arg2; \
3508 _argvec[2+3] = (unsigned long)arg3; \
3509 _argvec[2+4] = (unsigned long)arg4; \
3510 __asm__ volatile( \
3511 VALGRIND_ALIGN_STACK \
3512 "mr 12,%1\n\t" \
3513 "std 2,-16(12)\n\t" \
3514 "ld 2,-8(12)\n\t" \
3515 "ld 3, 8(12)\n\t" \
3516 "ld 4, 16(12)\n\t" \
3517 "ld 5, 24(12)\n\t" \
3518 "ld 6, 32(12)\n\t" \
3519 "ld 12, 0(12)\n\t" \
3520 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3521 "mr 12,%1\n\t" \
3522 "mr %0,3\n\t" \
3523 "ld 2,-16(12)\n\t" \
3524 VALGRIND_RESTORE_STACK \
3525 : "=r" (_res) \
3526 : "r" (&_argvec[2]) \
3527 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3528 ); \
3529 lval = (__typeof__(lval)) _res; \
3530 } while (0)
3531
3532 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3533 do { \
3534 volatile OrigFn _orig = (orig); \
3535 volatile unsigned long _argvec[3+5]; \
3536 volatile unsigned long _res; \
3537 \
3538 _argvec[1] = (unsigned long)_orig.r2; \
3539 _argvec[2] = (unsigned long)_orig.nraddr; \
3540 _argvec[2+1] = (unsigned long)arg1; \
3541 _argvec[2+2] = (unsigned long)arg2; \
3542 _argvec[2+3] = (unsigned long)arg3; \
3543 _argvec[2+4] = (unsigned long)arg4; \
3544 _argvec[2+5] = (unsigned long)arg5; \
3545 __asm__ volatile( \
3546 VALGRIND_ALIGN_STACK \
3547 "mr 12,%1\n\t" \
3548 "std 2,-16(12)\n\t" \
3549 "ld 2,-8(12)\n\t" \
3550 "ld 3, 8(12)\n\t" \
3551 "ld 4, 16(12)\n\t" \
3552 "ld 5, 24(12)\n\t" \
3553 "ld 6, 32(12)\n\t" \
3554 "ld 7, 40(12)\n\t" \
3555 "ld 12, 0(12)\n\t" \
3556 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3557 "mr 12,%1\n\t" \
3558 "mr %0,3\n\t" \
3559 "ld 2,-16(12)\n\t" \
3560 VALGRIND_RESTORE_STACK \
3561 : "=r" (_res) \
3562 : "r" (&_argvec[2]) \
3563 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3564 ); \
3565 lval = (__typeof__(lval)) _res; \
3566 } while (0)
3567
3568 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3569 do { \
3570 volatile OrigFn _orig = (orig); \
3571 volatile unsigned long _argvec[3+6]; \
3572 volatile unsigned long _res; \
3573 \
3574 _argvec[1] = (unsigned long)_orig.r2; \
3575 _argvec[2] = (unsigned long)_orig.nraddr; \
3576 _argvec[2+1] = (unsigned long)arg1; \
3577 _argvec[2+2] = (unsigned long)arg2; \
3578 _argvec[2+3] = (unsigned long)arg3; \
3579 _argvec[2+4] = (unsigned long)arg4; \
3580 _argvec[2+5] = (unsigned long)arg5; \
3581 _argvec[2+6] = (unsigned long)arg6; \
3582 __asm__ volatile( \
3583 VALGRIND_ALIGN_STACK \
3584 "mr 12,%1\n\t" \
3585 "std 2,-16(12)\n\t" \
3586 "ld 2,-8(12)\n\t" \
3587 "ld 3, 8(12)\n\t" \
3588 "ld 4, 16(12)\n\t" \
3589 "ld 5, 24(12)\n\t" \
3590 "ld 6, 32(12)\n\t" \
3591 "ld 7, 40(12)\n\t" \
3592 "ld 8, 48(12)\n\t" \
3593 "ld 12, 0(12)\n\t" \
3594 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3595 "mr 12,%1\n\t" \
3596 "mr %0,3\n\t" \
3597 "ld 2,-16(12)\n\t" \
3598 VALGRIND_RESTORE_STACK \
3599 : "=r" (_res) \
3600 : "r" (&_argvec[2]) \
3601 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3602 ); \
3603 lval = (__typeof__(lval)) _res; \
3604 } while (0)
3605
3606 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3607 arg7) \
3608 do { \
3609 volatile OrigFn _orig = (orig); \
3610 volatile unsigned long _argvec[3+7]; \
3611 volatile unsigned long _res; \
3612 \
3613 _argvec[1] = (unsigned long)_orig.r2; \
3614 _argvec[2] = (unsigned long)_orig.nraddr; \
3615 _argvec[2+1] = (unsigned long)arg1; \
3616 _argvec[2+2] = (unsigned long)arg2; \
3617 _argvec[2+3] = (unsigned long)arg3; \
3618 _argvec[2+4] = (unsigned long)arg4; \
3619 _argvec[2+5] = (unsigned long)arg5; \
3620 _argvec[2+6] = (unsigned long)arg6; \
3621 _argvec[2+7] = (unsigned long)arg7; \
3622 __asm__ volatile( \
3623 VALGRIND_ALIGN_STACK \
3624 "mr 12,%1\n\t" \
3625 "std 2,-16(12)\n\t" \
3626 "ld 2,-8(12)\n\t" \
3627 "ld 3, 8(12)\n\t" \
3628 "ld 4, 16(12)\n\t" \
3629 "ld 5, 24(12)\n\t" \
3630 "ld 6, 32(12)\n\t" \
3631 "ld 7, 40(12)\n\t" \
3632 "ld 8, 48(12)\n\t" \
3633 "ld 9, 56(12)\n\t" \
3634 "ld 12, 0(12)\n\t" \
3635 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3636 "mr 12,%1\n\t" \
3637 "mr %0,3\n\t" \
3638 "ld 2,-16(12)\n\t" \
3639 VALGRIND_RESTORE_STACK \
3640 : "=r" (_res) \
3641 : "r" (&_argvec[2]) \
3642 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3643 ); \
3644 lval = (__typeof__(lval)) _res; \
3645 } while (0)
3646
3647 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3648 arg7,arg8) \
3649 do { \
3650 volatile OrigFn _orig = (orig); \
3651 volatile unsigned long _argvec[3+8]; \
3652 volatile unsigned long _res; \
3653 \
3654 _argvec[1] = (unsigned long)_orig.r2; \
3655 _argvec[2] = (unsigned long)_orig.nraddr; \
3656 _argvec[2+1] = (unsigned long)arg1; \
3657 _argvec[2+2] = (unsigned long)arg2; \
3658 _argvec[2+3] = (unsigned long)arg3; \
3659 _argvec[2+4] = (unsigned long)arg4; \
3660 _argvec[2+5] = (unsigned long)arg5; \
3661 _argvec[2+6] = (unsigned long)arg6; \
3662 _argvec[2+7] = (unsigned long)arg7; \
3663 _argvec[2+8] = (unsigned long)arg8; \
3664 __asm__ volatile( \
3665 VALGRIND_ALIGN_STACK \
3666 "mr 12,%1\n\t" \
3667 "std 2,-16(12)\n\t" \
3668 "ld 2,-8(12)\n\t" \
3669 "ld 3, 8(12)\n\t" \
3670 "ld 4, 16(12)\n\t" \
3671 "ld 5, 24(12)\n\t" \
3672 "ld 6, 32(12)\n\t" \
3673 "ld 7, 40(12)\n\t" \
3674 "ld 8, 48(12)\n\t" \
3675 "ld 9, 56(12)\n\t" \
3676 "ld 10, 64(12)\n\t" \
3677 "ld 12, 0(12)\n\t" \
3678 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3679 "mr 12,%1\n\t" \
3680 "mr %0,3\n\t" \
3681 "ld 2,-16(12)\n\t" \
3682 VALGRIND_RESTORE_STACK \
3683 : "=r" (_res) \
3684 : "r" (&_argvec[2]) \
3685 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3686 ); \
3687 lval = (__typeof__(lval)) _res; \
3688 } while (0)
3689
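/* ELFv2 shrinks the stack frame header to 32 bytes, so the parameter save
   area of the temporary frame starts at 32(r1) and the ninth argument is
   stored at 32 + 8*8 = 96(1), with later arguments following at 104, 112
   and 120. */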
3690 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3691 arg7,arg8,arg9) \
3692 do { \
3693 volatile OrigFn _orig = (orig); \
3694 volatile unsigned long _argvec[3+9]; \
3695 volatile unsigned long _res; \
3696 \
3697 _argvec[1] = (unsigned long)_orig.r2; \
3698 _argvec[2] = (unsigned long)_orig.nraddr; \
3699 _argvec[2+1] = (unsigned long)arg1; \
3700 _argvec[2+2] = (unsigned long)arg2; \
3701 _argvec[2+3] = (unsigned long)arg3; \
3702 _argvec[2+4] = (unsigned long)arg4; \
3703 _argvec[2+5] = (unsigned long)arg5; \
3704 _argvec[2+6] = (unsigned long)arg6; \
3705 _argvec[2+7] = (unsigned long)arg7; \
3706 _argvec[2+8] = (unsigned long)arg8; \
3707 _argvec[2+9] = (unsigned long)arg9; \
3708 __asm__ volatile( \
3709 VALGRIND_ALIGN_STACK \
3710 "mr 12,%1\n\t" \
3711 "std 2,-16(12)\n\t" \
3712 "ld 2,-8(12)\n\t" \
3713 "addi 1,1,-128\n\t" \
3714 \
3715 "ld 3,72(12)\n\t" \
3716 "std 3,96(1)\n\t" \
3717 \
3718 "ld 3, 8(12)\n\t" \
3719 "ld 4, 16(12)\n\t" \
3720 "ld 5, 24(12)\n\t" \
3721 "ld 6, 32(12)\n\t" \
3722 "ld 7, 40(12)\n\t" \
3723 "ld 8, 48(12)\n\t" \
3724 "ld 9, 56(12)\n\t" \
3725 "ld 10, 64(12)\n\t" \
3726 "ld 12, 0(12)\n\t" \
3727 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3728 "mr 12,%1\n\t" \
3729 "mr %0,3\n\t" \
3730 "ld 2,-16(12)\n\t" \
3731 VALGRIND_RESTORE_STACK \
3732 : "=r" (_res) \
3733 : "r" (&_argvec[2]) \
3734 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3735 ); \
3736 lval = (__typeof__(lval)) _res; \
3737 } while (0)
3738
3739 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3740 arg7,arg8,arg9,arg10) \
3741 do { \
3742 volatile OrigFn _orig = (orig); \
3743 volatile unsigned long _argvec[3+10]; \
3744 volatile unsigned long _res; \
3745 \
3746 _argvec[1] = (unsigned long)_orig.r2; \
3747 _argvec[2] = (unsigned long)_orig.nraddr; \
3748 _argvec[2+1] = (unsigned long)arg1; \
3749 _argvec[2+2] = (unsigned long)arg2; \
3750 _argvec[2+3] = (unsigned long)arg3; \
3751 _argvec[2+4] = (unsigned long)arg4; \
3752 _argvec[2+5] = (unsigned long)arg5; \
3753 _argvec[2+6] = (unsigned long)arg6; \
3754 _argvec[2+7] = (unsigned long)arg7; \
3755 _argvec[2+8] = (unsigned long)arg8; \
3756 _argvec[2+9] = (unsigned long)arg9; \
3757 _argvec[2+10] = (unsigned long)arg10; \
3758 __asm__ volatile( \
3759 VALGRIND_ALIGN_STACK \
3760 "mr 12,%1\n\t" \
3761 "std 2,-16(12)\n\t" \
3762 "ld 2,-8(12)\n\t" \
3763 "addi 1,1,-128\n\t" \
3764 \
3765 "ld 3,80(12)\n\t" \
3766 "std 3,104(1)\n\t" \
3767 \
3768 "ld 3,72(12)\n\t" \
3769 "std 3,96(1)\n\t" \
3770 \
3771 "ld 3, 8(12)\n\t" \
3772 "ld 4, 16(12)\n\t" \
3773 "ld 5, 24(12)\n\t" \
3774 "ld 6, 32(12)\n\t" \
3775 "ld 7, 40(12)\n\t" \
3776 "ld 8, 48(12)\n\t" \
3777 "ld 9, 56(12)\n\t" \
3778 "ld 10, 64(12)\n\t" \
3779 "ld 12, 0(12)\n\t" \
3780 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3781 "mr 12,%1\n\t" \
3782 "mr %0,3\n\t" \
3783 "ld 2,-16(12)\n\t" \
3784 VALGRIND_RESTORE_STACK \
3785 : "=r" (_res) \
3786 : "r" (&_argvec[2]) \
3787 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3788 ); \
3789 lval = (__typeof__(lval)) _res; \
3790 } while (0)
3791
3792 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3793 arg7,arg8,arg9,arg10,arg11) \
3794 do { \
3795 volatile OrigFn _orig = (orig); \
3796 volatile unsigned long _argvec[3+11]; \
3797 volatile unsigned long _res; \
3798 \
3799 _argvec[1] = (unsigned long)_orig.r2; \
3800 _argvec[2] = (unsigned long)_orig.nraddr; \
3801 _argvec[2+1] = (unsigned long)arg1; \
3802 _argvec[2+2] = (unsigned long)arg2; \
3803 _argvec[2+3] = (unsigned long)arg3; \
3804 _argvec[2+4] = (unsigned long)arg4; \
3805 _argvec[2+5] = (unsigned long)arg5; \
3806 _argvec[2+6] = (unsigned long)arg6; \
3807 _argvec[2+7] = (unsigned long)arg7; \
3808 _argvec[2+8] = (unsigned long)arg8; \
3809 _argvec[2+9] = (unsigned long)arg9; \
3810 _argvec[2+10] = (unsigned long)arg10; \
3811 _argvec[2+11] = (unsigned long)arg11; \
3812 __asm__ volatile( \
3813 VALGRIND_ALIGN_STACK \
3814 "mr 12,%1\n\t" \
3815 "std 2,-16(12)\n\t" \
3816 "ld 2,-8(12)\n\t" \
3817 "addi 1,1,-144\n\t" \
3818 \
3819 "ld 3,88(12)\n\t" \
3820 "std 3,112(1)\n\t" \
3821 \
3822 "ld 3,80(12)\n\t" \
3823 "std 3,104(1)\n\t" \
3824 \
3825 "ld 3,72(12)\n\t" \
3826 "std 3,96(1)\n\t" \
3827 \
3828 "ld 3, 8(12)\n\t" \
3829 "ld 4, 16(12)\n\t" \
3830 "ld 5, 24(12)\n\t" \
3831 "ld 6, 32(12)\n\t" \
3832 "ld 7, 40(12)\n\t" \
3833 "ld 8, 48(12)\n\t" \
3834 "ld 9, 56(12)\n\t" \
3835 "ld 10, 64(12)\n\t" \
3836 "ld 12, 0(12)\n\t" \
3837 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3838 "mr 12,%1\n\t" \
3839 "mr %0,3\n\t" \
3840 "ld 2,-16(12)\n\t" \
3841 VALGRIND_RESTORE_STACK \
3842 : "=r" (_res) \
3843 : "r" (&_argvec[2]) \
3844 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3845 ); \
3846 lval = (__typeof__(lval)) _res; \
3847 } while (0)
3848
3849 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3850 arg7,arg8,arg9,arg10,arg11,arg12) \
3851 do { \
3852 volatile OrigFn _orig = (orig); \
3853 volatile unsigned long _argvec[3+12]; \
3854 volatile unsigned long _res; \
3855 \
3856 _argvec[1] = (unsigned long)_orig.r2; \
3857 _argvec[2] = (unsigned long)_orig.nraddr; \
3858 _argvec[2+1] = (unsigned long)arg1; \
3859 _argvec[2+2] = (unsigned long)arg2; \
3860 _argvec[2+3] = (unsigned long)arg3; \
3861 _argvec[2+4] = (unsigned long)arg4; \
3862 _argvec[2+5] = (unsigned long)arg5; \
3863 _argvec[2+6] = (unsigned long)arg6; \
3864 _argvec[2+7] = (unsigned long)arg7; \
3865 _argvec[2+8] = (unsigned long)arg8; \
3866 _argvec[2+9] = (unsigned long)arg9; \
3867 _argvec[2+10] = (unsigned long)arg10; \
3868 _argvec[2+11] = (unsigned long)arg11; \
3869 _argvec[2+12] = (unsigned long)arg12; \
3870 __asm__ volatile( \
3871 VALGRIND_ALIGN_STACK \
3872 "mr 12,%1\n\t" \
3873 "std 2,-16(12)\n\t" \
3874 "ld 2,-8(12)\n\t" \
3875 "addi 1,1,-144\n\t" \
3876 \
3877 "ld 3,96(12)\n\t" \
3878 "std 3,120(1)\n\t" \
3879 \
3880 "ld 3,88(12)\n\t" \
3881 "std 3,112(1)\n\t" \
3882 \
3883 "ld 3,80(12)\n\t" \
3884 "std 3,104(1)\n\t" \
3885 \
3886 "ld 3,72(12)\n\t" \
3887 "std 3,96(1)\n\t" \
3888 \
3889 "ld 3, 8(12)\n\t" \
3890 "ld 4, 16(12)\n\t" \
3891 "ld 5, 24(12)\n\t" \
3892 "ld 6, 32(12)\n\t" \
3893 "ld 7, 40(12)\n\t" \
3894 "ld 8, 48(12)\n\t" \
3895 "ld 9, 56(12)\n\t" \
3896 "ld 10, 64(12)\n\t" \
3897 "ld 12, 0(12)\n\t" \
3898 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3899 "mr 12,%1\n\t" \
3900 "mr %0,3\n\t" \
3901 "ld 2,-16(12)\n\t" \
3902 VALGRIND_RESTORE_STACK \
3903 : "=r" (_res) \
3904 : "r" (&_argvec[2]) \
3905 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3906 ); \
3907 lval = (__typeof__(lval)) _res; \
3908 } while (0)
3909
3910 #endif
3911
3912
3913
3914 #if defined(PLAT_arm_linux)
3915
3916
3917 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14"
3918
3919
3920
3921
3922
3923
3924
3925
3926
3927
3928
3929
3930
3931
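/* AAPCS requires the stack pointer to be 8-byte aligned at public
   interfaces, so the alignment macro below parks sp in r10 (hence r10 in
   the clobber lists) and clears its low three bits with "bic r4, r4, #7"
   before the hidden call; the restore macro simply copies r10 back. */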
3932 #define VALGRIND_ALIGN_STACK \
3933 "mov r10, sp\n\t" \
3934 "mov r4, sp\n\t" \
3935 "bic r4, r4, #7\n\t" \
3936 "mov sp, r4\n\t"
3937 #define VALGRIND_RESTORE_STACK \
3938 "mov sp, r10\n\t"
3939
3940
3941
3942
3943 #define CALL_FN_W_v(lval, orig) \
3944 do { \
3945 volatile OrigFn _orig = (orig); \
3946 volatile unsigned long _argvec[1]; \
3947 volatile unsigned long _res; \
3948 _argvec[0] = (unsigned long)_orig.nraddr; \
3949 __asm__ volatile( \
3950 VALGRIND_ALIGN_STACK \
3951 "ldr r4, [%1] \n\t" \
3952 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3953 VALGRIND_RESTORE_STACK \
3954 "mov %0, r0\n" \
3955 : "=r" (_res) \
3956 : "0" (&_argvec[0]) \
3957 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3958 ); \
3959 lval = (__typeof__(lval)) _res; \
3960 } while (0)
3961
3962 #define CALL_FN_W_W(lval, orig, arg1) \
3963 do { \
3964 volatile OrigFn _orig = (orig); \
3965 volatile unsigned long _argvec[2]; \
3966 volatile unsigned long _res; \
3967 _argvec[0] = (unsigned long)_orig.nraddr; \
3968 _argvec[1] = (unsigned long)(arg1); \
3969 __asm__ volatile( \
3970 VALGRIND_ALIGN_STACK \
3971 "ldr r0, [%1, #4] \n\t" \
3972 "ldr r4, [%1] \n\t" \
3973 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3974 VALGRIND_RESTORE_STACK \
3975 "mov %0, r0\n" \
3976 : "=r" (_res) \
3977 : "0" (&_argvec[0]) \
3978 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3979 ); \
3980 lval = (__typeof__(lval)) _res; \
3981 } while (0)
3982
3983 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3984 do { \
3985 volatile OrigFn _orig = (orig); \
3986 volatile unsigned long _argvec[3]; \
3987 volatile unsigned long _res; \
3988 _argvec[0] = (unsigned long)_orig.nraddr; \
3989 _argvec[1] = (unsigned long)(arg1); \
3990 _argvec[2] = (unsigned long)(arg2); \
3991 __asm__ volatile( \
3992 VALGRIND_ALIGN_STACK \
3993 "ldr r0, [%1, #4] \n\t" \
3994 "ldr r1, [%1, #8] \n\t" \
3995 "ldr r4, [%1] \n\t" \
3996 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3997 VALGRIND_RESTORE_STACK \
3998 "mov %0, r0\n" \
3999 : "=r" (_res) \
4000 : "0" (&_argvec[0]) \
4001 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4002 ); \
4003 lval = (__typeof__(lval)) _res; \
4004 } while (0)
4005
4006 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
4007 do { \
4008 volatile OrigFn _orig = (orig); \
4009 volatile unsigned long _argvec[4]; \
4010 volatile unsigned long _res; \
4011 _argvec[0] = (unsigned long)_orig.nraddr; \
4012 _argvec[1] = (unsigned long)(arg1); \
4013 _argvec[2] = (unsigned long)(arg2); \
4014 _argvec[3] = (unsigned long)(arg3); \
4015 __asm__ volatile( \
4016 VALGRIND_ALIGN_STACK \
4017 "ldr r0, [%1, #4] \n\t" \
4018 "ldr r1, [%1, #8] \n\t" \
4019 "ldr r2, [%1, #12] \n\t" \
4020 "ldr r4, [%1] \n\t" \
4021 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4022 VALGRIND_RESTORE_STACK \
4023 "mov %0, r0\n" \
4024 : "=r" (_res) \
4025 : "0" (&_argvec[0]) \
4026 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4027 ); \
4028 lval = (__typeof__(lval)) _res; \
4029 } while (0)
4030
4031 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
4032 do { \
4033 volatile OrigFn _orig = (orig); \
4034 volatile unsigned long _argvec[5]; \
4035 volatile unsigned long _res; \
4036 _argvec[0] = (unsigned long)_orig.nraddr; \
4037 _argvec[1] = (unsigned long)(arg1); \
4038 _argvec[2] = (unsigned long)(arg2); \
4039 _argvec[3] = (unsigned long)(arg3); \
4040 _argvec[4] = (unsigned long)(arg4); \
4041 __asm__ volatile( \
4042 VALGRIND_ALIGN_STACK \
4043 "ldr r0, [%1, #4] \n\t" \
4044 "ldr r1, [%1, #8] \n\t" \
4045 "ldr r2, [%1, #12] \n\t" \
4046 "ldr r3, [%1, #16] \n\t" \
4047 "ldr r4, [%1] \n\t" \
4048 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4049 VALGRIND_RESTORE_STACK \
4050 "mov %0, r0" \
4051 : "=r" (_res) \
4052 : "0" (&_argvec[0]) \
4053 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4054 ); \
4055 lval = (__typeof__(lval)) _res; \
4056 } while (0)
4057
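/* For CALL_FN_W_5W and the larger variants below, the first four argument
   words are passed in r0-r3 and any remaining words are pushed onto the
   stack.  An extra "sub sp, sp, #4" is emitted whenever an odd number of
   words would otherwise be pushed, so the stack stays 8-byte aligned at
   the call as the AAPCS expects.  The target address itself travels in r4
   and is reached through the branch-and-link-to-noredir-r4 sequence. */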
4058 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
4059 do { \
4060 volatile OrigFn _orig = (orig); \
4061 volatile unsigned long _argvec[6]; \
4062 volatile unsigned long _res; \
4063 _argvec[0] = (unsigned long)_orig.nraddr; \
4064 _argvec[1] = (unsigned long)(arg1); \
4065 _argvec[2] = (unsigned long)(arg2); \
4066 _argvec[3] = (unsigned long)(arg3); \
4067 _argvec[4] = (unsigned long)(arg4); \
4068 _argvec[5] = (unsigned long)(arg5); \
4069 __asm__ volatile( \
4070 VALGRIND_ALIGN_STACK \
4071 "sub sp, sp, #4 \n\t" \
4072 "ldr r0, [%1, #20] \n\t" \
4073 "push {r0} \n\t" \
4074 "ldr r0, [%1, #4] \n\t" \
4075 "ldr r1, [%1, #8] \n\t" \
4076 "ldr r2, [%1, #12] \n\t" \
4077 "ldr r3, [%1, #16] \n\t" \
4078 "ldr r4, [%1] \n\t" \
4079 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4080 VALGRIND_RESTORE_STACK \
4081 "mov %0, r0" \
4082 : "=r" (_res) \
4083 : "0" (&_argvec[0]) \
4084 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4085 ); \
4086 lval = (__typeof__(lval)) _res; \
4087 } while (0)
4088
4089 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4090 do { \
4091 volatile OrigFn _orig = (orig); \
4092 volatile unsigned long _argvec[7]; \
4093 volatile unsigned long _res; \
4094 _argvec[0] = (unsigned long)_orig.nraddr; \
4095 _argvec[1] = (unsigned long)(arg1); \
4096 _argvec[2] = (unsigned long)(arg2); \
4097 _argvec[3] = (unsigned long)(arg3); \
4098 _argvec[4] = (unsigned long)(arg4); \
4099 _argvec[5] = (unsigned long)(arg5); \
4100 _argvec[6] = (unsigned long)(arg6); \
4101 __asm__ volatile( \
4102 VALGRIND_ALIGN_STACK \
4103 "ldr r0, [%1, #20] \n\t" \
4104 "ldr r1, [%1, #24] \n\t" \
4105 "push {r0, r1} \n\t" \
4106 "ldr r0, [%1, #4] \n\t" \
4107 "ldr r1, [%1, #8] \n\t" \
4108 "ldr r2, [%1, #12] \n\t" \
4109 "ldr r3, [%1, #16] \n\t" \
4110 "ldr r4, [%1] \n\t" \
4111 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4112 VALGRIND_RESTORE_STACK \
4113 "mov %0, r0" \
4114 : "=r" (_res) \
4115 : "0" (&_argvec[0]) \
4116 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4117 ); \
4118 lval = (__typeof__(lval)) _res; \
4119 } while (0)
4120
4121 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4122 arg7) \
4123 do { \
4124 volatile OrigFn _orig = (orig); \
4125 volatile unsigned long _argvec[8]; \
4126 volatile unsigned long _res; \
4127 _argvec[0] = (unsigned long)_orig.nraddr; \
4128 _argvec[1] = (unsigned long)(arg1); \
4129 _argvec[2] = (unsigned long)(arg2); \
4130 _argvec[3] = (unsigned long)(arg3); \
4131 _argvec[4] = (unsigned long)(arg4); \
4132 _argvec[5] = (unsigned long)(arg5); \
4133 _argvec[6] = (unsigned long)(arg6); \
4134 _argvec[7] = (unsigned long)(arg7); \
4135 __asm__ volatile( \
4136 VALGRIND_ALIGN_STACK \
4137 "sub sp, sp, #4 \n\t" \
4138 "ldr r0, [%1, #20] \n\t" \
4139 "ldr r1, [%1, #24] \n\t" \
4140 "ldr r2, [%1, #28] \n\t" \
4141 "push {r0, r1, r2} \n\t" \
4142 "ldr r0, [%1, #4] \n\t" \
4143 "ldr r1, [%1, #8] \n\t" \
4144 "ldr r2, [%1, #12] \n\t" \
4145 "ldr r3, [%1, #16] \n\t" \
4146 "ldr r4, [%1] \n\t" \
4147 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4148 VALGRIND_RESTORE_STACK \
4149 "mov %0, r0" \
4150 : "=r" (_res) \
4151 : "0" (&_argvec[0]) \
4152 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4153 ); \
4154 lval = (__typeof__(lval)) _res; \
4155 } while (0)
4156
4157 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4158 arg7,arg8) \
4159 do { \
4160 volatile OrigFn _orig = (orig); \
4161 volatile unsigned long _argvec[9]; \
4162 volatile unsigned long _res; \
4163 _argvec[0] = (unsigned long)_orig.nraddr; \
4164 _argvec[1] = (unsigned long)(arg1); \
4165 _argvec[2] = (unsigned long)(arg2); \
4166 _argvec[3] = (unsigned long)(arg3); \
4167 _argvec[4] = (unsigned long)(arg4); \
4168 _argvec[5] = (unsigned long)(arg5); \
4169 _argvec[6] = (unsigned long)(arg6); \
4170 _argvec[7] = (unsigned long)(arg7); \
4171 _argvec[8] = (unsigned long)(arg8); \
4172 __asm__ volatile( \
4173 VALGRIND_ALIGN_STACK \
4174 "ldr r0, [%1, #20] \n\t" \
4175 "ldr r1, [%1, #24] \n\t" \
4176 "ldr r2, [%1, #28] \n\t" \
4177 "ldr r3, [%1, #32] \n\t" \
4178 "push {r0, r1, r2, r3} \n\t" \
4179 "ldr r0, [%1, #4] \n\t" \
4180 "ldr r1, [%1, #8] \n\t" \
4181 "ldr r2, [%1, #12] \n\t" \
4182 "ldr r3, [%1, #16] \n\t" \
4183 "ldr r4, [%1] \n\t" \
4184 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4185 VALGRIND_RESTORE_STACK \
4186 "mov %0, r0" \
4187 : "=r" (_res) \
4188 : "0" (&_argvec[0]) \
4189 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4190 ); \
4191 lval = (__typeof__(lval)) _res; \
4192 } while (0)
4193
4194 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4195 arg7,arg8,arg9) \
4196 do { \
4197 volatile OrigFn _orig = (orig); \
4198 volatile unsigned long _argvec[10]; \
4199 volatile unsigned long _res; \
4200 _argvec[0] = (unsigned long)_orig.nraddr; \
4201 _argvec[1] = (unsigned long)(arg1); \
4202 _argvec[2] = (unsigned long)(arg2); \
4203 _argvec[3] = (unsigned long)(arg3); \
4204 _argvec[4] = (unsigned long)(arg4); \
4205 _argvec[5] = (unsigned long)(arg5); \
4206 _argvec[6] = (unsigned long)(arg6); \
4207 _argvec[7] = (unsigned long)(arg7); \
4208 _argvec[8] = (unsigned long)(arg8); \
4209 _argvec[9] = (unsigned long)(arg9); \
4210 __asm__ volatile( \
4211 VALGRIND_ALIGN_STACK \
4212 "sub sp, sp, #4 \n\t" \
4213 "ldr r0, [%1, #20] \n\t" \
4214 "ldr r1, [%1, #24] \n\t" \
4215 "ldr r2, [%1, #28] \n\t" \
4216 "ldr r3, [%1, #32] \n\t" \
4217 "ldr r4, [%1, #36] \n\t" \
4218 "push {r0, r1, r2, r3, r4} \n\t" \
4219 "ldr r0, [%1, #4] \n\t" \
4220 "ldr r1, [%1, #8] \n\t" \
4221 "ldr r2, [%1, #12] \n\t" \
4222 "ldr r3, [%1, #16] \n\t" \
4223 "ldr r4, [%1] \n\t" \
4224 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4225 VALGRIND_RESTORE_STACK \
4226 "mov %0, r0" \
4227 : "=r" (_res) \
4228 : "0" (&_argvec[0]) \
4229 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4230 ); \
4231 lval = (__typeof__(lval)) _res; \
4232 } while (0)
4233
4234 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4235 arg7,arg8,arg9,arg10) \
4236 do { \
4237 volatile OrigFn _orig = (orig); \
4238 volatile unsigned long _argvec[11]; \
4239 volatile unsigned long _res; \
4240 _argvec[0] = (unsigned long)_orig.nraddr; \
4241 _argvec[1] = (unsigned long)(arg1); \
4242 _argvec[2] = (unsigned long)(arg2); \
4243 _argvec[3] = (unsigned long)(arg3); \
4244 _argvec[4] = (unsigned long)(arg4); \
4245 _argvec[5] = (unsigned long)(arg5); \
4246 _argvec[6] = (unsigned long)(arg6); \
4247 _argvec[7] = (unsigned long)(arg7); \
4248 _argvec[8] = (unsigned long)(arg8); \
4249 _argvec[9] = (unsigned long)(arg9); \
4250 _argvec[10] = (unsigned long)(arg10); \
4251 __asm__ volatile( \
4252 VALGRIND_ALIGN_STACK \
4253 "ldr r0, [%1, #40] \n\t" \
4254 "push {r0} \n\t" \
4255 "ldr r0, [%1, #20] \n\t" \
4256 "ldr r1, [%1, #24] \n\t" \
4257 "ldr r2, [%1, #28] \n\t" \
4258 "ldr r3, [%1, #32] \n\t" \
4259 "ldr r4, [%1, #36] \n\t" \
4260 "push {r0, r1, r2, r3, r4} \n\t" \
4261 "ldr r0, [%1, #4] \n\t" \
4262 "ldr r1, [%1, #8] \n\t" \
4263 "ldr r2, [%1, #12] \n\t" \
4264 "ldr r3, [%1, #16] \n\t" \
4265 "ldr r4, [%1] \n\t" \
4266 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4267 VALGRIND_RESTORE_STACK \
4268 "mov %0, r0" \
4269 : "=r" (_res) \
4270 : "0" (&_argvec[0]) \
4271 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4272 ); \
4273 lval = (__typeof__(lval)) _res; \
4274 } while (0)
4275
4276 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4277 arg6,arg7,arg8,arg9,arg10, \
4278 arg11) \
4279 do { \
4280 volatile OrigFn _orig = (orig); \
4281 volatile unsigned long _argvec[12]; \
4282 volatile unsigned long _res; \
4283 _argvec[0] = (unsigned long)_orig.nraddr; \
4284 _argvec[1] = (unsigned long)(arg1); \
4285 _argvec[2] = (unsigned long)(arg2); \
4286 _argvec[3] = (unsigned long)(arg3); \
4287 _argvec[4] = (unsigned long)(arg4); \
4288 _argvec[5] = (unsigned long)(arg5); \
4289 _argvec[6] = (unsigned long)(arg6); \
4290 _argvec[7] = (unsigned long)(arg7); \
4291 _argvec[8] = (unsigned long)(arg8); \
4292 _argvec[9] = (unsigned long)(arg9); \
4293 _argvec[10] = (unsigned long)(arg10); \
4294 _argvec[11] = (unsigned long)(arg11); \
4295 __asm__ volatile( \
4296 VALGRIND_ALIGN_STACK \
4297 "sub sp, sp, #4 \n\t" \
4298 "ldr r0, [%1, #40] \n\t" \
4299 "ldr r1, [%1, #44] \n\t" \
4300 "push {r0, r1} \n\t" \
4301 "ldr r0, [%1, #20] \n\t" \
4302 "ldr r1, [%1, #24] \n\t" \
4303 "ldr r2, [%1, #28] \n\t" \
4304 "ldr r3, [%1, #32] \n\t" \
4305 "ldr r4, [%1, #36] \n\t" \
4306 "push {r0, r1, r2, r3, r4} \n\t" \
4307 "ldr r0, [%1, #4] \n\t" \
4308 "ldr r1, [%1, #8] \n\t" \
4309 "ldr r2, [%1, #12] \n\t" \
4310 "ldr r3, [%1, #16] \n\t" \
4311 "ldr r4, [%1] \n\t" \
4312 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4313 VALGRIND_RESTORE_STACK \
4314 "mov %0, r0" \
4315 : "=r" (_res) \
4316 : "0" (&_argvec[0]) \
4317 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4318 ); \
4319 lval = (__typeof__(lval)) _res; \
4320 } while (0)
4321
4322 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4323 arg6,arg7,arg8,arg9,arg10, \
4324 arg11,arg12) \
4325 do { \
4326 volatile OrigFn _orig = (orig); \
4327 volatile unsigned long _argvec[13]; \
4328 volatile unsigned long _res; \
4329 _argvec[0] = (unsigned long)_orig.nraddr; \
4330 _argvec[1] = (unsigned long)(arg1); \
4331 _argvec[2] = (unsigned long)(arg2); \
4332 _argvec[3] = (unsigned long)(arg3); \
4333 _argvec[4] = (unsigned long)(arg4); \
4334 _argvec[5] = (unsigned long)(arg5); \
4335 _argvec[6] = (unsigned long)(arg6); \
4336 _argvec[7] = (unsigned long)(arg7); \
4337 _argvec[8] = (unsigned long)(arg8); \
4338 _argvec[9] = (unsigned long)(arg9); \
4339 _argvec[10] = (unsigned long)(arg10); \
4340 _argvec[11] = (unsigned long)(arg11); \
4341 _argvec[12] = (unsigned long)(arg12); \
4342 __asm__ volatile( \
4343 VALGRIND_ALIGN_STACK \
4344 "ldr r0, [%1, #40] \n\t" \
4345 "ldr r1, [%1, #44] \n\t" \
4346 "ldr r2, [%1, #48] \n\t" \
4347 "push {r0, r1, r2} \n\t" \
4348 "ldr r0, [%1, #20] \n\t" \
4349 "ldr r1, [%1, #24] \n\t" \
4350 "ldr r2, [%1, #28] \n\t" \
4351 "ldr r3, [%1, #32] \n\t" \
4352 "ldr r4, [%1, #36] \n\t" \
4353 "push {r0, r1, r2, r3, r4} \n\t" \
4354 "ldr r0, [%1, #4] \n\t" \
4355 "ldr r1, [%1, #8] \n\t" \
4356 "ldr r2, [%1, #12] \n\t" \
4357 "ldr r3, [%1, #16] \n\t" \
4358 "ldr r4, [%1] \n\t" \
4359 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4360 VALGRIND_RESTORE_STACK \
4361 "mov %0, r0" \
4362 : "=r" (_res) \
4363 : "0" (&_argvec[0]) \
4364 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4365 ); \
4366 lval = (__typeof__(lval)) _res; \
4367 } while (0)
4368
4369 #endif
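/* A minimal usage sketch (illustrative only): in client code that includes
   this header, the CALL_FN_W_* macros are normally used inside a function
   wrapper built with the wrapping macros defined later in this file
   (I_WRAP_SONAME_FNNAME_ZU, VALGRIND_GET_ORIG_FN).  The wrapped function
   foo and its two int arguments are hypothetical.

      int I_WRAP_SONAME_FNNAME_ZU(NONE, foo)(int x, int y)
      {
         int    result;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);          // fetch the original foo
         CALL_FN_W_WW(result, fn, x, y);    // call it without redirection
         return result;
      }
*/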
4370
4371
4372
4373 #if defined(PLAT_arm64_linux) || defined(PLAT_arm64_freebsd)
4374
4375
4376 #define __CALLER_SAVED_REGS \
4377 "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9", \
4378 "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", \
4379 "x18", "x19", "x20", "x30", \
4380 "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", \
4381 "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", \
4382 "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", \
4383 "v26", "v27", "v28", "v29", "v30", "v31"
4384
4385
4386
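/* SP must be 16-byte aligned at the call.  x21 is preserved by callees
   under the AArch64 procedure call standard, so it is used to remember
   the original SP across the hidden call (and is therefore also listed
   as clobbered in the macros below). */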
4387 #define VALGRIND_ALIGN_STACK \
4388 "mov x21, sp\n\t" \
4389 "bic sp, x21, #15\n\t"
4390 #define VALGRIND_RESTORE_STACK \
4391 "mov sp, x21\n\t"
4392
4393
4394
4395
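/* These CALL_FN_ macros assume that unsigned long is 8 bytes wide on
   arm64, i.e. each argument word and the return value fit in one X
   register.  Arguments 1..8 are loaded into x0..x7, and the address of
   the function to call goes in x8, which the branch-and-link-to-noredir
   sequence then uses. */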
4396 #define CALL_FN_W_v(lval, orig) \
4397 do { \
4398 volatile OrigFn _orig = (orig); \
4399 volatile unsigned long _argvec[1]; \
4400 volatile unsigned long _res; \
4401 _argvec[0] = (unsigned long)_orig.nraddr; \
4402 __asm__ volatile( \
4403 VALGRIND_ALIGN_STACK \
4404 "ldr x8, [%1] \n\t" \
4405 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4406 VALGRIND_RESTORE_STACK \
4407 "mov %0, x0\n" \
4408 : "=r" (_res) \
4409 : "0" (&_argvec[0]) \
4410 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4411 ); \
4412 lval = (__typeof__(lval)) _res; \
4413 } while (0)
4414
4415 #define CALL_FN_W_W(lval, orig, arg1) \
4416 do { \
4417 volatile OrigFn _orig = (orig); \
4418 volatile unsigned long _argvec[2]; \
4419 volatile unsigned long _res; \
4420 _argvec[0] = (unsigned long)_orig.nraddr; \
4421 _argvec[1] = (unsigned long)(arg1); \
4422 __asm__ volatile( \
4423 VALGRIND_ALIGN_STACK \
4424 "ldr x0, [%1, #8] \n\t" \
4425 "ldr x8, [%1] \n\t" \
4426 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4427 VALGRIND_RESTORE_STACK \
4428 "mov %0, x0\n" \
4429 : "=r" (_res) \
4430 : "0" (&_argvec[0]) \
4431 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4432 ); \
4433 lval = (__typeof__(lval)) _res; \
4434 } while (0)
4435
4436 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
4437 do { \
4438 volatile OrigFn _orig = (orig); \
4439 volatile unsigned long _argvec[3]; \
4440 volatile unsigned long _res; \
4441 _argvec[0] = (unsigned long)_orig.nraddr; \
4442 _argvec[1] = (unsigned long)(arg1); \
4443 _argvec[2] = (unsigned long)(arg2); \
4444 __asm__ volatile( \
4445 VALGRIND_ALIGN_STACK \
4446 "ldr x0, [%1, #8] \n\t" \
4447 "ldr x1, [%1, #16] \n\t" \
4448 "ldr x8, [%1] \n\t" \
4449 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4450 VALGRIND_RESTORE_STACK \
4451 "mov %0, x0\n" \
4452 : "=r" (_res) \
4453 : "0" (&_argvec[0]) \
4454 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4455 ); \
4456 lval = (__typeof__(lval)) _res; \
4457 } while (0)
4458
4459 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
4460 do { \
4461 volatile OrigFn _orig = (orig); \
4462 volatile unsigned long _argvec[4]; \
4463 volatile unsigned long _res; \
4464 _argvec[0] = (unsigned long)_orig.nraddr; \
4465 _argvec[1] = (unsigned long)(arg1); \
4466 _argvec[2] = (unsigned long)(arg2); \
4467 _argvec[3] = (unsigned long)(arg3); \
4468 __asm__ volatile( \
4469 VALGRIND_ALIGN_STACK \
4470 "ldr x0, [%1, #8] \n\t" \
4471 "ldr x1, [%1, #16] \n\t" \
4472 "ldr x2, [%1, #24] \n\t" \
4473 "ldr x8, [%1] \n\t" \
4474 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4475 VALGRIND_RESTORE_STACK \
4476 "mov %0, x0\n" \
4477 : "=r" (_res) \
4478 : "0" (&_argvec[0]) \
4479 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4480 ); \
4481 lval = (__typeof__(lval)) _res; \
4482 } while (0)
4483
4484 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
4485 do { \
4486 volatile OrigFn _orig = (orig); \
4487 volatile unsigned long _argvec[5]; \
4488 volatile unsigned long _res; \
4489 _argvec[0] = (unsigned long)_orig.nraddr; \
4490 _argvec[1] = (unsigned long)(arg1); \
4491 _argvec[2] = (unsigned long)(arg2); \
4492 _argvec[3] = (unsigned long)(arg3); \
4493 _argvec[4] = (unsigned long)(arg4); \
4494 __asm__ volatile( \
4495 VALGRIND_ALIGN_STACK \
4496 "ldr x0, [%1, #8] \n\t" \
4497 "ldr x1, [%1, #16] \n\t" \
4498 "ldr x2, [%1, #24] \n\t" \
4499 "ldr x3, [%1, #32] \n\t" \
4500 "ldr x8, [%1] \n\t" \
4501 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4502 VALGRIND_RESTORE_STACK \
4503 "mov %0, x0" \
4504 : "=r" (_res) \
4505 : "0" (&_argvec[0]) \
4506 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4507 ); \
4508 lval = (__typeof__(lval)) _res; \
4509 } while (0)
4510
4511 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
4512 do { \
4513 volatile OrigFn _orig = (orig); \
4514 volatile unsigned long _argvec[6]; \
4515 volatile unsigned long _res; \
4516 _argvec[0] = (unsigned long)_orig.nraddr; \
4517 _argvec[1] = (unsigned long)(arg1); \
4518 _argvec[2] = (unsigned long)(arg2); \
4519 _argvec[3] = (unsigned long)(arg3); \
4520 _argvec[4] = (unsigned long)(arg4); \
4521 _argvec[5] = (unsigned long)(arg5); \
4522 __asm__ volatile( \
4523 VALGRIND_ALIGN_STACK \
4524 "ldr x0, [%1, #8] \n\t" \
4525 "ldr x1, [%1, #16] \n\t" \
4526 "ldr x2, [%1, #24] \n\t" \
4527 "ldr x3, [%1, #32] \n\t" \
4528 "ldr x4, [%1, #40] \n\t" \
4529 "ldr x8, [%1] \n\t" \
4530 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4531 VALGRIND_RESTORE_STACK \
4532 "mov %0, x0" \
4533 : "=r" (_res) \
4534 : "0" (&_argvec[0]) \
4535 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4536 ); \
4537 lval = (__typeof__(lval)) _res; \
4538 } while (0)
4539
4540 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4541 do { \
4542 volatile OrigFn _orig = (orig); \
4543 volatile unsigned long _argvec[7]; \
4544 volatile unsigned long _res; \
4545 _argvec[0] = (unsigned long)_orig.nraddr; \
4546 _argvec[1] = (unsigned long)(arg1); \
4547 _argvec[2] = (unsigned long)(arg2); \
4548 _argvec[3] = (unsigned long)(arg3); \
4549 _argvec[4] = (unsigned long)(arg4); \
4550 _argvec[5] = (unsigned long)(arg5); \
4551 _argvec[6] = (unsigned long)(arg6); \
4552 __asm__ volatile( \
4553 VALGRIND_ALIGN_STACK \
4554 "ldr x0, [%1, #8] \n\t" \
4555 "ldr x1, [%1, #16] \n\t" \
4556 "ldr x2, [%1, #24] \n\t" \
4557 "ldr x3, [%1, #32] \n\t" \
4558 "ldr x4, [%1, #40] \n\t" \
4559 "ldr x5, [%1, #48] \n\t" \
4560 "ldr x8, [%1] \n\t" \
4561 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4562 VALGRIND_RESTORE_STACK \
4563 "mov %0, x0" \
4564 : "=r" (_res) \
4565 : "0" (&_argvec[0]) \
4566 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4567 ); \
4568 lval = (__typeof__(lval)) _res; \
4569 } while (0)
4570
4571 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4572 arg7) \
4573 do { \
4574 volatile OrigFn _orig = (orig); \
4575 volatile unsigned long _argvec[8]; \
4576 volatile unsigned long _res; \
4577 _argvec[0] = (unsigned long)_orig.nraddr; \
4578 _argvec[1] = (unsigned long)(arg1); \
4579 _argvec[2] = (unsigned long)(arg2); \
4580 _argvec[3] = (unsigned long)(arg3); \
4581 _argvec[4] = (unsigned long)(arg4); \
4582 _argvec[5] = (unsigned long)(arg5); \
4583 _argvec[6] = (unsigned long)(arg6); \
4584 _argvec[7] = (unsigned long)(arg7); \
4585 __asm__ volatile( \
4586 VALGRIND_ALIGN_STACK \
4587 "ldr x0, [%1, #8] \n\t" \
4588 "ldr x1, [%1, #16] \n\t" \
4589 "ldr x2, [%1, #24] \n\t" \
4590 "ldr x3, [%1, #32] \n\t" \
4591 "ldr x4, [%1, #40] \n\t" \
4592 "ldr x5, [%1, #48] \n\t" \
4593 "ldr x6, [%1, #56] \n\t" \
4594 "ldr x8, [%1] \n\t" \
4595 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4596 VALGRIND_RESTORE_STACK \
4597 "mov %0, x0" \
4598 : "=r" (_res) \
4599 : "0" (&_argvec[0]) \
4600 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4601 ); \
4602 lval = (__typeof__(lval)) _res; \
4603 } while (0)
4604
4605 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4606 arg7,arg8) \
4607 do { \
4608 volatile OrigFn _orig = (orig); \
4609 volatile unsigned long _argvec[9]; \
4610 volatile unsigned long _res; \
4611 _argvec[0] = (unsigned long)_orig.nraddr; \
4612 _argvec[1] = (unsigned long)(arg1); \
4613 _argvec[2] = (unsigned long)(arg2); \
4614 _argvec[3] = (unsigned long)(arg3); \
4615 _argvec[4] = (unsigned long)(arg4); \
4616 _argvec[5] = (unsigned long)(arg5); \
4617 _argvec[6] = (unsigned long)(arg6); \
4618 _argvec[7] = (unsigned long)(arg7); \
4619 _argvec[8] = (unsigned long)(arg8); \
4620 __asm__ volatile( \
4621 VALGRIND_ALIGN_STACK \
4622 "ldr x0, [%1, #8] \n\t" \
4623 "ldr x1, [%1, #16] \n\t" \
4624 "ldr x2, [%1, #24] \n\t" \
4625 "ldr x3, [%1, #32] \n\t" \
4626 "ldr x4, [%1, #40] \n\t" \
4627 "ldr x5, [%1, #48] \n\t" \
4628 "ldr x6, [%1, #56] \n\t" \
4629 "ldr x7, [%1, #64] \n\t" \
4630 "ldr x8, [%1] \n\t" \
4631 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4632 VALGRIND_RESTORE_STACK \
4633 "mov %0, x0" \
4634 : "=r" (_res) \
4635 : "0" (&_argvec[0]) \
4636 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4637 ); \
4638 lval = (__typeof__(lval)) _res; \
4639 } while (0)
4640
4641 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4642 arg7,arg8,arg9) \
4643 do { \
4644 volatile OrigFn _orig = (orig); \
4645 volatile unsigned long _argvec[10]; \
4646 volatile unsigned long _res; \
4647 _argvec[0] = (unsigned long)_orig.nraddr; \
4648 _argvec[1] = (unsigned long)(arg1); \
4649 _argvec[2] = (unsigned long)(arg2); \
4650 _argvec[3] = (unsigned long)(arg3); \
4651 _argvec[4] = (unsigned long)(arg4); \
4652 _argvec[5] = (unsigned long)(arg5); \
4653 _argvec[6] = (unsigned long)(arg6); \
4654 _argvec[7] = (unsigned long)(arg7); \
4655 _argvec[8] = (unsigned long)(arg8); \
4656 _argvec[9] = (unsigned long)(arg9); \
4657 __asm__ volatile( \
4658 VALGRIND_ALIGN_STACK \
4659 "sub sp, sp, #0x20 \n\t" \
4660 "ldr x0, [%1, #8] \n\t" \
4661 "ldr x1, [%1, #16] \n\t" \
4662 "ldr x2, [%1, #24] \n\t" \
4663 "ldr x3, [%1, #32] \n\t" \
4664 "ldr x4, [%1, #40] \n\t" \
4665 "ldr x5, [%1, #48] \n\t" \
4666 "ldr x6, [%1, #56] \n\t" \
4667 "ldr x7, [%1, #64] \n\t" \
4668 "ldr x8, [%1, #72] \n\t" \
4669 "str x8, [sp, #0] \n\t" \
4670 "ldr x8, [%1] \n\t" \
4671 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4672 VALGRIND_RESTORE_STACK \
4673 "mov %0, x0" \
4674 : "=r" (_res) \
4675 : "0" (&_argvec[0]) \
4676 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4677 ); \
4678 lval = (__typeof__(lval)) _res; \
4679 } while (0)
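/* From the ninth argument onwards the remaining words are passed on the
   stack: SP is dropped by a further multiple of 16 (0x20 or 0x30 above
   and below) so the required 16-byte alignment is kept, and each extra
   word is stored at sp+0, sp+8, ... before the call is made. */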
4680
4681 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4682 arg7,arg8,arg9,arg10) \
4683 do { \
4684 volatile OrigFn _orig = (orig); \
4685 volatile unsigned long _argvec[11]; \
4686 volatile unsigned long _res; \
4687 _argvec[0] = (unsigned long)_orig.nraddr; \
4688 _argvec[1] = (unsigned long)(arg1); \
4689 _argvec[2] = (unsigned long)(arg2); \
4690 _argvec[3] = (unsigned long)(arg3); \
4691 _argvec[4] = (unsigned long)(arg4); \
4692 _argvec[5] = (unsigned long)(arg5); \
4693 _argvec[6] = (unsigned long)(arg6); \
4694 _argvec[7] = (unsigned long)(arg7); \
4695 _argvec[8] = (unsigned long)(arg8); \
4696 _argvec[9] = (unsigned long)(arg9); \
4697 _argvec[10] = (unsigned long)(arg10); \
4698 __asm__ volatile( \
4699 VALGRIND_ALIGN_STACK \
4700 "sub sp, sp, #0x20 \n\t" \
4701 "ldr x0, [%1, #8] \n\t" \
4702 "ldr x1, [%1, #16] \n\t" \
4703 "ldr x2, [%1, #24] \n\t" \
4704 "ldr x3, [%1, #32] \n\t" \
4705 "ldr x4, [%1, #40] \n\t" \
4706 "ldr x5, [%1, #48] \n\t" \
4707 "ldr x6, [%1, #56] \n\t" \
4708 "ldr x7, [%1, #64] \n\t" \
4709 "ldr x8, [%1, #72] \n\t" \
4710 "str x8, [sp, #0] \n\t" \
4711 "ldr x8, [%1, #80] \n\t" \
4712 "str x8, [sp, #8] \n\t" \
4713 "ldr x8, [%1] \n\t" \
4714 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4715 VALGRIND_RESTORE_STACK \
4716 "mov %0, x0" \
4717 : "=r" (_res) \
4718 : "0" (&_argvec[0]) \
4719 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4720 ); \
4721 lval = (__typeof__(lval)) _res; \
4722 } while (0)
4723
4724 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4725 arg7,arg8,arg9,arg10,arg11) \
4726 do { \
4727 volatile OrigFn _orig = (orig); \
4728 volatile unsigned long _argvec[12]; \
4729 volatile unsigned long _res; \
4730 _argvec[0] = (unsigned long)_orig.nraddr; \
4731 _argvec[1] = (unsigned long)(arg1); \
4732 _argvec[2] = (unsigned long)(arg2); \
4733 _argvec[3] = (unsigned long)(arg3); \
4734 _argvec[4] = (unsigned long)(arg4); \
4735 _argvec[5] = (unsigned long)(arg5); \
4736 _argvec[6] = (unsigned long)(arg6); \
4737 _argvec[7] = (unsigned long)(arg7); \
4738 _argvec[8] = (unsigned long)(arg8); \
4739 _argvec[9] = (unsigned long)(arg9); \
4740 _argvec[10] = (unsigned long)(arg10); \
4741 _argvec[11] = (unsigned long)(arg11); \
4742 __asm__ volatile( \
4743 VALGRIND_ALIGN_STACK \
4744 "sub sp, sp, #0x30 \n\t" \
4745 "ldr x0, [%1, #8] \n\t" \
4746 "ldr x1, [%1, #16] \n\t" \
4747 "ldr x2, [%1, #24] \n\t" \
4748 "ldr x3, [%1, #32] \n\t" \
4749 "ldr x4, [%1, #40] \n\t" \
4750 "ldr x5, [%1, #48] \n\t" \
4751 "ldr x6, [%1, #56] \n\t" \
4752 "ldr x7, [%1, #64] \n\t" \
4753 "ldr x8, [%1, #72] \n\t" \
4754 "str x8, [sp, #0] \n\t" \
4755 "ldr x8, [%1, #80] \n\t" \
4756 "str x8, [sp, #8] \n\t" \
4757 "ldr x8, [%1, #88] \n\t" \
4758 "str x8, [sp, #16] \n\t" \
4759 "ldr x8, [%1] \n\t" \
4760 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4761 VALGRIND_RESTORE_STACK \
4762 "mov %0, x0" \
4763 : "=r" (_res) \
4764 : "0" (&_argvec[0]) \
4765 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4766 ); \
4767 lval = (__typeof__(lval)) _res; \
4768 } while (0)
4769
4770 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4771 arg7,arg8,arg9,arg10,arg11, \
4772 arg12) \
4773 do { \
4774 volatile OrigFn _orig = (orig); \
4775 volatile unsigned long _argvec[13]; \
4776 volatile unsigned long _res; \
4777 _argvec[0] = (unsigned long)_orig.nraddr; \
4778 _argvec[1] = (unsigned long)(arg1); \
4779 _argvec[2] = (unsigned long)(arg2); \
4780 _argvec[3] = (unsigned long)(arg3); \
4781 _argvec[4] = (unsigned long)(arg4); \
4782 _argvec[5] = (unsigned long)(arg5); \
4783 _argvec[6] = (unsigned long)(arg6); \
4784 _argvec[7] = (unsigned long)(arg7); \
4785 _argvec[8] = (unsigned long)(arg8); \
4786 _argvec[9] = (unsigned long)(arg9); \
4787 _argvec[10] = (unsigned long)(arg10); \
4788 _argvec[11] = (unsigned long)(arg11); \
4789 _argvec[12] = (unsigned long)(arg12); \
4790 __asm__ volatile( \
4791 VALGRIND_ALIGN_STACK \
4792 "sub sp, sp, #0x30 \n\t" \
4793 "ldr x0, [%1, #8] \n\t" \
4794 "ldr x1, [%1, #16] \n\t" \
4795 "ldr x2, [%1, #24] \n\t" \
4796 "ldr x3, [%1, #32] \n\t" \
4797 "ldr x4, [%1, #40] \n\t" \
4798 "ldr x5, [%1, #48] \n\t" \
4799 "ldr x6, [%1, #56] \n\t" \
4800 "ldr x7, [%1, #64] \n\t" \
4801 "ldr x8, [%1, #72] \n\t" \
4802 "str x8, [sp, #0] \n\t" \
4803 "ldr x8, [%1, #80] \n\t" \
4804 "str x8, [sp, #8] \n\t" \
4805 "ldr x8, [%1, #88] \n\t" \
4806 "str x8, [sp, #16] \n\t" \
4807 "ldr x8, [%1, #96] \n\t" \
4808 "str x8, [sp, #24] \n\t" \
4809 "ldr x8, [%1] \n\t" \
4810 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4811 VALGRIND_RESTORE_STACK \
4812 "mov %0, x0" \
4813 : "=r" (_res) \
4814 : "0" (&_argvec[0]) \
4815 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4816 ); \
4817 lval = (__typeof__(lval)) _res; \
4818 } while (0)
4819
4820 #endif
4821
4822
4823
4824 #if defined(PLAT_s390x_linux)
4825
4826
4827
4828
4829
4830 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
4831 # define __FRAME_POINTER \
4832 ,"d"(__builtin_dwarf_cfa())
4833 # define VALGRIND_CFI_PROLOGUE \
4834 ".cfi_remember_state\n\t" \
4835 "lgr 1,%1\n\t" \
4836 "lgr 7,11\n\t" \
4837 "lgr 11,%2\n\t" \
4838 ".cfi_def_cfa 11, 0\n\t"
4839 # define VALGRIND_CFI_EPILOGUE \
4840 "lgr 11, 7\n\t" \
4841 ".cfi_restore_state\n\t"
4842 #else
4843 # define __FRAME_POINTER
4844 # define VALGRIND_CFI_PROLOGUE \
4845 "lgr 1,%1\n\t"
4846 # define VALGRIND_CFI_EPILOGUE
4847 #endif
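/* When the compiler emits DWARF CFI for inline assembly
   (__GCC_HAVE_DWARF2_CFI_ASM), the prologue above makes r11 the CFA
   register for the duration of the hidden call so that unwinding still
   works: r1 receives the argument-vector pointer, the caller's r11 is
   parked in r7 (hence "7" in the clobber lists), and r11 is loaded with
   the CFA value passed in via __builtin_dwarf_cfa().  The epilogue puts
   r11 back and restores the remembered CFI state. */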
4848
4849
4850
4851
4852
4853
4854
4855
4856
4857
4858 #if defined(__VX__) || defined(__S390_VX__)
4859 #define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
4860 "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", \
4861 "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15", \
4862 "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23", \
4863 "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31"
4864 #else
4865 #define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
4866 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7"
4867 #endif
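/* Registers clobbered by the hidden call on s390x: r0-r5 and r14, plus
   either the full vector register file (when the vector facility is in
   use) or the volatile floating-point registers f0-f7. */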
4868
4869
4870
4871
4872
4873
4874
4875
4876
4877
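/* Call scheme used below: arguments 1..5 are passed in r2..r6 and any
   further arguments are copied with "mvc" into the outgoing part of the
   callee's stack frame.  "aghi 15,-160" carves out the 160-byte register
   save area the s390x ELF ABI demands (grown by 8 bytes per stack
   argument), r1 is loaded with the target address for the no-redirect
   call sequence, and the result is picked up from r2 afterwards. */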
4878 #define CALL_FN_W_v(lval, orig) \
4879 do { \
4880 volatile OrigFn _orig = (orig); \
4881 volatile unsigned long _argvec[1]; \
4882 volatile unsigned long _res; \
4883 _argvec[0] = (unsigned long)_orig.nraddr; \
4884 __asm__ volatile( \
4885 VALGRIND_CFI_PROLOGUE \
4886 "aghi 15,-160\n\t" \
4887 "lg 1, 0(1)\n\t" \
4888 VALGRIND_CALL_NOREDIR_R1 \
4889 "aghi 15,160\n\t" \
4890 VALGRIND_CFI_EPILOGUE \
4891 "lgr %0, 2\n\t" \
4892 : "=d" (_res) \
4893 : "d" (&_argvec[0]) __FRAME_POINTER \
4894 : "cc", "memory", __CALLER_SAVED_REGS,"7" \
4895 ); \
4896 lval = (__typeof__(lval)) _res; \
4897 } while (0)
4898
4899
4900 #define CALL_FN_W_W(lval, orig, arg1) \
4901 do { \
4902 volatile OrigFn _orig = (orig); \
4903 volatile unsigned long _argvec[2]; \
4904 volatile unsigned long _res; \
4905 _argvec[0] = (unsigned long)_orig.nraddr; \
4906 _argvec[1] = (unsigned long)arg1; \
4907 __asm__ volatile( \
4908 VALGRIND_CFI_PROLOGUE \
4909 "aghi 15,-160\n\t" \
4910 "lg 2, 8(1)\n\t" \
4911 "lg 1, 0(1)\n\t" \
4912 VALGRIND_CALL_NOREDIR_R1 \
4913 "aghi 15,160\n\t" \
4914 VALGRIND_CFI_EPILOGUE \
4915 "lgr %0, 2\n\t" \
4916 : "=d" (_res) \
4917 : "a" (&_argvec[0]) __FRAME_POINTER \
4918 : "cc", "memory", __CALLER_SAVED_REGS,"7" \
4919 ); \
4920 lval = (__typeof__(lval)) _res; \
4921 } while (0)
4922
4923 #define CALL_FN_W_WW(lval, orig, arg1, arg2) \
4924 do { \
4925 volatile OrigFn _orig = (orig); \
4926 volatile unsigned long _argvec[3]; \
4927 volatile unsigned long _res; \
4928 _argvec[0] = (unsigned long)_orig.nraddr; \
4929 _argvec[1] = (unsigned long)arg1; \
4930 _argvec[2] = (unsigned long)arg2; \
4931 __asm__ volatile( \
4932 VALGRIND_CFI_PROLOGUE \
4933 "aghi 15,-160\n\t" \
4934 "lg 2, 8(1)\n\t" \
4935 "lg 3,16(1)\n\t" \
4936 "lg 1, 0(1)\n\t" \
4937 VALGRIND_CALL_NOREDIR_R1 \
4938 "aghi 15,160\n\t" \
4939 VALGRIND_CFI_EPILOGUE \
4940 "lgr %0, 2\n\t" \
4941 : "=d" (_res) \
4942 : "a" (&_argvec[0]) __FRAME_POINTER \
4943 : "cc", "memory", __CALLER_SAVED_REGS,"7" \
4944 ); \
4945 lval = (__typeof__(lval)) _res; \
4946 } while (0)
4947
4948 #define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
4949 do { \
4950 volatile OrigFn _orig = (orig); \
4951 volatile unsigned long _argvec[4]; \
4952 volatile unsigned long _res; \
4953 _argvec[0] = (unsigned long)_orig.nraddr; \
4954 _argvec[1] = (unsigned long)arg1; \
4955 _argvec[2] = (unsigned long)arg2; \
4956 _argvec[3] = (unsigned long)arg3; \
4957 __asm__ volatile( \
4958 VALGRIND_CFI_PROLOGUE \
4959 "aghi 15,-160\n\t" \
4960 "lg 2, 8(1)\n\t" \
4961 "lg 3,16(1)\n\t" \
4962 "lg 4,24(1)\n\t" \
4963 "lg 1, 0(1)\n\t" \
4964 VALGRIND_CALL_NOREDIR_R1 \
4965 "aghi 15,160\n\t" \
4966 VALGRIND_CFI_EPILOGUE \
4967 "lgr %0, 2\n\t" \
4968 : "=d" (_res) \
4969 : "a" (&_argvec[0]) __FRAME_POINTER \
4970 : "cc", "memory", __CALLER_SAVED_REGS,"7" \
4971 ); \
4972 lval = (__typeof__(lval)) _res; \
4973 } while (0)
4974
4975 #define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
4976 do { \
4977 volatile OrigFn _orig = (orig); \
4978 volatile unsigned long _argvec[5]; \
4979 volatile unsigned long _res; \
4980 _argvec[0] = (unsigned long)_orig.nraddr; \
4981 _argvec[1] = (unsigned long)arg1; \
4982 _argvec[2] = (unsigned long)arg2; \
4983 _argvec[3] = (unsigned long)arg3; \
4984 _argvec[4] = (unsigned long)arg4; \
4985 __asm__ volatile( \
4986 VALGRIND_CFI_PROLOGUE \
4987 "aghi 15,-160\n\t" \
4988 "lg 2, 8(1)\n\t" \
4989 "lg 3,16(1)\n\t" \
4990 "lg 4,24(1)\n\t" \
4991 "lg 5,32(1)\n\t" \
4992 "lg 1, 0(1)\n\t" \
4993 VALGRIND_CALL_NOREDIR_R1 \
4994 "aghi 15,160\n\t" \
4995 VALGRIND_CFI_EPILOGUE \
4996 "lgr %0, 2\n\t" \
4997 : "=d" (_res) \
4998 : "a" (&_argvec[0]) __FRAME_POINTER \
4999 : "cc", "memory", __CALLER_SAVED_REGS,"7" \
5000 ); \
5001 lval = (__typeof__(lval)) _res; \
5002 } while (0)
5003
5004 #define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
5005 do { \
5006 volatile OrigFn _orig = (orig); \
5007 volatile unsigned long _argvec[6]; \
5008 volatile unsigned long _res; \
5009 _argvec[0] = (unsigned long)_orig.nraddr; \
5010 _argvec[1] = (unsigned long)arg1; \
5011 _argvec[2] = (unsigned long)arg2; \
5012 _argvec[3] = (unsigned long)arg3; \
5013 _argvec[4] = (unsigned long)arg4; \
5014 _argvec[5] = (unsigned long)arg5; \
5015 __asm__ volatile( \
5016 VALGRIND_CFI_PROLOGUE \
5017 "aghi 15,-160\n\t" \
5018 "lg 2, 8(1)\n\t" \
5019 "lg 3,16(1)\n\t" \
5020 "lg 4,24(1)\n\t" \
5021 "lg 5,32(1)\n\t" \
5022 "lg 6,40(1)\n\t" \
5023 "lg 1, 0(1)\n\t" \
5024 VALGRIND_CALL_NOREDIR_R1 \
5025 "aghi 15,160\n\t" \
5026 VALGRIND_CFI_EPILOGUE \
5027 "lgr %0, 2\n\t" \
5028 : "=d" (_res) \
5029 : "a" (&_argvec[0]) __FRAME_POINTER \
5030 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5031 ); \
5032 lval = (__typeof__(lval)) _res; \
5033 } while (0)
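/* Note: r6 carries the fifth argument but, unlike r2-r5, it is also
   preserved across calls by the s390x ABI, which is presumably why it is
   added to the clobber list only from CALL_FN_W_5W onwards. */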
5034
5035 #define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5036 arg6) \
5037 do { \
5038 volatile OrigFn _orig = (orig); \
5039 volatile unsigned long _argvec[7]; \
5040 volatile unsigned long _res; \
5041 _argvec[0] = (unsigned long)_orig.nraddr; \
5042 _argvec[1] = (unsigned long)arg1; \
5043 _argvec[2] = (unsigned long)arg2; \
5044 _argvec[3] = (unsigned long)arg3; \
5045 _argvec[4] = (unsigned long)arg4; \
5046 _argvec[5] = (unsigned long)arg5; \
5047 _argvec[6] = (unsigned long)arg6; \
5048 __asm__ volatile( \
5049 VALGRIND_CFI_PROLOGUE \
5050 "aghi 15,-168\n\t" \
5051 "lg 2, 8(1)\n\t" \
5052 "lg 3,16(1)\n\t" \
5053 "lg 4,24(1)\n\t" \
5054 "lg 5,32(1)\n\t" \
5055 "lg 6,40(1)\n\t" \
5056 "mvc 160(8,15), 48(1)\n\t" \
5057 "lg 1, 0(1)\n\t" \
5058 VALGRIND_CALL_NOREDIR_R1 \
5059 "aghi 15,168\n\t" \
5060 VALGRIND_CFI_EPILOGUE \
5061 "lgr %0, 2\n\t" \
5062 : "=d" (_res) \
5063 : "a" (&_argvec[0]) __FRAME_POINTER \
5064 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5065 ); \
5066 lval = (__typeof__(lval)) _res; \
5067 } while (0)
5068
5069 #define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5070 arg6, arg7) \
5071 do { \
5072 volatile OrigFn _orig = (orig); \
5073 volatile unsigned long _argvec[8]; \
5074 volatile unsigned long _res; \
5075 _argvec[0] = (unsigned long)_orig.nraddr; \
5076 _argvec[1] = (unsigned long)arg1; \
5077 _argvec[2] = (unsigned long)arg2; \
5078 _argvec[3] = (unsigned long)arg3; \
5079 _argvec[4] = (unsigned long)arg4; \
5080 _argvec[5] = (unsigned long)arg5; \
5081 _argvec[6] = (unsigned long)arg6; \
5082 _argvec[7] = (unsigned long)arg7; \
5083 __asm__ volatile( \
5084 VALGRIND_CFI_PROLOGUE \
5085 "aghi 15,-176\n\t" \
5086 "lg 2, 8(1)\n\t" \
5087 "lg 3,16(1)\n\t" \
5088 "lg 4,24(1)\n\t" \
5089 "lg 5,32(1)\n\t" \
5090 "lg 6,40(1)\n\t" \
5091 "mvc 160(8,15), 48(1)\n\t" \
5092 "mvc 168(8,15), 56(1)\n\t" \
5093 "lg 1, 0(1)\n\t" \
5094 VALGRIND_CALL_NOREDIR_R1 \
5095 "aghi 15,176\n\t" \
5096 VALGRIND_CFI_EPILOGUE \
5097 "lgr %0, 2\n\t" \
5098 : "=d" (_res) \
5099 : "a" (&_argvec[0]) __FRAME_POINTER \
5100 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5101 ); \
5102 lval = (__typeof__(lval)) _res; \
5103 } while (0)
5104
5105 #define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5106 arg6, arg7 ,arg8) \
5107 do { \
5108 volatile OrigFn _orig = (orig); \
5109 volatile unsigned long _argvec[9]; \
5110 volatile unsigned long _res; \
5111 _argvec[0] = (unsigned long)_orig.nraddr; \
5112 _argvec[1] = (unsigned long)arg1; \
5113 _argvec[2] = (unsigned long)arg2; \
5114 _argvec[3] = (unsigned long)arg3; \
5115 _argvec[4] = (unsigned long)arg4; \
5116 _argvec[5] = (unsigned long)arg5; \
5117 _argvec[6] = (unsigned long)arg6; \
5118 _argvec[7] = (unsigned long)arg7; \
5119 _argvec[8] = (unsigned long)arg8; \
5120 __asm__ volatile( \
5121 VALGRIND_CFI_PROLOGUE \
5122 "aghi 15,-184\n\t" \
5123 "lg 2, 8(1)\n\t" \
5124 "lg 3,16(1)\n\t" \
5125 "lg 4,24(1)\n\t" \
5126 "lg 5,32(1)\n\t" \
5127 "lg 6,40(1)\n\t" \
5128 "mvc 160(8,15), 48(1)\n\t" \
5129 "mvc 168(8,15), 56(1)\n\t" \
5130 "mvc 176(8,15), 64(1)\n\t" \
5131 "lg 1, 0(1)\n\t" \
5132 VALGRIND_CALL_NOREDIR_R1 \
5133 "aghi 15,184\n\t" \
5134 VALGRIND_CFI_EPILOGUE \
5135 "lgr %0, 2\n\t" \
5136 : "=d" (_res) \
5137 : "a" (&_argvec[0]) __FRAME_POINTER \
5138 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5139 ); \
5140 lval = (__typeof__(lval)) _res; \
5141 } while (0)
5142
5143 #define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5144 arg6, arg7 ,arg8, arg9) \
5145 do { \
5146 volatile OrigFn _orig = (orig); \
5147 volatile unsigned long _argvec[10]; \
5148 volatile unsigned long _res; \
5149 _argvec[0] = (unsigned long)_orig.nraddr; \
5150 _argvec[1] = (unsigned long)arg1; \
5151 _argvec[2] = (unsigned long)arg2; \
5152 _argvec[3] = (unsigned long)arg3; \
5153 _argvec[4] = (unsigned long)arg4; \
5154 _argvec[5] = (unsigned long)arg5; \
5155 _argvec[6] = (unsigned long)arg6; \
5156 _argvec[7] = (unsigned long)arg7; \
5157 _argvec[8] = (unsigned long)arg8; \
5158 _argvec[9] = (unsigned long)arg9; \
5159 __asm__ volatile( \
5160 VALGRIND_CFI_PROLOGUE \
5161 "aghi 15,-192\n\t" \
5162 "lg 2, 8(1)\n\t" \
5163 "lg 3,16(1)\n\t" \
5164 "lg 4,24(1)\n\t" \
5165 "lg 5,32(1)\n\t" \
5166 "lg 6,40(1)\n\t" \
5167 "mvc 160(8,15), 48(1)\n\t" \
5168 "mvc 168(8,15), 56(1)\n\t" \
5169 "mvc 176(8,15), 64(1)\n\t" \
5170 "mvc 184(8,15), 72(1)\n\t" \
5171 "lg 1, 0(1)\n\t" \
5172 VALGRIND_CALL_NOREDIR_R1 \
5173 "aghi 15,192\n\t" \
5174 VALGRIND_CFI_EPILOGUE \
5175 "lgr %0, 2\n\t" \
5176 : "=d" (_res) \
5177 : "a" (&_argvec[0]) __FRAME_POINTER \
5178 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5179 ); \
5180 lval = (__typeof__(lval)) _res; \
5181 } while (0)
5182
5183 #define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5184 arg6, arg7 ,arg8, arg9, arg10) \
5185 do { \
5186 volatile OrigFn _orig = (orig); \
5187 volatile unsigned long _argvec[11]; \
5188 volatile unsigned long _res; \
5189 _argvec[0] = (unsigned long)_orig.nraddr; \
5190 _argvec[1] = (unsigned long)arg1; \
5191 _argvec[2] = (unsigned long)arg2; \
5192 _argvec[3] = (unsigned long)arg3; \
5193 _argvec[4] = (unsigned long)arg4; \
5194 _argvec[5] = (unsigned long)arg5; \
5195 _argvec[6] = (unsigned long)arg6; \
5196 _argvec[7] = (unsigned long)arg7; \
5197 _argvec[8] = (unsigned long)arg8; \
5198 _argvec[9] = (unsigned long)arg9; \
5199 _argvec[10] = (unsigned long)arg10; \
5200 __asm__ volatile( \
5201 VALGRIND_CFI_PROLOGUE \
5202 "aghi 15,-200\n\t" \
5203 "lg 2, 8(1)\n\t" \
5204 "lg 3,16(1)\n\t" \
5205 "lg 4,24(1)\n\t" \
5206 "lg 5,32(1)\n\t" \
5207 "lg 6,40(1)\n\t" \
5208 "mvc 160(8,15), 48(1)\n\t" \
5209 "mvc 168(8,15), 56(1)\n\t" \
5210 "mvc 176(8,15), 64(1)\n\t" \
5211 "mvc 184(8,15), 72(1)\n\t" \
5212 "mvc 192(8,15), 80(1)\n\t" \
5213 "lg 1, 0(1)\n\t" \
5214 VALGRIND_CALL_NOREDIR_R1 \
5215 "aghi 15,200\n\t" \
5216 VALGRIND_CFI_EPILOGUE \
5217 "lgr %0, 2\n\t" \
5218 : "=d" (_res) \
5219 : "a" (&_argvec[0]) __FRAME_POINTER \
5220 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5221 ); \
5222 lval = (__typeof__(lval)) _res; \
5223 } while (0)
5224
5225 #define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5226 arg6, arg7 ,arg8, arg9, arg10, arg11) \
5227 do { \
5228 volatile OrigFn _orig = (orig); \
5229 volatile unsigned long _argvec[12]; \
5230 volatile unsigned long _res; \
5231 _argvec[0] = (unsigned long)_orig.nraddr; \
5232 _argvec[1] = (unsigned long)arg1; \
5233 _argvec[2] = (unsigned long)arg2; \
5234 _argvec[3] = (unsigned long)arg3; \
5235 _argvec[4] = (unsigned long)arg4; \
5236 _argvec[5] = (unsigned long)arg5; \
5237 _argvec[6] = (unsigned long)arg6; \
5238 _argvec[7] = (unsigned long)arg7; \
5239 _argvec[8] = (unsigned long)arg8; \
5240 _argvec[9] = (unsigned long)arg9; \
5241 _argvec[10] = (unsigned long)arg10; \
5242 _argvec[11] = (unsigned long)arg11; \
5243 __asm__ volatile( \
5244 VALGRIND_CFI_PROLOGUE \
5245 "aghi 15,-208\n\t" \
5246 "lg 2, 8(1)\n\t" \
5247 "lg 3,16(1)\n\t" \
5248 "lg 4,24(1)\n\t" \
5249 "lg 5,32(1)\n\t" \
5250 "lg 6,40(1)\n\t" \
5251 "mvc 160(8,15), 48(1)\n\t" \
5252 "mvc 168(8,15), 56(1)\n\t" \
5253 "mvc 176(8,15), 64(1)\n\t" \
5254 "mvc 184(8,15), 72(1)\n\t" \
5255 "mvc 192(8,15), 80(1)\n\t" \
5256 "mvc 200(8,15), 88(1)\n\t" \
5257 "lg 1, 0(1)\n\t" \
5258 VALGRIND_CALL_NOREDIR_R1 \
5259 "aghi 15,208\n\t" \
5260 VALGRIND_CFI_EPILOGUE \
5261 "lgr %0, 2\n\t" \
5262 : "=d" (_res) \
5263 : "a" (&_argvec[0]) __FRAME_POINTER \
5264 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5265 ); \
5266 lval = (__typeof__(lval)) _res; \
5267 } while (0)
5268
5269 #define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5270 arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
5271 do { \
5272 volatile OrigFn _orig = (orig); \
5273 volatile unsigned long _argvec[13]; \
5274 volatile unsigned long _res; \
5275 _argvec[0] = (unsigned long)_orig.nraddr; \
5276 _argvec[1] = (unsigned long)arg1; \
5277 _argvec[2] = (unsigned long)arg2; \
5278 _argvec[3] = (unsigned long)arg3; \
5279 _argvec[4] = (unsigned long)arg4; \
5280 _argvec[5] = (unsigned long)arg5; \
5281 _argvec[6] = (unsigned long)arg6; \
5282 _argvec[7] = (unsigned long)arg7; \
5283 _argvec[8] = (unsigned long)arg8; \
5284 _argvec[9] = (unsigned long)arg9; \
5285 _argvec[10] = (unsigned long)arg10; \
5286 _argvec[11] = (unsigned long)arg11; \
5287 _argvec[12] = (unsigned long)arg12; \
5288 __asm__ volatile( \
5289 VALGRIND_CFI_PROLOGUE \
5290 "aghi 15,-216\n\t" \
5291 "lg 2, 8(1)\n\t" \
5292 "lg 3,16(1)\n\t" \
5293 "lg 4,24(1)\n\t" \
5294 "lg 5,32(1)\n\t" \
5295 "lg 6,40(1)\n\t" \
5296 "mvc 160(8,15), 48(1)\n\t" \
5297 "mvc 168(8,15), 56(1)\n\t" \
5298 "mvc 176(8,15), 64(1)\n\t" \
5299 "mvc 184(8,15), 72(1)\n\t" \
5300 "mvc 192(8,15), 80(1)\n\t" \
5301 "mvc 200(8,15), 88(1)\n\t" \
5302 "mvc 208(8,15), 96(1)\n\t" \
5303 "lg 1, 0(1)\n\t" \
5304 VALGRIND_CALL_NOREDIR_R1 \
5305 "aghi 15,216\n\t" \
5306 VALGRIND_CFI_EPILOGUE \
5307 "lgr %0, 2\n\t" \
5308 : "=d" (_res) \
5309 : "a" (&_argvec[0]) __FRAME_POINTER \
5310 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5311 ); \
5312 lval = (__typeof__(lval)) _res; \
5313 } while (0)
5314
5315
5316 #endif
5317
5318
5319
5320 #if defined(PLAT_mips32_linux)
5321
5322
5323 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
5324 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
5325 "$25", "$31"
5326
5327
5328
5329
5330 #define CALL_FN_W_v(lval, orig) \
5331 do { \
5332 volatile OrigFn _orig = (orig); \
5333 volatile unsigned long _argvec[1]; \
5334 volatile unsigned long _res; \
5335 _argvec[0] = (unsigned long)_orig.nraddr; \
5336 __asm__ volatile( \
5337 "subu $29, $29, 8 \n\t" \
5338 "sw $28, 0($29) \n\t" \
5339 "sw $31, 4($29) \n\t" \
5340 "subu $29, $29, 16 \n\t" \
5341 "lw $25, 0(%1) \n\t" \
5342 VALGRIND_CALL_NOREDIR_T9 \
5343 "addu $29, $29, 16\n\t" \
5344 "lw $28, 0($29) \n\t" \
5345 "lw $31, 4($29) \n\t" \
5346 "addu $29, $29, 8 \n\t" \
5347 "move %0, $2\n" \
5348 : "=r" (_res) \
5349 : "0" (&_argvec[0]) \
5350 : "memory", __CALLER_SAVED_REGS \
5351 ); \
5352 lval = (__typeof__(lval)) _res; \
5353 } while (0)
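/* o32 call scheme used below: the first four argument words travel in
   $4-$7, the callee address is placed in $25 ($t9) as position-independent
   code expects, and $28 ($gp) and $31 ($ra) are spilled around the call.
   At least 16 bytes of outgoing-argument space are always reserved (more
   for calls that pass arguments on the stack, which start at offset 16),
   and the result is collected from $2. */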
5354
5355 #define CALL_FN_W_W(lval, orig, arg1) \
5356 do { \
5357 volatile OrigFn _orig = (orig); \
5358 volatile unsigned long _argvec[2]; \
5359 volatile unsigned long _res; \
5360 _argvec[0] = (unsigned long)_orig.nraddr; \
5361 _argvec[1] = (unsigned long)(arg1); \
5362 __asm__ volatile( \
5363 "subu $29, $29, 8 \n\t" \
5364 "sw $28, 0($29) \n\t" \
5365 "sw $31, 4($29) \n\t" \
5366 "subu $29, $29, 16 \n\t" \
5367 "lw $4, 4(%1) \n\t" \
5368 "lw $25, 0(%1) \n\t" \
5369 VALGRIND_CALL_NOREDIR_T9 \
5370 "addu $29, $29, 16 \n\t" \
5371 "lw $28, 0($29) \n\t" \
5372 "lw $31, 4($29) \n\t" \
5373 "addu $29, $29, 8 \n\t" \
5374 "move %0, $2\n" \
5375 : "=r" (_res) \
5376 : "0" (&_argvec[0]) \
5377 : "memory", __CALLER_SAVED_REGS \
5378 ); \
5379 lval = (__typeof__(lval)) _res; \
5380 } while (0)
5381
5382 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5383 do { \
5384 volatile OrigFn _orig = (orig); \
5385 volatile unsigned long _argvec[3]; \
5386 volatile unsigned long _res; \
5387 _argvec[0] = (unsigned long)_orig.nraddr; \
5388 _argvec[1] = (unsigned long)(arg1); \
5389 _argvec[2] = (unsigned long)(arg2); \
5390 __asm__ volatile( \
5391 "subu $29, $29, 8 \n\t" \
5392 "sw $28, 0($29) \n\t" \
5393 "sw $31, 4($29) \n\t" \
5394 "subu $29, $29, 16 \n\t" \
5395 "lw $4, 4(%1) \n\t" \
5396 "lw $5, 8(%1) \n\t" \
5397 "lw $25, 0(%1) \n\t" \
5398 VALGRIND_CALL_NOREDIR_T9 \
5399 "addu $29, $29, 16 \n\t" \
5400 "lw $28, 0($29) \n\t" \
5401 "lw $31, 4($29) \n\t" \
5402 "addu $29, $29, 8 \n\t" \
5403 "move %0, $2\n" \
5404 : "=r" (_res) \
5405 : "0" (&_argvec[0]) \
5406 : "memory", __CALLER_SAVED_REGS \
5407 ); \
5408 lval = (__typeof__(lval)) _res; \
5409 } while (0)
5410
5411 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5412 do { \
5413 volatile OrigFn _orig = (orig); \
5414 volatile unsigned long _argvec[4]; \
5415 volatile unsigned long _res; \
5416 _argvec[0] = (unsigned long)_orig.nraddr; \
5417 _argvec[1] = (unsigned long)(arg1); \
5418 _argvec[2] = (unsigned long)(arg2); \
5419 _argvec[3] = (unsigned long)(arg3); \
5420 __asm__ volatile( \
5421 "subu $29, $29, 8 \n\t" \
5422 "sw $28, 0($29) \n\t" \
5423 "sw $31, 4($29) \n\t" \
5424 "subu $29, $29, 16 \n\t" \
5425 "lw $4, 4(%1) \n\t" \
5426 "lw $5, 8(%1) \n\t" \
5427 "lw $6, 12(%1) \n\t" \
5428 "lw $25, 0(%1) \n\t" \
5429 VALGRIND_CALL_NOREDIR_T9 \
5430 "addu $29, $29, 16 \n\t" \
5431 "lw $28, 0($29) \n\t" \
5432 "lw $31, 4($29) \n\t" \
5433 "addu $29, $29, 8 \n\t" \
5434 "move %0, $2\n" \
5435 : "=r" (_res) \
5436 : "0" (&_argvec[0]) \
5437 : "memory", __CALLER_SAVED_REGS \
5438 ); \
5439 lval = (__typeof__(lval)) _res; \
5440 } while (0)
5441
5442 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5443 do { \
5444 volatile OrigFn _orig = (orig); \
5445 volatile unsigned long _argvec[5]; \
5446 volatile unsigned long _res; \
5447 _argvec[0] = (unsigned long)_orig.nraddr; \
5448 _argvec[1] = (unsigned long)(arg1); \
5449 _argvec[2] = (unsigned long)(arg2); \
5450 _argvec[3] = (unsigned long)(arg3); \
5451 _argvec[4] = (unsigned long)(arg4); \
5452 __asm__ volatile( \
5453 "subu $29, $29, 8 \n\t" \
5454 "sw $28, 0($29) \n\t" \
5455 "sw $31, 4($29) \n\t" \
5456 "subu $29, $29, 16 \n\t" \
5457 "lw $4, 4(%1) \n\t" \
5458 "lw $5, 8(%1) \n\t" \
5459 "lw $6, 12(%1) \n\t" \
5460 "lw $7, 16(%1) \n\t" \
5461 "lw $25, 0(%1) \n\t" \
5462 VALGRIND_CALL_NOREDIR_T9 \
5463 "addu $29, $29, 16 \n\t" \
5464 "lw $28, 0($29) \n\t" \
5465 "lw $31, 4($29) \n\t" \
5466 "addu $29, $29, 8 \n\t" \
5467 "move %0, $2\n" \
5468 : "=r" (_res) \
5469 : "0" (&_argvec[0]) \
5470 : "memory", __CALLER_SAVED_REGS \
5471 ); \
5472 lval = (__typeof__(lval)) _res; \
5473 } while (0)
5474
5475 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5476 do { \
5477 volatile OrigFn _orig = (orig); \
5478 volatile unsigned long _argvec[6]; \
5479 volatile unsigned long _res; \
5480 _argvec[0] = (unsigned long)_orig.nraddr; \
5481 _argvec[1] = (unsigned long)(arg1); \
5482 _argvec[2] = (unsigned long)(arg2); \
5483 _argvec[3] = (unsigned long)(arg3); \
5484 _argvec[4] = (unsigned long)(arg4); \
5485 _argvec[5] = (unsigned long)(arg5); \
5486 __asm__ volatile( \
5487 "subu $29, $29, 8 \n\t" \
5488 "sw $28, 0($29) \n\t" \
5489 "sw $31, 4($29) \n\t" \
5490 "lw $4, 20(%1) \n\t" \
5491 "subu $29, $29, 24\n\t" \
5492 "sw $4, 16($29) \n\t" \
5493 "lw $4, 4(%1) \n\t" \
5494 "lw $5, 8(%1) \n\t" \
5495 "lw $6, 12(%1) \n\t" \
5496 "lw $7, 16(%1) \n\t" \
5497 "lw $25, 0(%1) \n\t" \
5498 VALGRIND_CALL_NOREDIR_T9 \
5499 "addu $29, $29, 24 \n\t" \
5500 "lw $28, 0($29) \n\t" \
5501 "lw $31, 4($29) \n\t" \
5502 "addu $29, $29, 8 \n\t" \
5503 "move %0, $2\n" \
5504 : "=r" (_res) \
5505 : "0" (&_argvec[0]) \
5506 : "memory", __CALLER_SAVED_REGS \
5507 ); \
5508 lval = (__typeof__(lval)) _res; \
5509 } while (0)
5510 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5511 do { \
5512 volatile OrigFn _orig = (orig); \
5513 volatile unsigned long _argvec[7]; \
5514 volatile unsigned long _res; \
5515 _argvec[0] = (unsigned long)_orig.nraddr; \
5516 _argvec[1] = (unsigned long)(arg1); \
5517 _argvec[2] = (unsigned long)(arg2); \
5518 _argvec[3] = (unsigned long)(arg3); \
5519 _argvec[4] = (unsigned long)(arg4); \
5520 _argvec[5] = (unsigned long)(arg5); \
5521 _argvec[6] = (unsigned long)(arg6); \
5522 __asm__ volatile( \
5523 "subu $29, $29, 8 \n\t" \
5524 "sw $28, 0($29) \n\t" \
5525 "sw $31, 4($29) \n\t" \
5526 "lw $4, 20(%1) \n\t" \
5527 "subu $29, $29, 32\n\t" \
5528 "sw $4, 16($29) \n\t" \
5529 "lw $4, 24(%1) \n\t" \
5530 "nop\n\t" \
5531 "sw $4, 20($29) \n\t" \
5532 "lw $4, 4(%1) \n\t" \
5533 "lw $5, 8(%1) \n\t" \
5534 "lw $6, 12(%1) \n\t" \
5535 "lw $7, 16(%1) \n\t" \
5536 "lw $25, 0(%1) \n\t" \
5537 VALGRIND_CALL_NOREDIR_T9 \
5538 "addu $29, $29, 32 \n\t" \
5539 "lw $28, 0($29) \n\t" \
5540 "lw $31, 4($29) \n\t" \
5541 "addu $29, $29, 8 \n\t" \
5542 "move %0, $2\n" \
5543 : "=r" (_res) \
5544 : "0" (&_argvec[0]) \
5545 : "memory", __CALLER_SAVED_REGS \
5546 ); \
5547 lval = (__typeof__(lval)) _res; \
5548 } while (0)
5549
5550 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5551 arg7) \
5552 do { \
5553 volatile OrigFn _orig = (orig); \
5554 volatile unsigned long _argvec[8]; \
5555 volatile unsigned long _res; \
5556 _argvec[0] = (unsigned long)_orig.nraddr; \
5557 _argvec[1] = (unsigned long)(arg1); \
5558 _argvec[2] = (unsigned long)(arg2); \
5559 _argvec[3] = (unsigned long)(arg3); \
5560 _argvec[4] = (unsigned long)(arg4); \
5561 _argvec[5] = (unsigned long)(arg5); \
5562 _argvec[6] = (unsigned long)(arg6); \
5563 _argvec[7] = (unsigned long)(arg7); \
5564 __asm__ volatile( \
5565 "subu $29, $29, 8 \n\t" \
5566 "sw $28, 0($29) \n\t" \
5567 "sw $31, 4($29) \n\t" \
5568 "lw $4, 20(%1) \n\t" \
5569 "subu $29, $29, 32\n\t" \
5570 "sw $4, 16($29) \n\t" \
5571 "lw $4, 24(%1) \n\t" \
5572 "sw $4, 20($29) \n\t" \
5573 "lw $4, 28(%1) \n\t" \
5574 "sw $4, 24($29) \n\t" \
5575 "lw $4, 4(%1) \n\t" \
5576 "lw $5, 8(%1) \n\t" \
5577 "lw $6, 12(%1) \n\t" \
5578 "lw $7, 16(%1) \n\t" \
5579 "lw $25, 0(%1) \n\t" \
5580 VALGRIND_CALL_NOREDIR_T9 \
5581 "addu $29, $29, 32 \n\t" \
5582 "lw $28, 0($29) \n\t" \
5583 "lw $31, 4($29) \n\t" \
5584 "addu $29, $29, 8 \n\t" \
5585 "move %0, $2\n" \
5586 : "=r" (_res) \
5587 : "0" (&_argvec[0]) \
5588 : "memory", __CALLER_SAVED_REGS \
5589 ); \
5590 lval = (__typeof__(lval)) _res; \
5591 } while (0)
5592
5593 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5594 arg7,arg8) \
5595 do { \
5596 volatile OrigFn _orig = (orig); \
5597 volatile unsigned long _argvec[9]; \
5598 volatile unsigned long _res; \
5599 _argvec[0] = (unsigned long)_orig.nraddr; \
5600 _argvec[1] = (unsigned long)(arg1); \
5601 _argvec[2] = (unsigned long)(arg2); \
5602 _argvec[3] = (unsigned long)(arg3); \
5603 _argvec[4] = (unsigned long)(arg4); \
5604 _argvec[5] = (unsigned long)(arg5); \
5605 _argvec[6] = (unsigned long)(arg6); \
5606 _argvec[7] = (unsigned long)(arg7); \
5607 _argvec[8] = (unsigned long)(arg8); \
5608 __asm__ volatile( \
5609 "subu $29, $29, 8 \n\t" \
5610 "sw $28, 0($29) \n\t" \
5611 "sw $31, 4($29) \n\t" \
5612 "lw $4, 20(%1) \n\t" \
5613 "subu $29, $29, 40\n\t" \
5614 "sw $4, 16($29) \n\t" \
5615 "lw $4, 24(%1) \n\t" \
5616 "sw $4, 20($29) \n\t" \
5617 "lw $4, 28(%1) \n\t" \
5618 "sw $4, 24($29) \n\t" \
5619 "lw $4, 32(%1) \n\t" \
5620 "sw $4, 28($29) \n\t" \
5621 "lw $4, 4(%1) \n\t" \
5622 "lw $5, 8(%1) \n\t" \
5623 "lw $6, 12(%1) \n\t" \
5624 "lw $7, 16(%1) \n\t" \
5625 "lw $25, 0(%1) \n\t" \
5626 VALGRIND_CALL_NOREDIR_T9 \
5627 "addu $29, $29, 40 \n\t" \
5628 "lw $28, 0($29) \n\t" \
5629 "lw $31, 4($29) \n\t" \
5630 "addu $29, $29, 8 \n\t" \
5631 "move %0, $2\n" \
5632 : "=r" (_res) \
5633 : "0" (&_argvec[0]) \
5634 : "memory", __CALLER_SAVED_REGS \
5635 ); \
5636 lval = (__typeof__(lval)) _res; \
5637 } while (0)
5638
5639 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5640 arg7,arg8,arg9) \
5641 do { \
5642 volatile OrigFn _orig = (orig); \
5643 volatile unsigned long _argvec[10]; \
5644 volatile unsigned long _res; \
5645 _argvec[0] = (unsigned long)_orig.nraddr; \
5646 _argvec[1] = (unsigned long)(arg1); \
5647 _argvec[2] = (unsigned long)(arg2); \
5648 _argvec[3] = (unsigned long)(arg3); \
5649 _argvec[4] = (unsigned long)(arg4); \
5650 _argvec[5] = (unsigned long)(arg5); \
5651 _argvec[6] = (unsigned long)(arg6); \
5652 _argvec[7] = (unsigned long)(arg7); \
5653 _argvec[8] = (unsigned long)(arg8); \
5654 _argvec[9] = (unsigned long)(arg9); \
5655 __asm__ volatile( \
5656 "subu $29, $29, 8 \n\t" \
5657 "sw $28, 0($29) \n\t" \
5658 "sw $31, 4($29) \n\t" \
5659 "lw $4, 20(%1) \n\t" \
5660 "subu $29, $29, 40\n\t" \
5661 "sw $4, 16($29) \n\t" \
5662 "lw $4, 24(%1) \n\t" \
5663 "sw $4, 20($29) \n\t" \
5664 "lw $4, 28(%1) \n\t" \
5665 "sw $4, 24($29) \n\t" \
5666 "lw $4, 32(%1) \n\t" \
5667 "sw $4, 28($29) \n\t" \
5668 "lw $4, 36(%1) \n\t" \
5669 "sw $4, 32($29) \n\t" \
5670 "lw $4, 4(%1) \n\t" \
5671 "lw $5, 8(%1) \n\t" \
5672 "lw $6, 12(%1) \n\t" \
5673 "lw $7, 16(%1) \n\t" \
5674 "lw $25, 0(%1) \n\t" \
5675 VALGRIND_CALL_NOREDIR_T9 \
5676 "addu $29, $29, 40 \n\t" \
5677 "lw $28, 0($29) \n\t" \
5678 "lw $31, 4($29) \n\t" \
5679 "addu $29, $29, 8 \n\t" \
5680 "move %0, $2\n" \
5681 : "=r" (_res) \
5682 : "0" (&_argvec[0]) \
5683 : "memory", __CALLER_SAVED_REGS \
5684 ); \
5685 lval = (__typeof__(lval)) _res; \
5686 } while (0)
5687
5688 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5689 arg7,arg8,arg9,arg10) \
5690 do { \
5691 volatile OrigFn _orig = (orig); \
5692 volatile unsigned long _argvec[11]; \
5693 volatile unsigned long _res; \
5694 _argvec[0] = (unsigned long)_orig.nraddr; \
5695 _argvec[1] = (unsigned long)(arg1); \
5696 _argvec[2] = (unsigned long)(arg2); \
5697 _argvec[3] = (unsigned long)(arg3); \
5698 _argvec[4] = (unsigned long)(arg4); \
5699 _argvec[5] = (unsigned long)(arg5); \
5700 _argvec[6] = (unsigned long)(arg6); \
5701 _argvec[7] = (unsigned long)(arg7); \
5702 _argvec[8] = (unsigned long)(arg8); \
5703 _argvec[9] = (unsigned long)(arg9); \
5704 _argvec[10] = (unsigned long)(arg10); \
5705 __asm__ volatile( \
5706 "subu $29, $29, 8 \n\t" \
5707 "sw $28, 0($29) \n\t" \
5708 "sw $31, 4($29) \n\t" \
5709 "lw $4, 20(%1) \n\t" \
5710 "subu $29, $29, 48\n\t" \
5711 "sw $4, 16($29) \n\t" \
5712 "lw $4, 24(%1) \n\t" \
5713 "sw $4, 20($29) \n\t" \
5714 "lw $4, 28(%1) \n\t" \
5715 "sw $4, 24($29) \n\t" \
5716 "lw $4, 32(%1) \n\t" \
5717 "sw $4, 28($29) \n\t" \
5718 "lw $4, 36(%1) \n\t" \
5719 "sw $4, 32($29) \n\t" \
5720 "lw $4, 40(%1) \n\t" \
5721 "sw $4, 36($29) \n\t" \
5722 "lw $4, 4(%1) \n\t" \
5723 "lw $5, 8(%1) \n\t" \
5724 "lw $6, 12(%1) \n\t" \
5725 "lw $7, 16(%1) \n\t" \
5726 "lw $25, 0(%1) \n\t" \
5727 VALGRIND_CALL_NOREDIR_T9 \
5728 "addu $29, $29, 48 \n\t" \
5729 "lw $28, 0($29) \n\t" \
5730 "lw $31, 4($29) \n\t" \
5731 "addu $29, $29, 8 \n\t" \
5732 "move %0, $2\n" \
5733 : "=r" (_res) \
5734 : "0" (&_argvec[0]) \
5735 : "memory", __CALLER_SAVED_REGS \
5736 ); \
5737 lval = (__typeof__(lval)) _res; \
5738 } while (0)
5739
5740 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5741 arg6,arg7,arg8,arg9,arg10, \
5742 arg11) \
5743 do { \
5744 volatile OrigFn _orig = (orig); \
5745 volatile unsigned long _argvec[12]; \
5746 volatile unsigned long _res; \
5747 _argvec[0] = (unsigned long)_orig.nraddr; \
5748 _argvec[1] = (unsigned long)(arg1); \
5749 _argvec[2] = (unsigned long)(arg2); \
5750 _argvec[3] = (unsigned long)(arg3); \
5751 _argvec[4] = (unsigned long)(arg4); \
5752 _argvec[5] = (unsigned long)(arg5); \
5753 _argvec[6] = (unsigned long)(arg6); \
5754 _argvec[7] = (unsigned long)(arg7); \
5755 _argvec[8] = (unsigned long)(arg8); \
5756 _argvec[9] = (unsigned long)(arg9); \
5757 _argvec[10] = (unsigned long)(arg10); \
5758 _argvec[11] = (unsigned long)(arg11); \
5759 __asm__ volatile( \
5760 "subu $29, $29, 8 \n\t" \
5761 "sw $28, 0($29) \n\t" \
5762 "sw $31, 4($29) \n\t" \
5763 "lw $4, 20(%1) \n\t" \
5764 "subu $29, $29, 48\n\t" \
5765 "sw $4, 16($29) \n\t" \
5766 "lw $4, 24(%1) \n\t" \
5767 "sw $4, 20($29) \n\t" \
5768 "lw $4, 28(%1) \n\t" \
5769 "sw $4, 24($29) \n\t" \
5770 "lw $4, 32(%1) \n\t" \
5771 "sw $4, 28($29) \n\t" \
5772 "lw $4, 36(%1) \n\t" \
5773 "sw $4, 32($29) \n\t" \
5774 "lw $4, 40(%1) \n\t" \
5775 "sw $4, 36($29) \n\t" \
5776 "lw $4, 44(%1) \n\t" \
5777 "sw $4, 40($29) \n\t" \
5778 "lw $4, 4(%1) \n\t" \
5779 "lw $5, 8(%1) \n\t" \
5780 "lw $6, 12(%1) \n\t" \
5781 "lw $7, 16(%1) \n\t" \
5782 "lw $25, 0(%1) \n\t" \
5783 VALGRIND_CALL_NOREDIR_T9 \
5784 "addu $29, $29, 48 \n\t" \
5785 "lw $28, 0($29) \n\t" \
5786 "lw $31, 4($29) \n\t" \
5787 "addu $29, $29, 8 \n\t" \
5788 "move %0, $2\n" \
5789 : "=r" (_res) \
5790 : "0" (&_argvec[0]) \
5791 : "memory", __CALLER_SAVED_REGS \
5792 ); \
5793 lval = (__typeof__(lval)) _res; \
5794 } while (0)
5795
5796 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5797 arg6,arg7,arg8,arg9,arg10, \
5798 arg11,arg12) \
5799 do { \
5800 volatile OrigFn _orig = (orig); \
5801 volatile unsigned long _argvec[13]; \
5802 volatile unsigned long _res; \
5803 _argvec[0] = (unsigned long)_orig.nraddr; \
5804 _argvec[1] = (unsigned long)(arg1); \
5805 _argvec[2] = (unsigned long)(arg2); \
5806 _argvec[3] = (unsigned long)(arg3); \
5807 _argvec[4] = (unsigned long)(arg4); \
5808 _argvec[5] = (unsigned long)(arg5); \
5809 _argvec[6] = (unsigned long)(arg6); \
5810 _argvec[7] = (unsigned long)(arg7); \
5811 _argvec[8] = (unsigned long)(arg8); \
5812 _argvec[9] = (unsigned long)(arg9); \
5813 _argvec[10] = (unsigned long)(arg10); \
5814 _argvec[11] = (unsigned long)(arg11); \
5815 _argvec[12] = (unsigned long)(arg12); \
5816 __asm__ volatile( \
5817 "subu $29, $29, 8 \n\t" \
5818 "sw $28, 0($29) \n\t" \
5819 "sw $31, 4($29) \n\t" \
5820 "lw $4, 20(%1) \n\t" \
5821 "subu $29, $29, 56\n\t" \
5822 "sw $4, 16($29) \n\t" \
5823 "lw $4, 24(%1) \n\t" \
5824 "sw $4, 20($29) \n\t" \
5825 "lw $4, 28(%1) \n\t" \
5826 "sw $4, 24($29) \n\t" \
5827 "lw $4, 32(%1) \n\t" \
5828 "sw $4, 28($29) \n\t" \
5829 "lw $4, 36(%1) \n\t" \
5830 "sw $4, 32($29) \n\t" \
5831 "lw $4, 40(%1) \n\t" \
5832 "sw $4, 36($29) \n\t" \
5833 "lw $4, 44(%1) \n\t" \
5834 "sw $4, 40($29) \n\t" \
5835 "lw $4, 48(%1) \n\t" \
5836 "sw $4, 44($29) \n\t" \
5837 "lw $4, 4(%1) \n\t" \
5838 "lw $5, 8(%1) \n\t" \
5839 "lw $6, 12(%1) \n\t" \
5840 "lw $7, 16(%1) \n\t" \
5841 "lw $25, 0(%1) \n\t" \
5842 VALGRIND_CALL_NOREDIR_T9 \
5843 "addu $29, $29, 56 \n\t" \
5844 "lw $28, 0($29) \n\t" \
5845 "lw $31, 4($29) \n\t" \
5846 "addu $29, $29, 8 \n\t" \
5847 "move %0, $2\n" \
5848 : "=r" (_res) \
5849 : "r" (&_argvec[0]) \
5850 : "memory", __CALLER_SAVED_REGS \
5851 ); \
5852 lval = (__typeof__(lval)) _res; \
5853 } while (0)
5854
5855 #endif
5856
5857
5858
5859 #if defined(PLAT_nanomips_linux)
5860
5861
5862 #define __CALLER_SAVED_REGS "$t4", "$t5", "$a0", "$a1", "$a2", \
5863 "$a3", "$a4", "$a5", "$a6", "$a7", "$t0", "$t1", "$t2", "$t3", \
5864 "$t8","$t9", "$at"
5865
5866
5867
5868
5869 #define CALL_FN_W_v(lval, orig) \
5870 do { \
5871 volatile OrigFn _orig = (orig); \
5872 volatile unsigned long _argvec[1]; \
5873 volatile unsigned long _res; \
5874 _argvec[0] = (unsigned long)_orig.nraddr; \
5875 __asm__ volatile( \
5876 "lw $t9, 0(%1)\n\t" \
5877 VALGRIND_CALL_NOREDIR_T9 \
5878 "move %0, $a0\n" \
5879 : "=r" (_res) \
5880 : "r" (&_argvec[0]) \
5881 : "memory", __CALLER_SAVED_REGS \
5882 ); \
5883 lval = (__typeof__(lval)) _res; \
5884 } while (0)
5885
5886 #define CALL_FN_W_W(lval, orig, arg1) \
5887 do { \
5888 volatile OrigFn _orig = (orig); \
5889 volatile unsigned long _argvec[2]; \
5890 volatile unsigned long _res; \
5891 _argvec[0] = (unsigned long)_orig.nraddr; \
5892 _argvec[1] = (unsigned long)(arg1); \
5893 __asm__ volatile( \
5894 "lw $t9, 0(%1)\n\t" \
5895 "lw $a0, 4(%1)\n\t" \
5896 VALGRIND_CALL_NOREDIR_T9 \
5897 "move %0, $a0\n" \
5898 : "=r" (_res) \
5899 : "r" (&_argvec[0]) \
5900 : "memory", __CALLER_SAVED_REGS \
5901 ); \
5902 lval = (__typeof__(lval)) _res; \
5903 } while (0)
5904
5905 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5906 do { \
5907 volatile OrigFn _orig = (orig); \
5908 volatile unsigned long _argvec[3]; \
5909 volatile unsigned long _res; \
5910 _argvec[0] = (unsigned long)_orig.nraddr; \
5911 _argvec[1] = (unsigned long)(arg1); \
5912 _argvec[2] = (unsigned long)(arg2); \
5913 __asm__ volatile( \
5914 "lw $t9, 0(%1)\n\t" \
5915 "lw $a0, 4(%1)\n\t" \
5916 "lw $a1, 8(%1)\n\t" \
5917 VALGRIND_CALL_NOREDIR_T9 \
5918 "move %0, $a0\n" \
5919 : "=r" (_res) \
5920 : "r" (&_argvec[0]) \
5921 : "memory", __CALLER_SAVED_REGS \
5922 ); \
5923 lval = (__typeof__(lval)) _res; \
5924 } while (0)
5925
5926 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5927 do { \
5928 volatile OrigFn _orig = (orig); \
5929 volatile unsigned long _argvec[4]; \
5930 volatile unsigned long _res; \
5931 _argvec[0] = (unsigned long)_orig.nraddr; \
5932 _argvec[1] = (unsigned long)(arg1); \
5933 _argvec[2] = (unsigned long)(arg2); \
5934 _argvec[3] = (unsigned long)(arg3); \
5935 __asm__ volatile( \
5936 "lw $t9, 0(%1)\n\t" \
5937 "lw $a0, 4(%1)\n\t" \
5938 "lw $a1, 8(%1)\n\t" \
5939 "lw $a2,12(%1)\n\t" \
5940 VALGRIND_CALL_NOREDIR_T9 \
5941 "move %0, $a0\n" \
5942 : "=r" (_res) \
5943 : "r" (&_argvec[0]) \
5944 : "memory", __CALLER_SAVED_REGS \
5945 ); \
5946 lval = (__typeof__(lval)) _res; \
5947 } while (0)
5948
5949 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5950 do { \
5951 volatile OrigFn _orig = (orig); \
5952 volatile unsigned long _argvec[5]; \
5953 volatile unsigned long _res; \
5954 _argvec[0] = (unsigned long)_orig.nraddr; \
5955 _argvec[1] = (unsigned long)(arg1); \
5956 _argvec[2] = (unsigned long)(arg2); \
5957 _argvec[3] = (unsigned long)(arg3); \
5958 _argvec[4] = (unsigned long)(arg4); \
5959 __asm__ volatile( \
5960 "lw $t9, 0(%1)\n\t" \
5961 "lw $a0, 4(%1)\n\t" \
5962 "lw $a1, 8(%1)\n\t" \
5963 "lw $a2,12(%1)\n\t" \
5964 "lw $a3,16(%1)\n\t" \
5965 VALGRIND_CALL_NOREDIR_T9 \
5966 "move %0, $a0\n" \
5967 : "=r" (_res) \
5968 : "r" (&_argvec[0]) \
5969 : "memory", __CALLER_SAVED_REGS \
5970 ); \
5971 lval = (__typeof__(lval)) _res; \
5972 } while (0)
5973
5974 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5975 do { \
5976 volatile OrigFn _orig = (orig); \
5977 volatile unsigned long _argvec[6]; \
5978 volatile unsigned long _res; \
5979 _argvec[0] = (unsigned long)_orig.nraddr; \
5980 _argvec[1] = (unsigned long)(arg1); \
5981 _argvec[2] = (unsigned long)(arg2); \
5982 _argvec[3] = (unsigned long)(arg3); \
5983 _argvec[4] = (unsigned long)(arg4); \
5984 _argvec[5] = (unsigned long)(arg5); \
5985 __asm__ volatile( \
5986 "lw $t9, 0(%1)\n\t" \
5987 "lw $a0, 4(%1)\n\t" \
5988 "lw $a1, 8(%1)\n\t" \
5989 "lw $a2,12(%1)\n\t" \
5990 "lw $a3,16(%1)\n\t" \
5991 "lw $a4,20(%1)\n\t" \
5992 VALGRIND_CALL_NOREDIR_T9 \
5993 "move %0, $a0\n" \
5994 : "=r" (_res) \
5995 : "r" (&_argvec[0]) \
5996 : "memory", __CALLER_SAVED_REGS \
5997 ); \
5998 lval = (__typeof__(lval)) _res; \
5999 } while (0)
6000 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
6001 do { \
6002 volatile OrigFn _orig = (orig); \
6003 volatile unsigned long _argvec[7]; \
6004 volatile unsigned long _res; \
6005 _argvec[0] = (unsigned long)_orig.nraddr; \
6006 _argvec[1] = (unsigned long)(arg1); \
6007 _argvec[2] = (unsigned long)(arg2); \
6008 _argvec[3] = (unsigned long)(arg3); \
6009 _argvec[4] = (unsigned long)(arg4); \
6010 _argvec[5] = (unsigned long)(arg5); \
6011 _argvec[6] = (unsigned long)(arg6); \
6012 __asm__ volatile( \
6013 "lw $t9, 0(%1)\n\t" \
6014 "lw $a0, 4(%1)\n\t" \
6015 "lw $a1, 8(%1)\n\t" \
6016 "lw $a2,12(%1)\n\t" \
6017 "lw $a3,16(%1)\n\t" \
6018 "lw $a4,20(%1)\n\t" \
6019 "lw $a5,24(%1)\n\t" \
6020 VALGRIND_CALL_NOREDIR_T9 \
6021 "move %0, $a0\n" \
6022 : "=r" (_res) \
6023 : "r" (&_argvec[0]) \
6024 : "memory", __CALLER_SAVED_REGS \
6025 ); \
6026 lval = (__typeof__(lval)) _res; \
6027 } while (0)
6028
6029 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6030 arg7) \
6031 do { \
6032 volatile OrigFn _orig = (orig); \
6033 volatile unsigned long _argvec[8]; \
6034 volatile unsigned long _res; \
6035 _argvec[0] = (unsigned long)_orig.nraddr; \
6036 _argvec[1] = (unsigned long)(arg1); \
6037 _argvec[2] = (unsigned long)(arg2); \
6038 _argvec[3] = (unsigned long)(arg3); \
6039 _argvec[4] = (unsigned long)(arg4); \
6040 _argvec[5] = (unsigned long)(arg5); \
6041 _argvec[6] = (unsigned long)(arg6); \
6042 _argvec[7] = (unsigned long)(arg7); \
6043 __asm__ volatile( \
6044 "lw $t9, 0(%1)\n\t" \
6045 "lw $a0, 4(%1)\n\t" \
6046 "lw $a1, 8(%1)\n\t" \
6047 "lw $a2,12(%1)\n\t" \
6048 "lw $a3,16(%1)\n\t" \
6049 "lw $a4,20(%1)\n\t" \
6050 "lw $a5,24(%1)\n\t" \
6051 "lw $a6,28(%1)\n\t" \
6052 VALGRIND_CALL_NOREDIR_T9 \
6053 "move %0, $a0\n" \
6054 : "=r" (_res) \
6055 : "r" (&_argvec[0]) \
6056 : "memory", __CALLER_SAVED_REGS \
6057 ); \
6058 lval = (__typeof__(lval)) _res; \
6059 } while (0)
6060
6061 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6062 arg7,arg8) \
6063 do { \
6064 volatile OrigFn _orig = (orig); \
6065 volatile unsigned long _argvec[9]; \
6066 volatile unsigned long _res; \
6067 _argvec[0] = (unsigned long)_orig.nraddr; \
6068 _argvec[1] = (unsigned long)(arg1); \
6069 _argvec[2] = (unsigned long)(arg2); \
6070 _argvec[3] = (unsigned long)(arg3); \
6071 _argvec[4] = (unsigned long)(arg4); \
6072 _argvec[5] = (unsigned long)(arg5); \
6073 _argvec[6] = (unsigned long)(arg6); \
6074 _argvec[7] = (unsigned long)(arg7); \
6075 _argvec[8] = (unsigned long)(arg8); \
6076 __asm__ volatile( \
6077 "lw $t9, 0(%1)\n\t" \
6078 "lw $a0, 4(%1)\n\t" \
6079 "lw $a1, 8(%1)\n\t" \
6080 "lw $a2,12(%1)\n\t" \
6081 "lw $a3,16(%1)\n\t" \
6082 "lw $a4,20(%1)\n\t" \
6083 "lw $a5,24(%1)\n\t" \
6084 "lw $a6,28(%1)\n\t" \
6085 "lw $a7,32(%1)\n\t" \
6086 VALGRIND_CALL_NOREDIR_T9 \
6087 "move %0, $a0\n" \
6088 : "=r" (_res) \
6089 : "r" (&_argvec[0]) \
6090 : "memory", __CALLER_SAVED_REGS \
6091 ); \
6092 lval = (__typeof__(lval)) _res; \
6093 } while (0)
6094
6095 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6096 arg7,arg8,arg9) \
6097 do { \
6098 volatile OrigFn _orig = (orig); \
6099 volatile unsigned long _argvec[10]; \
6100 volatile unsigned long _res; \
6101 _argvec[0] = (unsigned long)_orig.nraddr; \
6102 _argvec[1] = (unsigned long)(arg1); \
6103 _argvec[2] = (unsigned long)(arg2); \
6104 _argvec[3] = (unsigned long)(arg3); \
6105 _argvec[4] = (unsigned long)(arg4); \
6106 _argvec[5] = (unsigned long)(arg5); \
6107 _argvec[6] = (unsigned long)(arg6); \
6108 _argvec[7] = (unsigned long)(arg7); \
6109 _argvec[8] = (unsigned long)(arg8); \
6110 _argvec[9] = (unsigned long)(arg9); \
6111 __asm__ volatile( \
6112 "addiu $sp, $sp, -16 \n\t" \
6113 "lw $t9,36(%1) \n\t" \
6114 "sw $t9, 0($sp) \n\t" \
6115 "lw $t9, 0(%1) \n\t" \
6116 "lw $a0, 4(%1) \n\t" \
6117 "lw $a1, 8(%1) \n\t" \
6118 "lw $a2,12(%1) \n\t" \
6119 "lw $a3,16(%1) \n\t" \
6120 "lw $a4,20(%1) \n\t" \
6121 "lw $a5,24(%1) \n\t" \
6122 "lw $a6,28(%1) \n\t" \
6123 "lw $a7,32(%1) \n\t" \
6124 VALGRIND_CALL_NOREDIR_T9 \
6125 "move %0, $a0 \n\t" \
6126 "addiu $sp, $sp, 16 \n\t" \
6127 : "=r" (_res) \
6128 : "r" (&_argvec[0]) \
6129 : "memory", __CALLER_SAVED_REGS \
6130 ); \
6131 lval = (__typeof__(lval)) _res; \
6132 } while (0)
6133
6134 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6135 arg7,arg8,arg9,arg10) \
6136 do { \
6137 volatile OrigFn _orig = (orig); \
6138 volatile unsigned long _argvec[11]; \
6139 volatile unsigned long _res; \
6140 _argvec[0] = (unsigned long)_orig.nraddr; \
6141 _argvec[1] = (unsigned long)(arg1); \
6142 _argvec[2] = (unsigned long)(arg2); \
6143 _argvec[3] = (unsigned long)(arg3); \
6144 _argvec[4] = (unsigned long)(arg4); \
6145 _argvec[5] = (unsigned long)(arg5); \
6146 _argvec[6] = (unsigned long)(arg6); \
6147 _argvec[7] = (unsigned long)(arg7); \
6148 _argvec[8] = (unsigned long)(arg8); \
6149 _argvec[9] = (unsigned long)(arg9); \
6150 _argvec[10] = (unsigned long)(arg10); \
6151 __asm__ volatile( \
6152 "addiu $sp, $sp, -16 \n\t" \
6153 "lw $t9,36(%1) \n\t" \
6154 "sw $t9, 0($sp) \n\t" \
6155 "lw $t9,40(%1) \n\t" \
6156 "sw $t9, 4($sp) \n\t" \
6157 "lw $t9, 0(%1) \n\t" \
6158 "lw $a0, 4(%1) \n\t" \
6159 "lw $a1, 8(%1) \n\t" \
6160 "lw $a2,12(%1) \n\t" \
6161 "lw $a3,16(%1) \n\t" \
6162 "lw $a4,20(%1) \n\t" \
6163 "lw $a5,24(%1) \n\t" \
6164 "lw $a6,28(%1) \n\t" \
6165 "lw $a7,32(%1) \n\t" \
6166 VALGRIND_CALL_NOREDIR_T9 \
6167 "move %0, $a0 \n\t" \
6168 "addiu $sp, $sp, 16 \n\t" \
6169 : "=r" (_res) \
6170 : "r" (&_argvec[0]) \
6171 : "memory", __CALLER_SAVED_REGS \
6172 ); \
6173 lval = (__typeof__(lval)) _res; \
6174 } while (0)
6175
6176 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6177 arg6,arg7,arg8,arg9,arg10, \
6178 arg11) \
6179 do { \
6180 volatile OrigFn _orig = (orig); \
6181 volatile unsigned long _argvec[12]; \
6182 volatile unsigned long _res; \
6183 _argvec[0] = (unsigned long)_orig.nraddr; \
6184 _argvec[1] = (unsigned long)(arg1); \
6185 _argvec[2] = (unsigned long)(arg2); \
6186 _argvec[3] = (unsigned long)(arg3); \
6187 _argvec[4] = (unsigned long)(arg4); \
6188 _argvec[5] = (unsigned long)(arg5); \
6189 _argvec[6] = (unsigned long)(arg6); \
6190 _argvec[7] = (unsigned long)(arg7); \
6191 _argvec[8] = (unsigned long)(arg8); \
6192 _argvec[9] = (unsigned long)(arg9); \
6193 _argvec[10] = (unsigned long)(arg10); \
6194 _argvec[11] = (unsigned long)(arg11); \
6195 __asm__ volatile( \
6196 "addiu $sp, $sp, -16 \n\t" \
6197 "lw $t9,36(%1) \n\t" \
6198 "sw $t9, 0($sp) \n\t" \
6199 "lw $t9,40(%1) \n\t" \
6200 "sw $t9, 4($sp) \n\t" \
6201 "lw $t9,44(%1) \n\t" \
6202 "sw $t9, 8($sp) \n\t" \
6203 "lw $t9, 0(%1) \n\t" \
6204 "lw $a0, 4(%1) \n\t" \
6205 "lw $a1, 8(%1) \n\t" \
6206 "lw $a2,12(%1) \n\t" \
6207 "lw $a3,16(%1) \n\t" \
6208 "lw $a4,20(%1) \n\t" \
6209 "lw $a5,24(%1) \n\t" \
6210 "lw $a6,28(%1) \n\t" \
6211 "lw $a7,32(%1) \n\t" \
6212 VALGRIND_CALL_NOREDIR_T9 \
6213 "move %0, $a0 \n\t" \
6214 "addiu $sp, $sp, 16 \n\t" \
6215 : "=r" (_res) \
6216 : "r" (&_argvec[0]) \
6217 : "memory", __CALLER_SAVED_REGS \
6218 ); \
6219 lval = (__typeof__(lval)) _res; \
6220 } while (0)
6221
6222 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6223 arg6,arg7,arg8,arg9,arg10, \
6224 arg11,arg12) \
6225 do { \
6226 volatile OrigFn _orig = (orig); \
6227 volatile unsigned long _argvec[13]; \
6228 volatile unsigned long _res; \
6229 _argvec[0] = (unsigned long)_orig.nraddr; \
6230 _argvec[1] = (unsigned long)(arg1); \
6231 _argvec[2] = (unsigned long)(arg2); \
6232 _argvec[3] = (unsigned long)(arg3); \
6233 _argvec[4] = (unsigned long)(arg4); \
6234 _argvec[5] = (unsigned long)(arg5); \
6235 _argvec[6] = (unsigned long)(arg6); \
6236 _argvec[7] = (unsigned long)(arg7); \
6237 _argvec[8] = (unsigned long)(arg8); \
6238 _argvec[9] = (unsigned long)(arg9); \
6239 _argvec[10] = (unsigned long)(arg10); \
6240 _argvec[11] = (unsigned long)(arg11); \
6241 _argvec[12] = (unsigned long)(arg12); \
6242 __asm__ volatile( \
6243 "addiu $sp, $sp, -16 \n\t" \
6244 "lw $t9,36(%1) \n\t" \
6245 "sw $t9, 0($sp) \n\t" \
6246 "lw $t9,40(%1) \n\t" \
6247 "sw $t9, 4($sp) \n\t" \
6248 "lw $t9,44(%1) \n\t" \
6249 "sw $t9, 8($sp) \n\t" \
6250 "lw $t9,48(%1) \n\t" \
6251 "sw $t9,12($sp) \n\t" \
6252 "lw $t9, 0(%1) \n\t" \
6253 "lw $a0, 4(%1) \n\t" \
6254 "lw $a1, 8(%1) \n\t" \
6255 "lw $a2,12(%1) \n\t" \
6256 "lw $a3,16(%1) \n\t" \
6257 "lw $a4,20(%1) \n\t" \
6258 "lw $a5,24(%1) \n\t" \
6259 "lw $a6,28(%1) \n\t" \
6260 "lw $a7,32(%1) \n\t" \
6261 VALGRIND_CALL_NOREDIR_T9 \
6262 "move %0, $a0 \n\t" \
6263 "addiu $sp, $sp, 16 \n\t" \
6264 : "=r" (_res) \
6265 : "r" (&_argvec[0]) \
6266 : "memory", __CALLER_SAVED_REGS \
6267 ); \
6268 lval = (__typeof__(lval)) _res; \
6269 } while (0)
6270
6271 #endif
6272
6273
6274
6275 #if defined(PLAT_mips64_linux)
6276
6277
6278 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
6279 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
6280 "$25", "$31"
6281
6282
6283
6284
6285 #define MIPS64_LONG2REG_CAST(x) ((long long)(long)(x))
6286
6287 #define CALL_FN_W_v(lval, orig) \
6288 do { \
6289 volatile OrigFn _orig = (orig); \
6290 volatile unsigned long long _argvec[1]; \
6291 volatile unsigned long long _res; \
6292 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6293 __asm__ volatile( \
6294 "ld $25, 0(%1)\n\t" \
6295 VALGRIND_CALL_NOREDIR_T9 \
6296 "move %0, $2\n" \
6297 : "=r" (_res) \
6298 : "0" (&_argvec[0]) \
6299 : "memory", __CALLER_SAVED_REGS \
6300 ); \
6301 lval = (__typeof__(lval)) (long)_res; \
6302 } while (0)
6303
6304 #define CALL_FN_W_W(lval, orig, arg1) \
6305 do { \
6306 volatile OrigFn _orig = (orig); \
6307 volatile unsigned long long _argvec[2]; \
6308 volatile unsigned long long _res; \
6309 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6310 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6311 __asm__ volatile( \
6312 "ld $4, 8(%1)\n\t" \
6313 "ld $25, 0(%1)\n\t" \
6314 VALGRIND_CALL_NOREDIR_T9 \
6315 "move %0, $2\n" \
6316 : "=r" (_res) \
6317 : "r" (&_argvec[0]) \
6318 : "memory", __CALLER_SAVED_REGS \
6319 ); \
6320 lval = (__typeof__(lval)) (long)_res; \
6321 } while (0)
6322
6323 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
6324 do { \
6325 volatile OrigFn _orig = (orig); \
6326 volatile unsigned long long _argvec[3]; \
6327 volatile unsigned long long _res; \
6328 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6329 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6330 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6331 __asm__ volatile( \
6332 "ld $4, 8(%1)\n\t" \
6333 "ld $5, 16(%1)\n\t" \
6334 "ld $25, 0(%1)\n\t" \
6335 VALGRIND_CALL_NOREDIR_T9 \
6336 "move %0, $2\n" \
6337 : "=r" (_res) \
6338 : "r" (&_argvec[0]) \
6339 : "memory", __CALLER_SAVED_REGS \
6340 ); \
6341 lval = (__typeof__(lval)) (long)_res; \
6342 } while (0)
6343
6344
6345 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
6346 do { \
6347 volatile OrigFn _orig = (orig); \
6348 volatile unsigned long long _argvec[4]; \
6349 volatile unsigned long long _res; \
6350 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6351 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6352 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6353 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6354 __asm__ volatile( \
6355 "ld $4, 8(%1)\n\t" \
6356 "ld $5, 16(%1)\n\t" \
6357 "ld $6, 24(%1)\n\t" \
6358 "ld $25, 0(%1)\n\t" \
6359 VALGRIND_CALL_NOREDIR_T9 \
6360 "move %0, $2\n" \
6361 : "=r" (_res) \
6362 : "r" (&_argvec[0]) \
6363 : "memory", __CALLER_SAVED_REGS \
6364 ); \
6365 lval = (__typeof__(lval)) (long)_res; \
6366 } while (0)
6367
6368 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
6369 do { \
6370 volatile OrigFn _orig = (orig); \
6371 volatile unsigned long long _argvec[5]; \
6372 volatile unsigned long long _res; \
6373 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6374 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6375 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6376 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6377 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6378 __asm__ volatile( \
6379 "ld $4, 8(%1)\n\t" \
6380 "ld $5, 16(%1)\n\t" \
6381 "ld $6, 24(%1)\n\t" \
6382 "ld $7, 32(%1)\n\t" \
6383 "ld $25, 0(%1)\n\t" \
6384 VALGRIND_CALL_NOREDIR_T9 \
6385 "move %0, $2\n" \
6386 : "=r" (_res) \
6387 : "r" (&_argvec[0]) \
6388 : "memory", __CALLER_SAVED_REGS \
6389 ); \
6390 lval = (__typeof__(lval)) (long)_res; \
6391 } while (0)
6392
6393 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
6394 do { \
6395 volatile OrigFn _orig = (orig); \
6396 volatile unsigned long long _argvec[6]; \
6397 volatile unsigned long long _res; \
6398 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6399 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6400 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6401 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6402 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6403 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6404 __asm__ volatile( \
6405 "ld $4, 8(%1)\n\t" \
6406 "ld $5, 16(%1)\n\t" \
6407 "ld $6, 24(%1)\n\t" \
6408 "ld $7, 32(%1)\n\t" \
6409 "ld $8, 40(%1)\n\t" \
6410 "ld $25, 0(%1)\n\t" \
6411 VALGRIND_CALL_NOREDIR_T9 \
6412 "move %0, $2\n" \
6413 : "=r" (_res) \
6414 : "r" (&_argvec[0]) \
6415 : "memory", __CALLER_SAVED_REGS \
6416 ); \
6417 lval = (__typeof__(lval)) (long)_res; \
6418 } while (0)
6419
6420 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
6421 do { \
6422 volatile OrigFn _orig = (orig); \
6423 volatile unsigned long long _argvec[7]; \
6424 volatile unsigned long long _res; \
6425 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6426 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6427 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6428 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6429 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6430 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6431 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6432 __asm__ volatile( \
6433 "ld $4, 8(%1)\n\t" \
6434 "ld $5, 16(%1)\n\t" \
6435 "ld $6, 24(%1)\n\t" \
6436 "ld $7, 32(%1)\n\t" \
6437 "ld $8, 40(%1)\n\t" \
6438 "ld $9, 48(%1)\n\t" \
6439 "ld $25, 0(%1)\n\t" \
6440 VALGRIND_CALL_NOREDIR_T9 \
6441 "move %0, $2\n" \
6442 : "=r" (_res) \
6443 : "r" (&_argvec[0]) \
6444 : "memory", __CALLER_SAVED_REGS \
6445 ); \
6446 lval = (__typeof__(lval)) (long)_res; \
6447 } while (0)
6448
6449 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6450 arg7) \
6451 do { \
6452 volatile OrigFn _orig = (orig); \
6453 volatile unsigned long long _argvec[8]; \
6454 volatile unsigned long long _res; \
6455 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6456 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6457 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6458 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6459 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6460 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6461 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6462 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6463 __asm__ volatile( \
6464 "ld $4, 8(%1)\n\t" \
6465 "ld $5, 16(%1)\n\t" \
6466 "ld $6, 24(%1)\n\t" \
6467 "ld $7, 32(%1)\n\t" \
6468 "ld $8, 40(%1)\n\t" \
6469 "ld $9, 48(%1)\n\t" \
6470 "ld $10, 56(%1)\n\t" \
6471 "ld $25, 0(%1) \n\t" \
6472 VALGRIND_CALL_NOREDIR_T9 \
6473 "move %0, $2\n" \
6474 : "=r" (_res) \
6475 : "r" (&_argvec[0]) \
6476 : "memory", __CALLER_SAVED_REGS \
6477 ); \
6478 lval = (__typeof__(lval)) (long)_res; \
6479 } while (0)
6480
6481 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6482 arg7,arg8) \
6483 do { \
6484 volatile OrigFn _orig = (orig); \
6485 volatile unsigned long long _argvec[9]; \
6486 volatile unsigned long long _res; \
6487 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6488 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6489 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6490 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6491 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6492 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6493 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6494 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6495 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6496 __asm__ volatile( \
6497 "ld $4, 8(%1)\n\t" \
6498 "ld $5, 16(%1)\n\t" \
6499 "ld $6, 24(%1)\n\t" \
6500 "ld $7, 32(%1)\n\t" \
6501 "ld $8, 40(%1)\n\t" \
6502 "ld $9, 48(%1)\n\t" \
6503 "ld $10, 56(%1)\n\t" \
6504 "ld $11, 64(%1)\n\t" \
6505 "ld $25, 0(%1) \n\t" \
6506 VALGRIND_CALL_NOREDIR_T9 \
6507 "move %0, $2\n" \
6508 : "=r" (_res) \
6509 : "r" (&_argvec[0]) \
6510 : "memory", __CALLER_SAVED_REGS \
6511 ); \
6512 lval = (__typeof__(lval)) (long)_res; \
6513 } while (0)
6514
6515 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6516 arg7,arg8,arg9) \
6517 do { \
6518 volatile OrigFn _orig = (orig); \
6519 volatile unsigned long long _argvec[10]; \
6520 volatile unsigned long long _res; \
6521 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6522 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6523 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6524 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6525 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6526 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6527 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6528 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6529 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6530 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6531 __asm__ volatile( \
6532 "dsubu $29, $29, 8\n\t" \
6533 "ld $4, 72(%1)\n\t" \
6534 "sd $4, 0($29)\n\t" \
6535 "ld $4, 8(%1)\n\t" \
6536 "ld $5, 16(%1)\n\t" \
6537 "ld $6, 24(%1)\n\t" \
6538 "ld $7, 32(%1)\n\t" \
6539 "ld $8, 40(%1)\n\t" \
6540 "ld $9, 48(%1)\n\t" \
6541 "ld $10, 56(%1)\n\t" \
6542 "ld $11, 64(%1)\n\t" \
6543 "ld $25, 0(%1)\n\t" \
6544 VALGRIND_CALL_NOREDIR_T9 \
6545 "daddu $29, $29, 8\n\t" \
6546 "move %0, $2\n" \
6547 : "=r" (_res) \
6548 : "r" (&_argvec[0]) \
6549 : "memory", __CALLER_SAVED_REGS \
6550 ); \
6551 lval = (__typeof__(lval)) (long)_res; \
6552 } while (0)
6553
6554 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6555 arg7,arg8,arg9,arg10) \
6556 do { \
6557 volatile OrigFn _orig = (orig); \
6558 volatile unsigned long long _argvec[11]; \
6559 volatile unsigned long long _res; \
6560 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6561 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6562 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6563 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6564 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6565 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6566 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6567 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6568 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6569 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6570 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
6571 __asm__ volatile( \
6572 "dsubu $29, $29, 16\n\t" \
6573 "ld $4, 72(%1)\n\t" \
6574 "sd $4, 0($29)\n\t" \
6575 "ld $4, 80(%1)\n\t" \
6576 "sd $4, 8($29)\n\t" \
6577 "ld $4, 8(%1)\n\t" \
6578 "ld $5, 16(%1)\n\t" \
6579 "ld $6, 24(%1)\n\t" \
6580 "ld $7, 32(%1)\n\t" \
6581 "ld $8, 40(%1)\n\t" \
6582 "ld $9, 48(%1)\n\t" \
6583 "ld $10, 56(%1)\n\t" \
6584 "ld $11, 64(%1)\n\t" \
6585 "ld $25, 0(%1)\n\t" \
6586 VALGRIND_CALL_NOREDIR_T9 \
6587 "daddu $29, $29, 16\n\t" \
6588 "move %0, $2\n" \
6589 : "=r" (_res) \
6590 : "r" (&_argvec[0]) \
6591 : "memory", __CALLER_SAVED_REGS \
6592 ); \
6593 lval = (__typeof__(lval)) (long)_res; \
6594 } while (0)
6595
6596 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6597 arg6,arg7,arg8,arg9,arg10, \
6598 arg11) \
6599 do { \
6600 volatile OrigFn _orig = (orig); \
6601 volatile unsigned long long _argvec[12]; \
6602 volatile unsigned long long _res; \
6603 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6604 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6605 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6606 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6607 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6608 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6609 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6610 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6611 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6612 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6613 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
6614 _argvec[11] = MIPS64_LONG2REG_CAST(arg11); \
6615 __asm__ volatile( \
6616 "dsubu $29, $29, 24\n\t" \
6617 "ld $4, 72(%1)\n\t" \
6618 "sd $4, 0($29)\n\t" \
6619 "ld $4, 80(%1)\n\t" \
6620 "sd $4, 8($29)\n\t" \
6621 "ld $4, 88(%1)\n\t" \
6622 "sd $4, 16($29)\n\t" \
6623 "ld $4, 8(%1)\n\t" \
6624 "ld $5, 16(%1)\n\t" \
6625 "ld $6, 24(%1)\n\t" \
6626 "ld $7, 32(%1)\n\t" \
6627 "ld $8, 40(%1)\n\t" \
6628 "ld $9, 48(%1)\n\t" \
6629 "ld $10, 56(%1)\n\t" \
6630 "ld $11, 64(%1)\n\t" \
6631 "ld $25, 0(%1)\n\t" \
6632 VALGRIND_CALL_NOREDIR_T9 \
6633 "daddu $29, $29, 24\n\t" \
6634 "move %0, $2\n" \
6635 : "=r" (_res) \
6636 : "r" (&_argvec[0]) \
6637 : "memory", __CALLER_SAVED_REGS \
6638 ); \
6639 lval = (__typeof__(lval)) (long)_res; \
6640 } while (0)
6641
6642 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6643 arg6,arg7,arg8,arg9,arg10, \
6644 arg11,arg12) \
6645 do { \
6646 volatile OrigFn _orig = (orig); \
6647 volatile unsigned long long _argvec[13]; \
6648 volatile unsigned long long _res; \
6649 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6650 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6651 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6652 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6653 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6654 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6655 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6656 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6657 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6658 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6659 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
6660 _argvec[11] = MIPS64_LONG2REG_CAST(arg11); \
6661 _argvec[12] = MIPS64_LONG2REG_CAST(arg12); \
6662 __asm__ volatile( \
6663 "dsubu $29, $29, 32\n\t" \
6664 "ld $4, 72(%1)\n\t" \
6665 "sd $4, 0($29)\n\t" \
6666 "ld $4, 80(%1)\n\t" \
6667 "sd $4, 8($29)\n\t" \
6668 "ld $4, 88(%1)\n\t" \
6669 "sd $4, 16($29)\n\t" \
6670 "ld $4, 96(%1)\n\t" \
6671 "sd $4, 24($29)\n\t" \
6672 "ld $4, 8(%1)\n\t" \
6673 "ld $5, 16(%1)\n\t" \
6674 "ld $6, 24(%1)\n\t" \
6675 "ld $7, 32(%1)\n\t" \
6676 "ld $8, 40(%1)\n\t" \
6677 "ld $9, 48(%1)\n\t" \
6678 "ld $10, 56(%1)\n\t" \
6679 "ld $11, 64(%1)\n\t" \
6680 "ld $25, 0(%1)\n\t" \
6681 VALGRIND_CALL_NOREDIR_T9 \
6682 "daddu $29, $29, 32\n\t" \
6683 "move %0, $2\n" \
6684 : "=r" (_res) \
6685 : "r" (&_argvec[0]) \
6686 : "memory", __CALLER_SAVED_REGS \
6687 ); \
6688 lval = (__typeof__(lval)) (long)_res; \
6689 } while (0)
6690
6691 #endif
6692
6693
6694
6695 #if defined(PLAT_riscv64_linux)
6696
6697
6698 #define __CALLER_SAVED_REGS \
6699 "ra", \
6700 "t0", "t1", "t2", "t3", "t4", "t5", "t6", \
6701 "a0", "a1", "a2", "a3", "a4", "a5", "a6", "a7", \
6702 "ft0", "ft1", "ft2", "ft3", "ft4", "ft5", "ft6", "ft7", \
6703 "ft8", "ft9", "ft10", "ft11", \
6704 "fa0", "fa1", "fa2", "fa3", "fa4", "fa5", "fa6", "fa7"
6705
6706
6707
6708 #define VALGRIND_ALIGN_STACK \
6709 "mv s11, sp\n\t" \
6710 "andi sp, sp, 0xfffffffffffffff0\n\t"
6711 #define VALGRIND_RESTORE_STACK \
6712 "mv sp, s11\n\t"
6713
6714
6715
6716
6717 #define CALL_FN_W_v(lval, orig) \
6718 do { \
6719 volatile OrigFn _orig = (orig); \
6720 volatile unsigned long _argvec[1]; \
6721 volatile unsigned long _res; \
6722 _argvec[0] = (unsigned long)_orig.nraddr; \
6723 __asm__ volatile( \
6724 VALGRIND_ALIGN_STACK \
6725 "ld t0, 0(%1) \n\t" \
6726 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0 \
6727 VALGRIND_RESTORE_STACK \
6728 "mv %0, a0\n" \
6729 : "=r" (_res) \
6730 : "0" (&_argvec[0]) \
6731 : "memory", __CALLER_SAVED_REGS, "s11" \
6732 ); \
6733 lval = (__typeof__(lval)) _res; \
6734 } while (0)
6735
6736 #define CALL_FN_W_W(lval, orig, arg1) \
6737 do { \
6738 volatile OrigFn _orig = (orig); \
6739 volatile unsigned long _argvec[2]; \
6740 volatile unsigned long _res; \
6741 _argvec[0] = (unsigned long)_orig.nraddr; \
6742 _argvec[1] = (unsigned long)(arg1); \
6743 __asm__ volatile( \
6744 VALGRIND_ALIGN_STACK \
6745 "ld a0, 8(%1) \n\t" \
6746 "ld t0, 0(%1) \n\t" \
6747 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0 \
6748 VALGRIND_RESTORE_STACK \
6749 "mv %0, a0\n" \
6750 : "=r" (_res) \
6751 : "0" (&_argvec[0]) \
6752 : "memory", __CALLER_SAVED_REGS, "s11" \
6753 ); \
6754 lval = (__typeof__(lval)) _res; \
6755 } while (0)
6756
6757 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
6758 do { \
6759 volatile OrigFn _orig = (orig); \
6760 volatile unsigned long _argvec[3]; \
6761 volatile unsigned long _res; \
6762 _argvec[0] = (unsigned long)_orig.nraddr; \
6763 _argvec[1] = (unsigned long)(arg1); \
6764 _argvec[2] = (unsigned long)(arg2); \
6765 __asm__ volatile( \
6766 VALGRIND_ALIGN_STACK \
6767 "ld a0, 8(%1) \n\t" \
6768 "ld a1, 16(%1) \n\t" \
6769 "ld t0, 0(%1) \n\t" \
6770 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0 \
6771 VALGRIND_RESTORE_STACK \
6772 "mv %0, a0\n" \
6773 : "=r" (_res) \
6774 : "0" (&_argvec[0]) \
6775 : "memory", __CALLER_SAVED_REGS, "s11" \
6776 ); \
6777 lval = (__typeof__(lval)) _res; \
6778 } while (0)
6779
6780 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
6781 do { \
6782 volatile OrigFn _orig = (orig); \
6783 volatile unsigned long _argvec[4]; \
6784 volatile unsigned long _res; \
6785 _argvec[0] = (unsigned long)_orig.nraddr; \
6786 _argvec[1] = (unsigned long)(arg1); \
6787 _argvec[2] = (unsigned long)(arg2); \
6788 _argvec[3] = (unsigned long)(arg3); \
6789 __asm__ volatile( \
6790 VALGRIND_ALIGN_STACK \
6791 "ld a0, 8(%1) \n\t" \
6792 "ld a1, 16(%1) \n\t" \
6793 "ld a2, 24(%1) \n\t" \
6794 "ld t0, 0(%1) \n\t" \
6795 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0 \
6796 VALGRIND_RESTORE_STACK \
6797 "mv %0, a0\n" \
6798 : "=r" (_res) \
6799 : "0" (&_argvec[0]) \
6800 : "memory", __CALLER_SAVED_REGS, "s11" \
6801 ); \
6802 lval = (__typeof__(lval)) _res; \
6803 } while (0)
6804
6805 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
6806 do { \
6807 volatile OrigFn _orig = (orig); \
6808 volatile unsigned long _argvec[5]; \
6809 volatile unsigned long _res; \
6810 _argvec[0] = (unsigned long)_orig.nraddr; \
6811 _argvec[1] = (unsigned long)(arg1); \
6812 _argvec[2] = (unsigned long)(arg2); \
6813 _argvec[3] = (unsigned long)(arg3); \
6814 _argvec[4] = (unsigned long)(arg4); \
6815 __asm__ volatile( \
6816 VALGRIND_ALIGN_STACK \
6817 "ld a0, 8(%1) \n\t" \
6818 "ld a1, 16(%1) \n\t" \
6819 "ld a2, 24(%1) \n\t" \
6820 "ld a3, 32(%1) \n\t" \
6821 "ld t0, 0(%1) \n\t" \
6822 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0 \
6823 VALGRIND_RESTORE_STACK \
6824 "mv %0, a0" \
6825 : "=r" (_res) \
6826 : "0" (&_argvec[0]) \
6827 : "memory", __CALLER_SAVED_REGS, "s11" \
6828 ); \
6829 lval = (__typeof__(lval)) _res; \
6830 } while (0)
6831
6832 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
6833 do { \
6834 volatile OrigFn _orig = (orig); \
6835 volatile unsigned long _argvec[6]; \
6836 volatile unsigned long _res; \
6837 _argvec[0] = (unsigned long)_orig.nraddr; \
6838 _argvec[1] = (unsigned long)(arg1); \
6839 _argvec[2] = (unsigned long)(arg2); \
6840 _argvec[3] = (unsigned long)(arg3); \
6841 _argvec[4] = (unsigned long)(arg4); \
6842 _argvec[5] = (unsigned long)(arg5); \
6843 __asm__ volatile( \
6844 VALGRIND_ALIGN_STACK \
6845 "ld a0, 8(%1) \n\t" \
6846 "ld a1, 16(%1) \n\t" \
6847 "ld a2, 24(%1) \n\t" \
6848 "ld a3, 32(%1) \n\t" \
6849 "ld a4, 40(%1) \n\t" \
6850 "ld t0, 0(%1) \n\t" \
6851 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0 \
6852 VALGRIND_RESTORE_STACK \
6853 "mv %0, a0" \
6854 : "=r" (_res) \
6855 : "0" (&_argvec[0]) \
6856 : "memory", __CALLER_SAVED_REGS, "s11" \
6857 ); \
6858 lval = (__typeof__(lval)) _res; \
6859 } while (0)
6860
6861 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
6862 do { \
6863 volatile OrigFn _orig = (orig); \
6864 volatile unsigned long _argvec[7]; \
6865 volatile unsigned long _res; \
6866 _argvec[0] = (unsigned long)_orig.nraddr; \
6867 _argvec[1] = (unsigned long)(arg1); \
6868 _argvec[2] = (unsigned long)(arg2); \
6869 _argvec[3] = (unsigned long)(arg3); \
6870 _argvec[4] = (unsigned long)(arg4); \
6871 _argvec[5] = (unsigned long)(arg5); \
6872 _argvec[6] = (unsigned long)(arg6); \
6873 __asm__ volatile( \
6874 VALGRIND_ALIGN_STACK \
6875 "ld a0, 8(%1) \n\t" \
6876 "ld a1, 16(%1) \n\t" \
6877 "ld a2, 24(%1) \n\t" \
6878 "ld a3, 32(%1) \n\t" \
6879 "ld a4, 40(%1) \n\t" \
6880 "ld a5, 48(%1) \n\t" \
6881 "ld t0, 0(%1) \n\t" \
6882 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0 \
6883 VALGRIND_RESTORE_STACK \
6884 "mv %0, a0" \
6885 : "=r" (_res) \
6886 : "0" (&_argvec[0]) \
6887 : "memory", __CALLER_SAVED_REGS, "s11" \
6888 ); \
6889 lval = (__typeof__(lval)) _res; \
6890 } while (0)
6891
6892 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6893 arg7) \
6894 do { \
6895 volatile OrigFn _orig = (orig); \
6896 volatile unsigned long _argvec[8]; \
6897 volatile unsigned long _res; \
6898 _argvec[0] = (unsigned long)_orig.nraddr; \
6899 _argvec[1] = (unsigned long)(arg1); \
6900 _argvec[2] = (unsigned long)(arg2); \
6901 _argvec[3] = (unsigned long)(arg3); \
6902 _argvec[4] = (unsigned long)(arg4); \
6903 _argvec[5] = (unsigned long)(arg5); \
6904 _argvec[6] = (unsigned long)(arg6); \
6905 _argvec[7] = (unsigned long)(arg7); \
6906 __asm__ volatile( \
6907 VALGRIND_ALIGN_STACK \
6908 "ld a0, 8(%1) \n\t" \
6909 "ld a1, 16(%1) \n\t" \
6910 "ld a2, 24(%1) \n\t" \
6911 "ld a3, 32(%1) \n\t" \
6912 "ld a4, 40(%1) \n\t" \
6913 "ld a5, 48(%1) \n\t" \
6914 "ld a6, 56(%1) \n\t" \
6915 "ld t0, 0(%1) \n\t" \
6916 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0 \
6917 VALGRIND_RESTORE_STACK \
6918 "mv %0, a0" \
6919 : "=r" (_res) \
6920 : "0" (&_argvec[0]) \
6921 : "memory", __CALLER_SAVED_REGS, "s11" \
6922 ); \
6923 lval = (__typeof__(lval)) _res; \
6924 } while (0)
6925
6926 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6927 arg7,arg8) \
6928 do { \
6929 volatile OrigFn _orig = (orig); \
6930 volatile unsigned long _argvec[9]; \
6931 volatile unsigned long _res; \
6932 _argvec[0] = (unsigned long)_orig.nraddr; \
6933 _argvec[1] = (unsigned long)(arg1); \
6934 _argvec[2] = (unsigned long)(arg2); \
6935 _argvec[3] = (unsigned long)(arg3); \
6936 _argvec[4] = (unsigned long)(arg4); \
6937 _argvec[5] = (unsigned long)(arg5); \
6938 _argvec[6] = (unsigned long)(arg6); \
6939 _argvec[7] = (unsigned long)(arg7); \
6940 _argvec[8] = (unsigned long)(arg8); \
6941 __asm__ volatile( \
6942 VALGRIND_ALIGN_STACK \
6943 "ld a0, 8(%1) \n\t" \
6944 "ld a1, 16(%1) \n\t" \
6945 "ld a2, 24(%1) \n\t" \
6946 "ld a3, 32(%1) \n\t" \
6947 "ld a4, 40(%1) \n\t" \
6948 "ld a5, 48(%1) \n\t" \
6949 "ld a6, 56(%1) \n\t" \
6950 "ld a7, 64(%1) \n\t" \
6951 "ld t0, 0(%1) \n\t" \
6952 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0 \
6953 VALGRIND_RESTORE_STACK \
6954 "mv %0, a0" \
6955 : "=r" (_res) \
6956 : "0" (&_argvec[0]) \
6957 : "memory", __CALLER_SAVED_REGS, "s11" \
6958 ); \
6959 lval = (__typeof__(lval)) _res; \
6960 } while (0)
6961
6962 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6963 arg7,arg8,arg9) \
6964 do { \
6965 volatile OrigFn _orig = (orig); \
6966 volatile unsigned long _argvec[10]; \
6967 volatile unsigned long _res; \
6968 _argvec[0] = (unsigned long)_orig.nraddr; \
6969 _argvec[1] = (unsigned long)(arg1); \
6970 _argvec[2] = (unsigned long)(arg2); \
6971 _argvec[3] = (unsigned long)(arg3); \
6972 _argvec[4] = (unsigned long)(arg4); \
6973 _argvec[5] = (unsigned long)(arg5); \
6974 _argvec[6] = (unsigned long)(arg6); \
6975 _argvec[7] = (unsigned long)(arg7); \
6976 _argvec[8] = (unsigned long)(arg8); \
6977 _argvec[9] = (unsigned long)(arg9); \
6978 __asm__ volatile( \
6979 VALGRIND_ALIGN_STACK \
6980 "addi sp, sp, -16 \n\t" \
6981 "ld a0, 8(%1) \n\t" \
6982 "ld a1, 16(%1) \n\t" \
6983 "ld a2, 24(%1) \n\t" \
6984 "ld a3, 32(%1) \n\t" \
6985 "ld a4, 40(%1) \n\t" \
6986 "ld a5, 48(%1) \n\t" \
6987 "ld a6, 56(%1) \n\t" \
6988 "ld a7, 64(%1) \n\t" \
6989 "ld t0, 72(%1) \n\t" \
6990 "sd t0, 0(sp) \n\t" \
6991 "ld t0, 0(%1) \n\t" \
6992 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0 \
6993 VALGRIND_RESTORE_STACK \
6994 "mv %0, a0" \
6995 : "=r" (_res) \
6996 : "0" (&_argvec[0]) \
6997 : "memory", __CALLER_SAVED_REGS, "s11" \
6998 ); \
6999 lval = (__typeof__(lval)) _res; \
7000 } while (0)
7001
7002 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
7003 arg7,arg8,arg9,arg10) \
7004 do { \
7005 volatile OrigFn _orig = (orig); \
7006 volatile unsigned long _argvec[11]; \
7007 volatile unsigned long _res; \
7008 _argvec[0] = (unsigned long)_orig.nraddr; \
7009 _argvec[1] = (unsigned long)(arg1); \
7010 _argvec[2] = (unsigned long)(arg2); \
7011 _argvec[3] = (unsigned long)(arg3); \
7012 _argvec[4] = (unsigned long)(arg4); \
7013 _argvec[5] = (unsigned long)(arg5); \
7014 _argvec[6] = (unsigned long)(arg6); \
7015 _argvec[7] = (unsigned long)(arg7); \
7016 _argvec[8] = (unsigned long)(arg8); \
7017 _argvec[9] = (unsigned long)(arg9); \
7018 _argvec[10] = (unsigned long)(arg10); \
7019 __asm__ volatile( \
7020 VALGRIND_ALIGN_STACK \
7021 "addi sp, sp, -16 \n\t" \
7022 "ld a0, 8(%1) \n\t" \
7023 "ld a1, 16(%1) \n\t" \
7024 "ld a2, 24(%1) \n\t" \
7025 "ld a3, 32(%1) \n\t" \
7026 "ld a4, 40(%1) \n\t" \
7027 "ld a5, 48(%1) \n\t" \
7028 "ld a6, 56(%1) \n\t" \
7029 "ld a7, 64(%1) \n\t" \
7030 "ld t0, 72(%1) \n\t" \
7031 "sd t0, 0(sp) \n\t" \
7032 "ld t0, 80(%1) \n\t" \
7033 "sd t0, 8(sp) \n\t" \
7034 "ld t0, 0(%1) \n\t" \
7035 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0 \
7036 VALGRIND_RESTORE_STACK \
7037 "mv %0, a0" \
7038 : "=r" (_res) \
7039 : "0" (&_argvec[0]) \
7040 : "memory", __CALLER_SAVED_REGS, "s11" \
7041 ); \
7042 lval = (__typeof__(lval)) _res; \
7043 } while (0)
7044
7045 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
7046 arg7,arg8,arg9,arg10,arg11) \
7047 do { \
7048 volatile OrigFn _orig = (orig); \
7049 volatile unsigned long _argvec[12]; \
7050 volatile unsigned long _res; \
7051 _argvec[0] = (unsigned long)_orig.nraddr; \
7052 _argvec[1] = (unsigned long)(arg1); \
7053 _argvec[2] = (unsigned long)(arg2); \
7054 _argvec[3] = (unsigned long)(arg3); \
7055 _argvec[4] = (unsigned long)(arg4); \
7056 _argvec[5] = (unsigned long)(arg5); \
7057 _argvec[6] = (unsigned long)(arg6); \
7058 _argvec[7] = (unsigned long)(arg7); \
7059 _argvec[8] = (unsigned long)(arg8); \
7060 _argvec[9] = (unsigned long)(arg9); \
7061 _argvec[10] = (unsigned long)(arg10); \
7062 _argvec[11] = (unsigned long)(arg11); \
7063 __asm__ volatile( \
7064 VALGRIND_ALIGN_STACK \
7065 "addi sp, sp, -32 \n\t" \
7066 "ld a0, 8(%1) \n\t" \
7067 "ld a1, 16(%1) \n\t" \
7068 "ld a2, 24(%1) \n\t" \
7069 "ld a3, 32(%1) \n\t" \
7070 "ld a4, 40(%1) \n\t" \
7071 "ld a5, 48(%1) \n\t" \
7072 "ld a6, 56(%1) \n\t" \
7073 "ld a7, 64(%1) \n\t" \
7074 "ld t0, 72(%1) \n\t" \
7075 "sd t0, 0(sp) \n\t" \
7076 "ld t0, 80(%1) \n\t" \
7077 "sd t0, 8(sp) \n\t" \
7078 "ld t0, 88(%1) \n\t" \
7079 "sd t0, 16(sp) \n\t" \
7080 "ld t0, 0(%1) \n\t" \
7081 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0 \
7082 VALGRIND_RESTORE_STACK \
7083 "mv %0, a0" \
7084 : "=r" (_res) \
7085 : "0" (&_argvec[0]) \
7086 : "memory", __CALLER_SAVED_REGS, "s11" \
7087 ); \
7088 lval = (__typeof__(lval)) _res; \
7089 } while (0)
7090
7091 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
7092 arg7,arg8,arg9,arg10,arg11, \
7093 arg12) \
7094 do { \
7095 volatile OrigFn _orig = (orig); \
7096 volatile unsigned long _argvec[13]; \
7097 volatile unsigned long _res; \
7098 _argvec[0] = (unsigned long)_orig.nraddr; \
7099 _argvec[1] = (unsigned long)(arg1); \
7100 _argvec[2] = (unsigned long)(arg2); \
7101 _argvec[3] = (unsigned long)(arg3); \
7102 _argvec[4] = (unsigned long)(arg4); \
7103 _argvec[5] = (unsigned long)(arg5); \
7104 _argvec[6] = (unsigned long)(arg6); \
7105 _argvec[7] = (unsigned long)(arg7); \
7106 _argvec[8] = (unsigned long)(arg8); \
7107 _argvec[9] = (unsigned long)(arg9); \
7108 _argvec[10] = (unsigned long)(arg10); \
7109 _argvec[11] = (unsigned long)(arg11); \
7110 _argvec[12] = (unsigned long)(arg12); \
7111 __asm__ volatile( \
7112 VALGRIND_ALIGN_STACK \
7113 "addi sp, sp, -32 \n\t" \
7114 "ld a0, 8(%1) \n\t" \
7115 "ld a1, 16(%1) \n\t" \
7116 "ld a2, 24(%1) \n\t" \
7117 "ld a3, 32(%1) \n\t" \
7118 "ld a4, 40(%1) \n\t" \
7119 "ld a5, 48(%1) \n\t" \
7120 "ld a6, 56(%1) \n\t" \
7121 "ld a7, 64(%1) \n\t" \
7122 "ld t0, 72(%1) \n\t" \
7123 "sd t0, 0(sp) \n\t" \
7124 "ld t0, 80(%1) \n\t" \
7125 "sd t0, 8(sp) \n\t" \
7126 "ld t0, 88(%1) \n\t" \
7127 "sd t0, 16(sp) \n\t" \
7128 "ld t0, 96(%1) \n\t" \
7129 "sd t0, 24(sp) \n\t" \
7130 "ld t0, 0(%1) \n\t" \
7131 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0 \
7132 VALGRIND_RESTORE_STACK \
7133 "mv %0, a0" \
7134 : "=r" (_res) \
7135 : "0" (&_argvec[0]) \
7136 : "memory", __CALLER_SAVED_REGS, "s11" \
7137 ); \
7138 lval = (__typeof__(lval)) _res; \
7139 } while (0)
7140
7141 #endif
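/* Usage sketch (added commentary, not part of the original header): the
   per-platform CALL_FN_W_* macros above are the "call the original
   function" half of Valgrind's function-wrapping machinery.  A wrapper
   built with the wrapping macros defined earlier in this header
   (I_WRAP_SONAME_FNNAME_ZU, OrigFn, VALGRIND_GET_ORIG_FN) typically looks
   like the example below, along the lines of the one in the Valgrind
   manual; the wrapped function 'foo' and the printf calls are
   illustrative only:

      #include <stdio.h>
      #include "valgrind.h"

      // Wrapper for 'int foo(int, int)' found in an unnamed shared object.
      int I_WRAP_SONAME_FNNAME_ZU(NONE, foo)(int x, int y)
      {
         int    result;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);          // fetch address of the real foo
         printf("foo wrapper: args %d %d\n", x, y);
         CALL_FN_W_WW(result, fn, x, y);    // call the real foo(x, y)
         printf("foo wrapper: result %d\n", result);
         return result;
      }

   CALL_FN_W_WW is the two-word-argument variant; the other CALL_FN_W_*
   macros follow the same pattern for 0..12 word-sized arguments, with the
   platform sections above supplying the matching register/stack shuffling. */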
7142
7143
7144
7145
7146
7147
7148
7149
7150
7151
7152
7153
7154
7155
7156
7157
7158 #define VG_USERREQ_TOOL_BASE(a,b) \
7159 ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
7160 #define VG_IS_TOOL_USERREQ(a, b, v) \
7161 (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
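/* Sketch (added note): VG_USERREQ_TOOL_BASE packs two characters into the
   top 16 bits of a request code, giving each tool its own private request
   number space, and VG_IS_TOOL_USERREQ tests whether a given request value
   falls in that space.  A tool header would typically start its own enum
   from the base value, e.g. (illustrative only, hypothetical tool 'XT'):

      typedef enum {
         MY_TOOL_USERREQ__DO_SOMETHING = VG_USERREQ_TOOL_BASE('X','T'),
         MY_TOOL_USERREQ__DO_OTHER          // base + 1, and so on
      } MyToolClientRequest;

   Because printable characters give a non-zero top byte, such codes can
   never collide with the core requests below, whose top 16 bits are zero. */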
7162
7163
7164
7165
7166
7167
7168 typedef
7169 enum { VG_USERREQ__RUNNING_ON_VALGRIND = 0x1001,
7170 VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,
7171
7172
7173
7174
7175
7176
7177 VG_USERREQ__CLIENT_CALL0 = 0x1101,
7178 VG_USERREQ__CLIENT_CALL1 = 0x1102,
7179 VG_USERREQ__CLIENT_CALL2 = 0x1103,
7180 VG_USERREQ__CLIENT_CALL3 = 0x1104,
7181
7182
7183
7184
7185 VG_USERREQ__COUNT_ERRORS = 0x1201,
7186
7187
7188
7189 VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,
7190
7191
7192
7193 VG_USERREQ__CLO_CHANGE = 0x1203,
7194
7195
7196
7197 VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
7198 VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
7199 VG_USERREQ__FREELIKE_BLOCK = 0x1302,
7200
7201 VG_USERREQ__CREATE_MEMPOOL = 0x1303,
7202 VG_USERREQ__DESTROY_MEMPOOL = 0x1304,
7203 VG_USERREQ__MEMPOOL_ALLOC = 0x1305,
7204 VG_USERREQ__MEMPOOL_FREE = 0x1306,
7205 VG_USERREQ__MEMPOOL_TRIM = 0x1307,
7206 VG_USERREQ__MOVE_MEMPOOL = 0x1308,
7207 VG_USERREQ__MEMPOOL_CHANGE = 0x1309,
7208 VG_USERREQ__MEMPOOL_EXISTS = 0x130a,
7209
7210
7211
7212
7213
7214
7215
7216
7217 VG_USERREQ__PRINTF = 0x1401,
7218 VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
7219
7220 VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
7221 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,
7222
7223
7224 VG_USERREQ__STACK_REGISTER = 0x1501,
7225 VG_USERREQ__STACK_DEREGISTER = 0x1502,
7226 VG_USERREQ__STACK_CHANGE = 0x1503,
7227
7228
7229 VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,
7230
7231
7232 VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,
7233
7234
7235
7236
7237
7238
7239 VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,
7240
7241
7242
7243
7244 VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901,
7245
7246
7247 VG_USERREQ__INNER_THREADS = 0x1902
7248 } Vg_ClientRequest;
7249
7250 #if !defined(__GNUC__)
7251 # define __extension__
7252 #endif
7253
7254
7255
7256
7257
7258
7259 #define RUNNING_ON_VALGRIND \
7260 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 , \
7261 VG_USERREQ__RUNNING_ON_VALGRIND, \
7262 0, 0, 0, 0, 0)
7263
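/* Usage sketch (added note): RUNNING_ON_VALGRIND evaluates to 0 when the
   program runs natively and to a non-zero value under Valgrind, so it is
   safe to leave in production builds.  Typical use (illustrative):

      if (RUNNING_ON_VALGRIND) {
         // e.g. shrink iteration counts of timing-sensitive self-tests
         iterations /= 100;
      }
*/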
7264
7265
7266
7267
7268
7269 #define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len) \
7270 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \
7271 _qzz_addr, _qzz_len, 0, 0, 0)
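/* Usage sketch (added note): a JIT, or any code that rewrites machine code
   in place, should tell Valgrind to drop its cached translations of the
   affected address range, otherwise stale translations may keep running.
   Illustrative only; 'code_buf' and 'code_len' are assumed names:

      regenerate_code(code_buf, code_len);
      VALGRIND_DISCARD_TRANSLATIONS(code_buf, code_len);
*/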
7272
7273 #define VALGRIND_INNER_THREADS(_qzz_addr) \
7274 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__INNER_THREADS, \
7275 _qzz_addr, 0, 0, 0, 0)
7276
7277
7278
7279
7280
7281
7282
7283 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
7284
7285
7286 static int VALGRIND_PRINTF(const char *format, ...)
7287 __attribute__((format(__printf__, 1, 2), __unused__));
7288 #endif
7289 static int
7290 #if defined(_MSC_VER)
7291 __inline
7292 #endif
7293 VALGRIND_PRINTF(const char *format, ...)
7294 {
7295 #if defined(NVALGRIND)
7296 (void)format;
7297 return 0;
7298 #else
7299 #if defined(_MSC_VER) || defined(__MINGW64__)
7300 uintptr_t _qzz_res;
7301 #else
7302 unsigned long _qzz_res;
7303 #endif
7304 va_list vargs;
7305 va_start(vargs, format);
7306 #if defined(_MSC_VER) || defined(__MINGW64__)
7307 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
7308 VG_USERREQ__PRINTF_VALIST_BY_REF,
7309 (uintptr_t)format,
7310 (uintptr_t)&vargs,
7311 0, 0, 0);
7312 #else
7313 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
7314 VG_USERREQ__PRINTF_VALIST_BY_REF,
7315 (unsigned long)format,
7316 (unsigned long)&vargs,
7317 0, 0, 0);
7318 #endif
7319 va_end(vargs);
7320 return (int)_qzz_res;
7321 #endif
7322 }
7323
7324 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
7325 static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
7326 __attribute__((format(__printf__, 1, 2), __unused__));
7327 #endif
7328 static int
7329 #if defined(_MSC_VER)
7330 __inline
7331 #endif
7332 VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
7333 {
7334 #if defined(NVALGRIND)
7335 (void)format;
7336 return 0;
7337 #else
7338 #if defined(_MSC_VER) || defined(__MINGW64__)
7339 uintptr_t _qzz_res;
7340 #else
7341 unsigned long _qzz_res;
7342 #endif
7343 va_list vargs;
7344 va_start(vargs, format);
7345 #if defined(_MSC_VER) || defined(__MINGW64__)
7346 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
7347 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
7348 (uintptr_t)format,
7349 (uintptr_t)&vargs,
7350 0, 0, 0);
7351 #else
7352 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
7353 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
7354 (unsigned long)format,
7355 (unsigned long)&vargs,
7356 0, 0, 0);
7357 #endif
7358 va_end(vargs);
7359 return (int)_qzz_res;
7360 #endif
7361 }
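/* Usage sketch (added note): both helpers print into the Valgrind log
   rather than to the program's stdout, and compile down to "return 0"
   when NVALGRIND is defined, as the definitions above show.
   VALGRIND_PRINTF_BACKTRACE additionally appends a stack trace of the
   calling thread.  Illustrative:

      VALGRIND_PRINTF("freeing %p (%lu bytes)\n", ptr, (unsigned long)len);
      VALGRIND_PRINTF_BACKTRACE("unexpected state %d\n", state);
*/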
7362
7363
7364
7365
7366
7367
7368
7369
7370
7371
7372
7373
7374
7375
7376
7377
7378
7379
7380
7381
7382
7383
7384
7385
7386
7387 #define VALGRIND_NON_SIMD_CALL0(_qyy_fn) \
7388 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 , \
7389 VG_USERREQ__CLIENT_CALL0, \
7390 _qyy_fn, \
7391 0, 0, 0, 0)
7392
7393 #define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1) \
7394 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 , \
7395 VG_USERREQ__CLIENT_CALL1, \
7396 _qyy_fn, \
7397 _qyy_arg1, 0, 0, 0)
7398
7399 #define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2) \
7400 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 , \
7401 VG_USERREQ__CLIENT_CALL2, \
7402 _qyy_fn, \
7403 _qyy_arg1, _qyy_arg2, 0, 0)
7404
7405 #define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
7406 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 , \
7407 VG_USERREQ__CLIENT_CALL3, \
7408 _qyy_fn, \
7409 _qyy_arg1, _qyy_arg2, \
7410 _qyy_arg3, 0)
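/* Usage sketch (added note): the NON_SIMD_CALL macros ask Valgrind to run
   a function of the client program on the real CPU instead of the
   simulated one.  The full header's documentation (stripped from this
   dump) notes that the current ThreadId is inserted as an extra first
   argument, so the callee must accept it.  Illustrative only:

      static long add_on_real_cpu(long tid, long a, long b)
      {
         (void)tid;                         // thread id supplied by Valgrind
         return a + b;
      }
      ...
      long sum = VALGRIND_NON_SIMD_CALL2(add_on_real_cpu, 20, 22);
*/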
7411
7412
7413
7414
7415
7416 #define VALGRIND_COUNT_ERRORS \
7417 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( \
7418 0 , \
7419 VG_USERREQ__COUNT_ERRORS, \
7420 0, 0, 0, 0, 0)
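/* Usage sketch (added note): VALGRIND_COUNT_ERRORS yields the number of
   errors the current tool has reported so far (0 when not running under an
   error-reporting tool), which lets a test harness fail fast.  Illustrative:

      unsigned errs_before = VALGRIND_COUNT_ERRORS;
      run_one_test();
      if (VALGRIND_COUNT_ERRORS > errs_before)
         fprintf(stderr, "test produced new Valgrind errors\n");
*/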
7421
7422
7423
7424
7425
7426
7427
7428
7429
7430
7431
7432
7433
7434
7435
7436
7437
7438
7439
7440
7441
7442
7443
7444
7445
7446
7447
7448
7449
7450
7451
7452
7453
7454
7455
7456
7457
7458
7459
7460
7461
7462
7463
7464
7465
7466
7467
7468
7469
7470
7471
7472
7473
7474
7475
7476
7477
7478
7479
7480
7481
7482
7483
7484
7485
7486
7487
7488
7489
7490
7491
7492
7493
7494
7495
7496
7497
7498
7499
7500
7501
7502
7503
7504
7505
7506
7507
7508
7509
7510
7511
7512
7513
7514
7515
7516
7517
7518
7519
7520
7521
7522
7523 #define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed) \
7524 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
7525 addr, sizeB, rzB, is_zeroed, 0)
7526
7527
7528
7529
7530 #define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \
7531 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK, \
7532 addr, oldSizeB, newSizeB, rzB, 0)
7533
7534
7535
7536
7537 #define VALGRIND_FREELIKE_BLOCK(addr, rzB) \
7538 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK, \
7539 addr, rzB, 0, 0, 0)
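/* Usage sketch (added note): a custom allocator that carves objects out of
   larger blocks can describe each object with the two macros above, so that
   leaks, redzone overruns and use-after-free on those objects are reported
   much as for malloc/free.  'pool_grab'/'pool_return' and RZ (the redzone
   size in bytes) are assumed names, illustrative only:

      #define RZ 16                          // redzone on each side

      void* my_alloc(size_t n)
      {
         char* raw = pool_grab(n + 2*RZ);    // hypothetical backing allocator
         char* usr = raw + RZ;
         VALGRIND_MALLOCLIKE_BLOCK(usr, n, RZ, 0);   // is_zeroed = 0
         return usr;
      }

      void my_free(void* usr)
      {
         VALGRIND_FREELIKE_BLOCK(usr, RZ);
         pool_return((char*)usr - RZ);
      }
*/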
7540
7541
7542 #define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed) \
7543 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \
7544 pool, rzB, is_zeroed, 0, 0)
7545
7546
7547
7548
7549
7550
7551
7552
7553
7554
7555
7556
7557
7558
7559
7560
7561
7562
7563
7564
7565
7566
7567
7568
7569
7570
7571
7572 #define VALGRIND_MEMPOOL_AUTO_FREE 1
7573 #define VALGRIND_MEMPOOL_METAPOOL 2
7574 #define VALGRIND_CREATE_MEMPOOL_EXT(pool, rzB, is_zeroed, flags) \
7575 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \
7576 pool, rzB, is_zeroed, flags, 0)
7577
7578
7579 #define VALGRIND_DESTROY_MEMPOOL(pool) \
7580 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL, \
7581 pool, 0, 0, 0, 0)
7582
7583
7584 #define VALGRIND_MEMPOOL_ALLOC(pool, addr, size) \
7585 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC, \
7586 pool, addr, size, 0, 0)
7587
7588
7589 #define VALGRIND_MEMPOOL_FREE(pool, addr) \
7590 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE, \
7591 pool, addr, 0, 0, 0)
7592
7593
7594 #define VALGRIND_MEMPOOL_TRIM(pool, addr, size) \
7595 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM, \
7596 pool, addr, size, 0, 0)
7597
7598
7599 #define VALGRIND_MOVE_MEMPOOL(poolA, poolB) \
7600 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL, \
7601 poolA, poolB, 0, 0, 0)
7602
7603
7604 #define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size) \
7605 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE, \
7606 pool, addrA, addrB, size, 0)
7607
7608
7609 #define VALGRIND_MEMPOOL_EXISTS(pool) \
7610 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
7611 VG_USERREQ__MEMPOOL_EXISTS, \
7612 pool, 0, 0, 0, 0)
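/* Usage sketch (added note): the mempool requests describe a pool-style
   allocator.  'pool' is simply an anchor address identifying the pool
   (commonly the address of the pool's own descriptor), and each object
   handed out of the pool is announced and retired individually.
   Illustrative only; 'create_my_pool' and 'carve_from_pool' are assumed
   helpers:

      my_pool* pool = create_my_pool();
      VALGRIND_CREATE_MEMPOOL(pool, 0, 0);        // no redzone, not zeroed

      void* obj = carve_from_pool(pool, 128);
      VALGRIND_MEMPOOL_ALLOC(pool, obj, 128);
      ...
      VALGRIND_MEMPOOL_FREE(pool, obj);

      VALGRIND_DESTROY_MEMPOOL(pool);
*/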
7613
7614
7615
7616
7617 #define VALGRIND_STACK_REGISTER(start, end) \
7618 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
7619 VG_USERREQ__STACK_REGISTER, \
7620 start, end, 0, 0, 0)
7621
7622
7623
7624 #define VALGRIND_STACK_DEREGISTER(id) \
7625 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
7626 id, 0, 0, 0, 0)
7627
7628
7629
7630
7631 #define VALGRIND_STACK_CHANGE(id, start, end) \
7632 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE, \
7633 id, start, end, 0, 0)
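/* Usage sketch (added note): code that runs on stacks it allocates itself
   (coroutines, fibers, green threads) should register each stack so that
   Valgrind can track it properly and produce sensible stack traces on it.
   STACK_SIZE and the coroutine runner are assumed names, illustrative:

      char* stk = malloc(STACK_SIZE);
      unsigned stack_id = VALGRIND_STACK_REGISTER(stk, stk + STACK_SIZE);

      run_coroutine_on(stk, STACK_SIZE);

      VALGRIND_STACK_DEREGISTER(stack_id);
      free(stk);
*/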
7634
7635
7636 #define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta) \
7637 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
7638 fd, ptr, total_size, delta, 0)
7639
7640
7641
7642
7643
7644 #define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64) \
7645 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
7646 VG_USERREQ__MAP_IP_TO_SRCLOC, \
7647 addr, buf64, 0, 0, 0)
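/* Usage sketch (added note): maps a code address to a source-location
   string; buf64 is expected to point to a 64-byte buffer that receives the
   result.  Illustrative, with 'ip' assumed to hold an instruction address:

      char where[64];
      VALGRIND_MAP_IP_TO_SRCLOC(ip, where);
*/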
7648
7649
7650
7651
7652
7653
7654
7655
7656
7657 #define VALGRIND_DISABLE_ERROR_REPORTING \
7658 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
7659 1, 0, 0, 0, 0)
7660
7661
7662
7663 #define VALGRIND_ENABLE_ERROR_REPORTING \
7664 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
7665 -1, 0, 0, 0, 0)
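/* Usage sketch (added note): these two requests adjust a per-thread
   disablement counter, so they nest; each DISABLE should be balanced by an
   ENABLE on the same thread.  Illustrative:

      VALGRIND_DISABLE_ERROR_REPORTING;
      poke_memory_we_already_know_is_dubious();
      VALGRIND_ENABLE_ERROR_REPORTING;
*/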
7666
7667
7668
7669
7670
7671
7672 #define VALGRIND_MONITOR_COMMAND(command) \
7673 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
7674 command, 0, 0, 0, 0)
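/* Usage sketch (added note): runs a gdbserver "monitor" command on behalf
   of the program; output goes to the connected vgdb/GDB session or, if
   none is attached, to the Valgrind log.  The command string is
   tool-specific, e.g. under Memcheck (illustrative):

      VALGRIND_MONITOR_COMMAND("leak_check summary");
*/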
7675
7676
7677
7678
7679
7680 #define VALGRIND_CLO_CHANGE(option) \
7681 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CLO_CHANGE, \
7682 option, 0, 0, 0, 0)
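/* Usage sketch (added note): requests a change to one command-line option
   at runtime, using the same "--option=value" syntax as on the command
   line.  Only options the tool marks as dynamically changeable can
   actually be changed; the option shown here is purely illustrative:

      VALGRIND_CLO_CHANGE("--leak-check=full");
*/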
7683
7684
7685 #undef PLAT_x86_darwin
7686 #undef PLAT_amd64_darwin
7687 #undef PLAT_x86_win32
7688 #undef PLAT_amd64_win64
7689 #undef PLAT_x86_linux
7690 #undef PLAT_amd64_linux
7691 #undef PLAT_ppc32_linux
7692 #undef PLAT_ppc64be_linux
7693 #undef PLAT_ppc64le_linux
7694 #undef PLAT_arm_linux
7695 #undef PLAT_s390x_linux
7696 #undef PLAT_mips32_linux
7697 #undef PLAT_mips64_linux
7698 #undef PLAT_nanomips_linux
7699 #undef PLAT_riscv64_linux
7700 #undef PLAT_x86_solaris
7701 #undef PLAT_amd64_solaris
7702
7703 #endif