#ifndef __VALGRIND_H
#define __VALGRIND_H

/* Version of this header, so client code can test at compile time
   which Valgrind release these macros correspond to. */
#define __VALGRIND_MAJOR__    3
#define __VALGRIND_MINOR__    24
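
/* Example (illustrative only): code that must build against several
   Valgrind releases can gate newer requests on these numbers:

     #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__)   \
         && (__VALGRIND_MAJOR__ > 3                                   \
             || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 24))
       ...use macros introduced in 3.24 or later...
     #endif
*/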

/* stdarg.h is needed by the variadic helpers (e.g. VALGRIND_PRINTF)
   defined later in this header. */
#include <stdarg.h>

/* Work out which platform we are being compiled for, and define exactly
   one PLAT_* macro for it.  Start by clearing any stale definitions. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_freebsd
#undef PLAT_amd64_freebsd
#undef PLAT_arm64_freebsd
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_arm64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_nanomips_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris

#if defined(__APPLE__) && defined(__i386__)
#  define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
#  define PLAT_amd64_darwin 1
#elif defined(__FreeBSD__) && defined(__i386__)
#  define PLAT_x86_freebsd 1
#elif defined(__FreeBSD__) && defined(__amd64__)
#  define PLAT_amd64_freebsd 1
#elif defined(__FreeBSD__) && defined(__aarch64__) && !defined(__arm__)
#  define PLAT_arm64_freebsd 1
#elif (defined(__MINGW32__) && defined(__i386__)) \
      || defined(__CYGWIN32__) \
      || (defined(_WIN32) && defined(_M_IX86))
#  define PLAT_x86_win32 1
#elif (defined(__MINGW32__) && defined(__x86_64__)) \
      || (defined(_WIN32) && defined(_M_X64))
#  define PLAT_amd64_win64 1
#elif defined(__linux__) && defined(__i386__)
#  define PLAT_x86_linux 1
#elif defined(__linux__) && defined(__x86_64__) && !defined(__ILP32__)
#  define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
#  define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
   /* Big-endian ppc64 using the ELFv1 ABI. */
#  define PLAT_ppc64be_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
   /* Little-endian ppc64 using the ELFv2 ABI. */
#  define PLAT_ppc64le_linux 1
#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
#  define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
#  define PLAT_arm64_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
#  define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==64)
#  define PLAT_mips64_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==32)
#  define PLAT_mips32_linux 1
#elif defined(__linux__) && defined(__nanomips__)
#  define PLAT_nanomips_linux 1
#elif defined(__sun) && defined(__i386__)
#  define PLAT_x86_solaris 1
#elif defined(__sun) && defined(__x86_64__)
#  define PLAT_amd64_solaris 1
#else
/* Not compiling for a supported platform: don't generate any inline
   asm, and force the no-op (NVALGRIND) versions of all the macros. */
#  if !defined(NVALGRIND)
#    define NVALGRIND 1
#  endif
#endif

/* The architecture-independent wrappers for issuing a client request.
   The EXPR form (defined per-platform below) yields a value, or the
   caller-supplied default when the program is not running under
   Valgrind; these two forms turn it into a statement, respectively
   storing or discarding the result. */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                           _zzq_arg2,  _zzq_arg3, _zzq_arg4, _zzq_arg5) \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                    (_zzq_request), (_zzq_arg1), (_zzq_arg2),           \
                    (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
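
/* A minimal usage sketch (illustrative only; VG_USERREQ__EXAMPLE
   stands in for one of the VG_USERREQ__* request codes defined by the
   Valgrind headers):

       uintptr_t res;
       res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                                             VG_USERREQ__EXAMPLE,
                                             arg1, arg2, 0, 0, 0);

   The first argument is what the expression yields when the program is
   not running under Valgrind, so the request is harmless in a native
   run. */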

#if defined(NVALGRIND)

/* Valgrind support is compiled out: the expression form simply yields
   the caller-supplied default and emits none of the special
   instruction sequences. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
      (_zzq_default)

#else  /* ! NVALGRIND */

/* --------------- x86-{linux,darwin,solaris,freebsd} and x86-win32 (gcc) --------------- */

#if defined(PLAT_x86_linux)  ||  defined(PLAT_x86_darwin)  \
    ||  (defined(PLAT_x86_win32) && defined(__GNUC__))     \
    ||  defined(PLAT_x86_solaris) || defined(PLAT_x86_freebsd)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* A sequence of rotates that leaves %edi unchanged.  On a real CPU it
   is a no-op; Valgrind's core recognises it as the marker that one of
   the special operations below follows. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3,  %%edi ; roll $13, %%edi\n\t"      \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EDX = client_request ( %EAX ) */         \
                     "xchgl %%ebx,%%ebx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EAX = guest_NRADDR */                    \
                     "xchgl %%ecx,%%ecx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgl %%edi,%%edi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif  /* x86, gcc-style asm */
0335 #if defined(PLAT_x86_win32) && !defined(__GNUC__)
0336
0337 typedef
0338 struct {
0339 unsigned int nraddr;
0340 }
0341 OrigFn;
0342
0343 #if defined(_MSC_VER)
0344
0345 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0346 __asm rol edi, 3 __asm rol edi, 13 \
0347 __asm rol edi, 29 __asm rol edi, 19
0348
0349 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0350 _zzq_default, _zzq_request, \
0351 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0352 valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
0353 (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
0354 (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
0355 (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))
0356
0357 static __inline uintptr_t
0358 valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
0359 uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
0360 uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
0361 uintptr_t _zzq_arg5)
0362 {
0363 volatile uintptr_t _zzq_args[6];
0364 volatile unsigned int _zzq_result;
0365 _zzq_args[0] = (uintptr_t)(_zzq_request);
0366 _zzq_args[1] = (uintptr_t)(_zzq_arg1);
0367 _zzq_args[2] = (uintptr_t)(_zzq_arg2);
0368 _zzq_args[3] = (uintptr_t)(_zzq_arg3);
0369 _zzq_args[4] = (uintptr_t)(_zzq_arg4);
0370 _zzq_args[5] = (uintptr_t)(_zzq_arg5);
0371 __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
0372 __SPECIAL_INSTRUCTION_PREAMBLE
0373
0374 __asm xchg ebx,ebx
0375 __asm mov _zzq_result, edx
0376 }
0377 return _zzq_result;
0378 }
0379
0380 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0381 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0382 volatile unsigned int __addr; \
0383 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
0384 \
0385 __asm xchg ecx,ecx \
0386 __asm mov __addr, eax \
0387 } \
0388 _zzq_orig->nraddr = __addr; \
0389 }
0390
0391 #define VALGRIND_CALL_NOREDIR_EAX ERROR
0392
0393 #define VALGRIND_VEX_INJECT_IR() \
0394 do { \
0395 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
0396 __asm xchg edi,edi \
0397 } \
0398 } while (0)
0399
0400 #else
0401 #error Unsupported compiler.
0402 #endif
0403
0404 #endif
0408 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
0409 || defined(PLAT_amd64_solaris) \
0410 || defined(PLAT_amd64_freebsd) \
0411 || (defined(PLAT_amd64_win64) && defined(__GNUC__))
0412
0413 typedef
0414 struct {
0415 unsigned long int nraddr;
0416 }
0417 OrigFn;
0418
0419 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0420 "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
0421 "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
0422
0423 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0424 _zzq_default, _zzq_request, \
0425 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0426 __extension__ \
0427 ({ volatile unsigned long int _zzq_args[6]; \
0428 volatile unsigned long int _zzq_result; \
0429 _zzq_args[0] = (unsigned long int)(_zzq_request); \
0430 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
0431 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
0432 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
0433 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
0434 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
0435 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0436 \
0437 "xchgq %%rbx,%%rbx" \
0438 : "=d" (_zzq_result) \
0439 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
0440 : "cc", "memory" \
0441 ); \
0442 _zzq_result; \
0443 })
0444
0445 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0446 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0447 volatile unsigned long int __addr; \
0448 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0449 \
0450 "xchgq %%rcx,%%rcx" \
0451 : "=a" (__addr) \
0452 : \
0453 : "cc", "memory" \
0454 ); \
0455 _zzq_orig->nraddr = __addr; \
0456 }
0457
0458 #define VALGRIND_CALL_NOREDIR_RAX \
0459 __SPECIAL_INSTRUCTION_PREAMBLE \
0460 \
0461 "xchgq %%rdx,%%rdx\n\t"
0462
0463 #define VALGRIND_VEX_INJECT_IR() \
0464 do { \
0465 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0466 "xchgq %%rdi,%%rdi\n\t" \
0467 : : : "cc", "memory" \
0468 ); \
0469 } while (0)
0470
0471 #endif
0475 #if defined(PLAT_amd64_win64) && !defined(__GNUC__)
0476
0477 #error Unsupported compiler.
0478
0479 #endif
0483 #if defined(PLAT_ppc32_linux)
0484
0485 typedef
0486 struct {
0487 unsigned int nraddr;
0488 }
0489 OrigFn;
0490
0491 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0492 "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
0493 "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"
0494
0495 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0496 _zzq_default, _zzq_request, \
0497 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0498 \
0499 __extension__ \
0500 ({ unsigned int _zzq_args[6]; \
0501 unsigned int _zzq_result; \
0502 unsigned int* _zzq_ptr; \
0503 _zzq_args[0] = (unsigned int)(_zzq_request); \
0504 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
0505 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
0506 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
0507 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
0508 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
0509 _zzq_ptr = _zzq_args; \
0510 __asm__ volatile("mr 3,%1\n\t" \
0511 "mr 4,%2\n\t" \
0512 __SPECIAL_INSTRUCTION_PREAMBLE \
0513 \
0514 "or 1,1,1\n\t" \
0515 "mr %0,3" \
0516 : "=b" (_zzq_result) \
0517 : "b" (_zzq_default), "b" (_zzq_ptr) \
0518 : "cc", "memory", "r3", "r4"); \
0519 _zzq_result; \
0520 })
0521
0522 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0523 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0524 unsigned int __addr; \
0525 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0526 \
0527 "or 2,2,2\n\t" \
0528 "mr %0,3" \
0529 : "=b" (__addr) \
0530 : \
0531 : "cc", "memory", "r3" \
0532 ); \
0533 _zzq_orig->nraddr = __addr; \
0534 }
0535
0536 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
0537 __SPECIAL_INSTRUCTION_PREAMBLE \
0538 \
0539 "or 3,3,3\n\t"
0540
0541 #define VALGRIND_VEX_INJECT_IR() \
0542 do { \
0543 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0544 "or 5,5,5\n\t" \
0545 ); \
0546 } while (0)
0547
0548 #endif
0552 #if defined(PLAT_ppc64be_linux)
0553
0554 typedef
0555 struct {
0556 unsigned long int nraddr;
0557 unsigned long int r2;
0558 }
0559 OrigFn;
0560
0561 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0562 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
0563 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
0564
0565 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0566 _zzq_default, _zzq_request, \
0567 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0568 \
0569 __extension__ \
0570 ({ unsigned long int _zzq_args[6]; \
0571 unsigned long int _zzq_result; \
0572 unsigned long int* _zzq_ptr; \
0573 _zzq_args[0] = (unsigned long int)(_zzq_request); \
0574 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
0575 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
0576 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
0577 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
0578 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
0579 _zzq_ptr = _zzq_args; \
0580 __asm__ volatile("mr 3,%1\n\t" \
0581 "mr 4,%2\n\t" \
0582 __SPECIAL_INSTRUCTION_PREAMBLE \
0583 \
0584 "or 1,1,1\n\t" \
0585 "mr %0,3" \
0586 : "=b" (_zzq_result) \
0587 : "b" (_zzq_default), "b" (_zzq_ptr) \
0588 : "cc", "memory", "r3", "r4"); \
0589 _zzq_result; \
0590 })
0591
0592 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0593 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0594 unsigned long int __addr; \
0595 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0596 \
0597 "or 2,2,2\n\t" \
0598 "mr %0,3" \
0599 : "=b" (__addr) \
0600 : \
0601 : "cc", "memory", "r3" \
0602 ); \
0603 _zzq_orig->nraddr = __addr; \
0604 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0605 \
0606 "or 4,4,4\n\t" \
0607 "mr %0,3" \
0608 : "=b" (__addr) \
0609 : \
0610 : "cc", "memory", "r3" \
0611 ); \
0612 _zzq_orig->r2 = __addr; \
0613 }
0614
0615 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
0616 __SPECIAL_INSTRUCTION_PREAMBLE \
0617 \
0618 "or 3,3,3\n\t"
0619
0620 #define VALGRIND_VEX_INJECT_IR() \
0621 do { \
0622 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0623 "or 5,5,5\n\t" \
0624 ); \
0625 } while (0)
0626
0627 #endif
0628
0629 #if defined(PLAT_ppc64le_linux)
0630
0631 typedef
0632 struct {
0633 unsigned long int nraddr;
0634 unsigned long int r2;
0635 }
0636 OrigFn;
0637
0638 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0639 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
0640 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
0641
0642 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0643 _zzq_default, _zzq_request, \
0644 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0645 \
0646 __extension__ \
0647 ({ unsigned long int _zzq_args[6]; \
0648 unsigned long int _zzq_result; \
0649 unsigned long int* _zzq_ptr; \
0650 _zzq_args[0] = (unsigned long int)(_zzq_request); \
0651 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
0652 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
0653 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
0654 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
0655 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
0656 _zzq_ptr = _zzq_args; \
0657 __asm__ volatile("mr 3,%1\n\t" \
0658 "mr 4,%2\n\t" \
0659 __SPECIAL_INSTRUCTION_PREAMBLE \
0660 \
0661 "or 1,1,1\n\t" \
0662 "mr %0,3" \
0663 : "=b" (_zzq_result) \
0664 : "b" (_zzq_default), "b" (_zzq_ptr) \
0665 : "cc", "memory", "r3", "r4"); \
0666 _zzq_result; \
0667 })
0668
0669 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0670 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0671 unsigned long int __addr; \
0672 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0673 \
0674 "or 2,2,2\n\t" \
0675 "mr %0,3" \
0676 : "=b" (__addr) \
0677 : \
0678 : "cc", "memory", "r3" \
0679 ); \
0680 _zzq_orig->nraddr = __addr; \
0681 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0682 \
0683 "or 4,4,4\n\t" \
0684 "mr %0,3" \
0685 : "=b" (__addr) \
0686 : \
0687 : "cc", "memory", "r3" \
0688 ); \
0689 _zzq_orig->r2 = __addr; \
0690 }
0691
0692 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
0693 __SPECIAL_INSTRUCTION_PREAMBLE \
0694 \
0695 "or 3,3,3\n\t"
0696
0697 #define VALGRIND_VEX_INJECT_IR() \
0698 do { \
0699 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0700 "or 5,5,5\n\t" \
0701 ); \
0702 } while (0)
0703
0704 #endif
0708 #if defined(PLAT_arm_linux)
0709
0710 typedef
0711 struct {
0712 unsigned int nraddr;
0713 }
0714 OrigFn;
0715
0716 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0717 "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
0718 "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
0719
0720 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0721 _zzq_default, _zzq_request, \
0722 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0723 \
0724 __extension__ \
0725 ({volatile unsigned int _zzq_args[6]; \
0726 volatile unsigned int _zzq_result; \
0727 _zzq_args[0] = (unsigned int)(_zzq_request); \
0728 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
0729 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
0730 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
0731 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
0732 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
0733 __asm__ volatile("mov r3, %1\n\t" \
0734 "mov r4, %2\n\t" \
0735 __SPECIAL_INSTRUCTION_PREAMBLE \
0736 \
0737 "orr r10, r10, r10\n\t" \
0738 "mov %0, r3" \
0739 : "=r" (_zzq_result) \
0740 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
0741 : "cc","memory", "r3", "r4"); \
0742 _zzq_result; \
0743 })
0744
0745 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0746 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0747 unsigned int __addr; \
0748 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0749 \
0750 "orr r11, r11, r11\n\t" \
0751 "mov %0, r3" \
0752 : "=r" (__addr) \
0753 : \
0754 : "cc", "memory", "r3" \
0755 ); \
0756 _zzq_orig->nraddr = __addr; \
0757 }
0758
0759 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
0760 __SPECIAL_INSTRUCTION_PREAMBLE \
0761 \
0762 "orr r12, r12, r12\n\t"
0763
0764 #define VALGRIND_VEX_INJECT_IR() \
0765 do { \
0766 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0767 "orr r9, r9, r9\n\t" \
0768 : : : "cc", "memory" \
0769 ); \
0770 } while (0)
0771
0772 #endif
0776 #if defined(PLAT_arm64_linux) || defined(PLAT_arm64_freebsd)
0777
0778 typedef
0779 struct {
0780 unsigned long int nraddr;
0781 }
0782 OrigFn;
0783
0784 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0785 "ror x12, x12, #3 ; ror x12, x12, #13 \n\t" \
0786 "ror x12, x12, #51 ; ror x12, x12, #61 \n\t"
0787
0788 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0789 _zzq_default, _zzq_request, \
0790 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0791 \
0792 __extension__ \
0793 ({volatile unsigned long int _zzq_args[6]; \
0794 volatile unsigned long int _zzq_result; \
0795 _zzq_args[0] = (unsigned long int)(_zzq_request); \
0796 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
0797 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
0798 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
0799 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
0800 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
0801 __asm__ volatile("mov x3, %1\n\t" \
0802 "mov x4, %2\n\t" \
0803 __SPECIAL_INSTRUCTION_PREAMBLE \
0804 \
0805 "orr x10, x10, x10\n\t" \
0806 "mov %0, x3" \
0807 : "=r" (_zzq_result) \
0808 : "r" ((unsigned long int)(_zzq_default)), \
0809 "r" (&_zzq_args[0]) \
0810 : "cc","memory", "x3", "x4"); \
0811 _zzq_result; \
0812 })
0813
0814 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0815 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0816 unsigned long int __addr; \
0817 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0818 \
0819 "orr x11, x11, x11\n\t" \
0820 "mov %0, x3" \
0821 : "=r" (__addr) \
0822 : \
0823 : "cc", "memory", "x3" \
0824 ); \
0825 _zzq_orig->nraddr = __addr; \
0826 }
0827
0828 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
0829 __SPECIAL_INSTRUCTION_PREAMBLE \
0830 \
0831 "orr x12, x12, x12\n\t"
0832
0833 #define VALGRIND_VEX_INJECT_IR() \
0834 do { \
0835 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0836 "orr x9, x9, x9\n\t" \
0837 : : : "cc", "memory" \
0838 ); \
0839 } while (0)
0840
0841 #endif
0845 #if defined(PLAT_s390x_linux)
0846
0847 typedef
0848 struct {
0849 unsigned long int nraddr;
0850 }
0851 OrigFn;
0852
0857 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0858 "lr 15,15\n\t" \
0859 "lr 1,1\n\t" \
0860 "lr 2,2\n\t" \
0861 "lr 3,3\n\t"
0862
0863 #define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
0864 #define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
0865 #define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
0866 #define __VEX_INJECT_IR_CODE "lr 5,5\n\t"
0867
0868 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0869 _zzq_default, _zzq_request, \
0870 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0871 __extension__ \
0872 ({volatile unsigned long int _zzq_args[6]; \
0873 volatile unsigned long int _zzq_result; \
0874 _zzq_args[0] = (unsigned long int)(_zzq_request); \
0875 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
0876 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
0877 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
0878 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
0879 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
0880 __asm__ volatile( \
0881 "lgr 2,%1\n\t" \
0882 \
0883 "lgr 3,%2\n\t" \
0884 __SPECIAL_INSTRUCTION_PREAMBLE \
0885 __CLIENT_REQUEST_CODE \
0886 \
0887 "lgr %0, 3\n\t" \
0888 : "=d" (_zzq_result) \
0889 : "a" (&_zzq_args[0]), \
0890 "0" ((unsigned long int)_zzq_default) \
0891 : "cc", "2", "3", "memory" \
0892 ); \
0893 _zzq_result; \
0894 })
0895
0896 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0897 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0898 volatile unsigned long int __addr; \
0899 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0900 __GET_NR_CONTEXT_CODE \
0901 "lgr %0, 3\n\t" \
0902 : "=a" (__addr) \
0903 : \
0904 : "cc", "3", "memory" \
0905 ); \
0906 _zzq_orig->nraddr = __addr; \
0907 }
0908
0909 #define VALGRIND_CALL_NOREDIR_R1 \
0910 __SPECIAL_INSTRUCTION_PREAMBLE \
0911 __CALL_NO_REDIR_CODE
0912
0913 #define VALGRIND_VEX_INJECT_IR() \
0914 do { \
0915 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0916 __VEX_INJECT_IR_CODE); \
0917 } while (0)
0918
0919 #endif
0923 #if defined(PLAT_mips32_linux)
0924
0925 typedef
0926 struct {
0927 unsigned int nraddr;
0928 }
0929 OrigFn;
0930
0935 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0936 "srl $0, $0, 13\n\t" \
0937 "srl $0, $0, 29\n\t" \
0938 "srl $0, $0, 3\n\t" \
0939 "srl $0, $0, 19\n\t"
0940
0941 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0942 _zzq_default, _zzq_request, \
0943 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0944 __extension__ \
0945 ({ volatile unsigned int _zzq_args[6]; \
0946 volatile unsigned int _zzq_result; \
0947 _zzq_args[0] = (unsigned int)(_zzq_request); \
0948 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
0949 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
0950 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
0951 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
0952 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
0953 __asm__ volatile("move $11, %1\n\t" \
0954 "move $12, %2\n\t" \
0955 __SPECIAL_INSTRUCTION_PREAMBLE \
0956 \
0957 "or $13, $13, $13\n\t" \
0958 "move %0, $11\n\t" \
0959 : "=r" (_zzq_result) \
0960 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
0961 : "$11", "$12", "memory"); \
0962 _zzq_result; \
0963 })
0964
0965 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0966 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0967 volatile unsigned int __addr; \
0968 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0969 \
0970 "or $14, $14, $14\n\t" \
0971 "move %0, $11" \
0972 : "=r" (__addr) \
0973 : \
0974 : "$11" \
0975 ); \
0976 _zzq_orig->nraddr = __addr; \
0977 }
0978
0979 #define VALGRIND_CALL_NOREDIR_T9 \
0980 __SPECIAL_INSTRUCTION_PREAMBLE \
0981 \
0982 "or $15, $15, $15\n\t"
0983
0984 #define VALGRIND_VEX_INJECT_IR() \
0985 do { \
0986 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0987 "or $11, $11, $11\n\t" \
0988 ); \
0989 } while (0)
0990
0991
0992 #endif
0996 #if defined(PLAT_mips64_linux)
0997
0998 typedef
0999 struct {
1000 unsigned long nraddr;
1001 }
1002 OrigFn;
1008 #define __SPECIAL_INSTRUCTION_PREAMBLE \
1009 "dsll $0,$0, 3 ; dsll $0,$0,13\n\t" \
1010 "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
1011
1012 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
1013 _zzq_default, _zzq_request, \
1014 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
1015 __extension__ \
1016 ({ volatile unsigned long int _zzq_args[6]; \
1017 volatile unsigned long int _zzq_result; \
1018 _zzq_args[0] = (unsigned long int)(_zzq_request); \
1019 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
1020 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
1021 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
1022 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
1023 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
1024 __asm__ volatile("move $11, %1\n\t" \
1025 "move $12, %2\n\t" \
1026 __SPECIAL_INSTRUCTION_PREAMBLE \
1027 \
1028 "or $13, $13, $13\n\t" \
1029 "move %0, $11\n\t" \
1030 : "=r" (_zzq_result) \
1031 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
1032 : "$11", "$12", "memory"); \
1033 _zzq_result; \
1034 })
1035
1036 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
1037 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
1038 volatile unsigned long int __addr; \
1039 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1040 \
1041 "or $14, $14, $14\n\t" \
1042 "move %0, $11" \
1043 : "=r" (__addr) \
1044 : \
1045 : "$11"); \
1046 _zzq_orig->nraddr = __addr; \
1047 }
1048
1049 #define VALGRIND_CALL_NOREDIR_T9 \
1050 __SPECIAL_INSTRUCTION_PREAMBLE \
1051 \
1052 "or $15, $15, $15\n\t"
1053
1054 #define VALGRIND_VEX_INJECT_IR() \
1055 do { \
1056 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1057 "or $11, $11, $11\n\t" \
1058 ); \
1059 } while (0)
1060
1061 #endif
1062
1063 #if defined(PLAT_nanomips_linux)
1064
1065 typedef
1066 struct {
1067 unsigned int nraddr;
1068 }
1069 OrigFn;
1077 #define __SPECIAL_INSTRUCTION_PREAMBLE "srl[32] $zero, $zero, 13 \n\t" \
1078 "srl[32] $zero, $zero, 29 \n\t" \
1079 "srl[32] $zero, $zero, 3 \n\t" \
1080 "srl[32] $zero, $zero, 19 \n\t"
1081
1082 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
1083 _zzq_default, _zzq_request, \
1084 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
1085 __extension__ \
1086 ({ volatile unsigned int _zzq_args[6]; \
1087 volatile unsigned int _zzq_result; \
1088 _zzq_args[0] = (unsigned int)(_zzq_request); \
1089 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
1090 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
1091 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
1092 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
1093 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
1094 __asm__ volatile("move $a7, %1\n\t" \
1095 "move $t0, %2\n\t" \
1096 __SPECIAL_INSTRUCTION_PREAMBLE \
1097 \
1098 "or[32] $t0, $t0, $t0\n\t" \
1099 "move %0, $a7\n\t" \
1100 : "=r" (_zzq_result) \
1101 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
1102 : "$a7", "$t0", "memory"); \
1103 _zzq_result; \
1104 })
1105
1106 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
1107 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
1108 volatile unsigned long int __addr; \
1109 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1110 \
1111 "or[32] $t1, $t1, $t1\n\t" \
1112 "move %0, $a7" \
1113 : "=r" (__addr) \
1114 : \
1115 : "$a7"); \
1116 _zzq_orig->nraddr = __addr; \
1117 }
1118
1119 #define VALGRIND_CALL_NOREDIR_T9 \
1120 __SPECIAL_INSTRUCTION_PREAMBLE \
1121 \
1122 "or[32] $t2, $t2, $t2\n\t"
1123
1124 #define VALGRIND_VEX_INJECT_IR() \
1125 do { \
1126 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1127 "or[32] $t3, $t3, $t3\n\t" \
1128 ); \
1129 } while (0)
1130
1131 #endif

#endif

/* ----------------- Function wrapping and replacement ----------------- */

#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd

/* Construct the name of a wrapper for function 'fnname' in an object
   whose soname is 'soname'.  The ZU and ZZ variants differ in how much
   of the name is expected to be Z-encoded (see pub_tool_redir.h in the
   Valgrind sources for the encoding rules); the _vgw00000 prefix is
   what makes the core treat the symbol as a wrapper. */
#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)

/* Use this inside a wrapper to obtain the address of the function
   being wrapped, so it can be called via the CALL_FN_* macros. */
#define VALGRIND_GET_ORIG_FN(_lval)  VALGRIND_GET_NR_CONTEXT(_lval)
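
/* Sketch of a typical wrapper, following the scheme in the Valgrind
   manual (all names here are illustrative): to intercept 'foo' from an
   object whose Z-encoded soname is libfooZdsoZd0 ("libfoo.so.0"),
   define

       int I_WRAP_SONAME_FNNAME_ZU(libfooZdsoZd0, foo)(int x, int y)
       {
          int    r;
          OrigFn fn;
          VALGRIND_GET_ORIG_FN(fn);
          // ...before...
          CALL_FN_W_WW(r, fn, x, y);
          // ...after...
          return r;
       }

   and link it into the client program or an LD_PRELOADed object. */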

/* The same naming scheme for functions that replace, rather than wrap,
   the original; the _vgr00000 prefix marks a replacement. */
#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)

#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
/* Convenience wrappers for calling functions that return void: call
   through the word-returning variants and discard the result. */
#define CALL_FN_v_v(fnptr)                                        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_v(_junk,fnptr); } while (0)

#define CALL_FN_v_W(fnptr, arg1)                                  \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_W(_junk,fnptr,arg1); } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2)                            \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                      \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)             \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)   \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)

#if defined(PLAT_x86_linux)  ||  defined(PLAT_x86_darwin)         \
    ||  defined(PLAT_x86_solaris)  ||  defined(PLAT_x86_freebsd)

#define __CALLER_SAVED_REGS "ecx", "edx"
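/* The registers above are trashed by the hidden call.  %eax is not
   listed because it always carries the return value and is named as an
   explicit output of each asm block below. */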

/* Macros to save and align the stack pointer before the hidden call
   and to restore it afterwards: gcc may not keep it suitably aligned
   itself, since it cannot see that a call is being made here.  The
   original %esp is parked in %edi, which is why %edi appears in the
   clobber lists below. */
#define VALGRIND_ALIGN_STACK                                      \
      "movl %%esp,%%edi\n\t"                                      \
      "andl $0xfffffff0,%%esp\n\t"
#define VALGRIND_RESTORE_STACK                                    \
      "movl %%edi,%%esp\n\t"
1252 #define CALL_FN_W_v(lval, orig) \
1253 do { \
1254 volatile OrigFn _orig = (orig); \
1255 volatile unsigned long _argvec[1]; \
1256 volatile unsigned long _res; \
1257 _argvec[0] = (unsigned long)_orig.nraddr; \
1258 __asm__ volatile( \
1259 VALGRIND_ALIGN_STACK \
1260 "movl (%%eax), %%eax\n\t" \
1261 VALGRIND_CALL_NOREDIR_EAX \
1262 VALGRIND_RESTORE_STACK \
1263 : "=a" (_res) \
1264 : "a" (&_argvec[0]) \
1265 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1266 ); \
1267 lval = (__typeof__(lval)) _res; \
1268 } while (0)
1269
1270 #define CALL_FN_W_W(lval, orig, arg1) \
1271 do { \
1272 volatile OrigFn _orig = (orig); \
1273 volatile unsigned long _argvec[2]; \
1274 volatile unsigned long _res; \
1275 _argvec[0] = (unsigned long)_orig.nraddr; \
1276 _argvec[1] = (unsigned long)(arg1); \
1277 __asm__ volatile( \
1278 VALGRIND_ALIGN_STACK \
1279 "subl $12, %%esp\n\t" \
1280 "pushl 4(%%eax)\n\t" \
1281 "movl (%%eax), %%eax\n\t" \
1282 VALGRIND_CALL_NOREDIR_EAX \
1283 VALGRIND_RESTORE_STACK \
1284 : "=a" (_res) \
1285 : "a" (&_argvec[0]) \
1286 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1287 ); \
1288 lval = (__typeof__(lval)) _res; \
1289 } while (0)
1290
1291 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1292 do { \
1293 volatile OrigFn _orig = (orig); \
1294 volatile unsigned long _argvec[3]; \
1295 volatile unsigned long _res; \
1296 _argvec[0] = (unsigned long)_orig.nraddr; \
1297 _argvec[1] = (unsigned long)(arg1); \
1298 _argvec[2] = (unsigned long)(arg2); \
1299 __asm__ volatile( \
1300 VALGRIND_ALIGN_STACK \
1301 "subl $8, %%esp\n\t" \
1302 "pushl 8(%%eax)\n\t" \
1303 "pushl 4(%%eax)\n\t" \
1304 "movl (%%eax), %%eax\n\t" \
1305 VALGRIND_CALL_NOREDIR_EAX \
1306 VALGRIND_RESTORE_STACK \
1307 : "=a" (_res) \
1308 : "a" (&_argvec[0]) \
1309 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1310 ); \
1311 lval = (__typeof__(lval)) _res; \
1312 } while (0)
1313
1314 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1315 do { \
1316 volatile OrigFn _orig = (orig); \
1317 volatile unsigned long _argvec[4]; \
1318 volatile unsigned long _res; \
1319 _argvec[0] = (unsigned long)_orig.nraddr; \
1320 _argvec[1] = (unsigned long)(arg1); \
1321 _argvec[2] = (unsigned long)(arg2); \
1322 _argvec[3] = (unsigned long)(arg3); \
1323 __asm__ volatile( \
1324 VALGRIND_ALIGN_STACK \
1325 "subl $4, %%esp\n\t" \
1326 "pushl 12(%%eax)\n\t" \
1327 "pushl 8(%%eax)\n\t" \
1328 "pushl 4(%%eax)\n\t" \
1329 "movl (%%eax), %%eax\n\t" \
1330 VALGRIND_CALL_NOREDIR_EAX \
1331 VALGRIND_RESTORE_STACK \
1332 : "=a" (_res) \
1333 : "a" (&_argvec[0]) \
1334 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1335 ); \
1336 lval = (__typeof__(lval)) _res; \
1337 } while (0)
1338
1339 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1340 do { \
1341 volatile OrigFn _orig = (orig); \
1342 volatile unsigned long _argvec[5]; \
1343 volatile unsigned long _res; \
1344 _argvec[0] = (unsigned long)_orig.nraddr; \
1345 _argvec[1] = (unsigned long)(arg1); \
1346 _argvec[2] = (unsigned long)(arg2); \
1347 _argvec[3] = (unsigned long)(arg3); \
1348 _argvec[4] = (unsigned long)(arg4); \
1349 __asm__ volatile( \
1350 VALGRIND_ALIGN_STACK \
1351 "pushl 16(%%eax)\n\t" \
1352 "pushl 12(%%eax)\n\t" \
1353 "pushl 8(%%eax)\n\t" \
1354 "pushl 4(%%eax)\n\t" \
1355 "movl (%%eax), %%eax\n\t" \
1356 VALGRIND_CALL_NOREDIR_EAX \
1357 VALGRIND_RESTORE_STACK \
1358 : "=a" (_res) \
1359 : "a" (&_argvec[0]) \
1360 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1361 ); \
1362 lval = (__typeof__(lval)) _res; \
1363 } while (0)
1364
1365 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1366 do { \
1367 volatile OrigFn _orig = (orig); \
1368 volatile unsigned long _argvec[6]; \
1369 volatile unsigned long _res; \
1370 _argvec[0] = (unsigned long)_orig.nraddr; \
1371 _argvec[1] = (unsigned long)(arg1); \
1372 _argvec[2] = (unsigned long)(arg2); \
1373 _argvec[3] = (unsigned long)(arg3); \
1374 _argvec[4] = (unsigned long)(arg4); \
1375 _argvec[5] = (unsigned long)(arg5); \
1376 __asm__ volatile( \
1377 VALGRIND_ALIGN_STACK \
1378 "subl $12, %%esp\n\t" \
1379 "pushl 20(%%eax)\n\t" \
1380 "pushl 16(%%eax)\n\t" \
1381 "pushl 12(%%eax)\n\t" \
1382 "pushl 8(%%eax)\n\t" \
1383 "pushl 4(%%eax)\n\t" \
1384 "movl (%%eax), %%eax\n\t" \
1385 VALGRIND_CALL_NOREDIR_EAX \
1386 VALGRIND_RESTORE_STACK \
1387 : "=a" (_res) \
1388 : "a" (&_argvec[0]) \
1389 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1390 ); \
1391 lval = (__typeof__(lval)) _res; \
1392 } while (0)
1393
1394 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1395 do { \
1396 volatile OrigFn _orig = (orig); \
1397 volatile unsigned long _argvec[7]; \
1398 volatile unsigned long _res; \
1399 _argvec[0] = (unsigned long)_orig.nraddr; \
1400 _argvec[1] = (unsigned long)(arg1); \
1401 _argvec[2] = (unsigned long)(arg2); \
1402 _argvec[3] = (unsigned long)(arg3); \
1403 _argvec[4] = (unsigned long)(arg4); \
1404 _argvec[5] = (unsigned long)(arg5); \
1405 _argvec[6] = (unsigned long)(arg6); \
1406 __asm__ volatile( \
1407 VALGRIND_ALIGN_STACK \
1408 "subl $8, %%esp\n\t" \
1409 "pushl 24(%%eax)\n\t" \
1410 "pushl 20(%%eax)\n\t" \
1411 "pushl 16(%%eax)\n\t" \
1412 "pushl 12(%%eax)\n\t" \
1413 "pushl 8(%%eax)\n\t" \
1414 "pushl 4(%%eax)\n\t" \
1415 "movl (%%eax), %%eax\n\t" \
1416 VALGRIND_CALL_NOREDIR_EAX \
1417 VALGRIND_RESTORE_STACK \
1418 : "=a" (_res) \
1419 : "a" (&_argvec[0]) \
1420 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1421 ); \
1422 lval = (__typeof__(lval)) _res; \
1423 } while (0)
1424
1425 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1426 arg7) \
1427 do { \
1428 volatile OrigFn _orig = (orig); \
1429 volatile unsigned long _argvec[8]; \
1430 volatile unsigned long _res; \
1431 _argvec[0] = (unsigned long)_orig.nraddr; \
1432 _argvec[1] = (unsigned long)(arg1); \
1433 _argvec[2] = (unsigned long)(arg2); \
1434 _argvec[3] = (unsigned long)(arg3); \
1435 _argvec[4] = (unsigned long)(arg4); \
1436 _argvec[5] = (unsigned long)(arg5); \
1437 _argvec[6] = (unsigned long)(arg6); \
1438 _argvec[7] = (unsigned long)(arg7); \
1439 __asm__ volatile( \
1440 VALGRIND_ALIGN_STACK \
1441 "subl $4, %%esp\n\t" \
1442 "pushl 28(%%eax)\n\t" \
1443 "pushl 24(%%eax)\n\t" \
1444 "pushl 20(%%eax)\n\t" \
1445 "pushl 16(%%eax)\n\t" \
1446 "pushl 12(%%eax)\n\t" \
1447 "pushl 8(%%eax)\n\t" \
1448 "pushl 4(%%eax)\n\t" \
1449 "movl (%%eax), %%eax\n\t" \
1450 VALGRIND_CALL_NOREDIR_EAX \
1451 VALGRIND_RESTORE_STACK \
1452 : "=a" (_res) \
1453 : "a" (&_argvec[0]) \
1454 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1455 ); \
1456 lval = (__typeof__(lval)) _res; \
1457 } while (0)
1458
1459 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1460 arg7,arg8) \
1461 do { \
1462 volatile OrigFn _orig = (orig); \
1463 volatile unsigned long _argvec[9]; \
1464 volatile unsigned long _res; \
1465 _argvec[0] = (unsigned long)_orig.nraddr; \
1466 _argvec[1] = (unsigned long)(arg1); \
1467 _argvec[2] = (unsigned long)(arg2); \
1468 _argvec[3] = (unsigned long)(arg3); \
1469 _argvec[4] = (unsigned long)(arg4); \
1470 _argvec[5] = (unsigned long)(arg5); \
1471 _argvec[6] = (unsigned long)(arg6); \
1472 _argvec[7] = (unsigned long)(arg7); \
1473 _argvec[8] = (unsigned long)(arg8); \
1474 __asm__ volatile( \
1475 VALGRIND_ALIGN_STACK \
1476 "pushl 32(%%eax)\n\t" \
1477 "pushl 28(%%eax)\n\t" \
1478 "pushl 24(%%eax)\n\t" \
1479 "pushl 20(%%eax)\n\t" \
1480 "pushl 16(%%eax)\n\t" \
1481 "pushl 12(%%eax)\n\t" \
1482 "pushl 8(%%eax)\n\t" \
1483 "pushl 4(%%eax)\n\t" \
1484 "movl (%%eax), %%eax\n\t" \
1485 VALGRIND_CALL_NOREDIR_EAX \
1486 VALGRIND_RESTORE_STACK \
1487 : "=a" (_res) \
1488 : "a" (&_argvec[0]) \
1489 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1490 ); \
1491 lval = (__typeof__(lval)) _res; \
1492 } while (0)
1493
1494 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1495 arg7,arg8,arg9) \
1496 do { \
1497 volatile OrigFn _orig = (orig); \
1498 volatile unsigned long _argvec[10]; \
1499 volatile unsigned long _res; \
1500 _argvec[0] = (unsigned long)_orig.nraddr; \
1501 _argvec[1] = (unsigned long)(arg1); \
1502 _argvec[2] = (unsigned long)(arg2); \
1503 _argvec[3] = (unsigned long)(arg3); \
1504 _argvec[4] = (unsigned long)(arg4); \
1505 _argvec[5] = (unsigned long)(arg5); \
1506 _argvec[6] = (unsigned long)(arg6); \
1507 _argvec[7] = (unsigned long)(arg7); \
1508 _argvec[8] = (unsigned long)(arg8); \
1509 _argvec[9] = (unsigned long)(arg9); \
1510 __asm__ volatile( \
1511 VALGRIND_ALIGN_STACK \
1512 "subl $12, %%esp\n\t" \
1513 "pushl 36(%%eax)\n\t" \
1514 "pushl 32(%%eax)\n\t" \
1515 "pushl 28(%%eax)\n\t" \
1516 "pushl 24(%%eax)\n\t" \
1517 "pushl 20(%%eax)\n\t" \
1518 "pushl 16(%%eax)\n\t" \
1519 "pushl 12(%%eax)\n\t" \
1520 "pushl 8(%%eax)\n\t" \
1521 "pushl 4(%%eax)\n\t" \
1522 "movl (%%eax), %%eax\n\t" \
1523 VALGRIND_CALL_NOREDIR_EAX \
1524 VALGRIND_RESTORE_STACK \
1525 : "=a" (_res) \
1526 : "a" (&_argvec[0]) \
1527 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1528 ); \
1529 lval = (__typeof__(lval)) _res; \
1530 } while (0)
1531
1532 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1533 arg7,arg8,arg9,arg10) \
1534 do { \
1535 volatile OrigFn _orig = (orig); \
1536 volatile unsigned long _argvec[11]; \
1537 volatile unsigned long _res; \
1538 _argvec[0] = (unsigned long)_orig.nraddr; \
1539 _argvec[1] = (unsigned long)(arg1); \
1540 _argvec[2] = (unsigned long)(arg2); \
1541 _argvec[3] = (unsigned long)(arg3); \
1542 _argvec[4] = (unsigned long)(arg4); \
1543 _argvec[5] = (unsigned long)(arg5); \
1544 _argvec[6] = (unsigned long)(arg6); \
1545 _argvec[7] = (unsigned long)(arg7); \
1546 _argvec[8] = (unsigned long)(arg8); \
1547 _argvec[9] = (unsigned long)(arg9); \
1548 _argvec[10] = (unsigned long)(arg10); \
1549 __asm__ volatile( \
1550 VALGRIND_ALIGN_STACK \
1551 "subl $8, %%esp\n\t" \
1552 "pushl 40(%%eax)\n\t" \
1553 "pushl 36(%%eax)\n\t" \
1554 "pushl 32(%%eax)\n\t" \
1555 "pushl 28(%%eax)\n\t" \
1556 "pushl 24(%%eax)\n\t" \
1557 "pushl 20(%%eax)\n\t" \
1558 "pushl 16(%%eax)\n\t" \
1559 "pushl 12(%%eax)\n\t" \
1560 "pushl 8(%%eax)\n\t" \
1561 "pushl 4(%%eax)\n\t" \
1562 "movl (%%eax), %%eax\n\t" \
1563 VALGRIND_CALL_NOREDIR_EAX \
1564 VALGRIND_RESTORE_STACK \
1565 : "=a" (_res) \
1566 : "a" (&_argvec[0]) \
1567 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1568 ); \
1569 lval = (__typeof__(lval)) _res; \
1570 } while (0)
1571
1572 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1573 arg6,arg7,arg8,arg9,arg10, \
1574 arg11) \
1575 do { \
1576 volatile OrigFn _orig = (orig); \
1577 volatile unsigned long _argvec[12]; \
1578 volatile unsigned long _res; \
1579 _argvec[0] = (unsigned long)_orig.nraddr; \
1580 _argvec[1] = (unsigned long)(arg1); \
1581 _argvec[2] = (unsigned long)(arg2); \
1582 _argvec[3] = (unsigned long)(arg3); \
1583 _argvec[4] = (unsigned long)(arg4); \
1584 _argvec[5] = (unsigned long)(arg5); \
1585 _argvec[6] = (unsigned long)(arg6); \
1586 _argvec[7] = (unsigned long)(arg7); \
1587 _argvec[8] = (unsigned long)(arg8); \
1588 _argvec[9] = (unsigned long)(arg9); \
1589 _argvec[10] = (unsigned long)(arg10); \
1590 _argvec[11] = (unsigned long)(arg11); \
1591 __asm__ volatile( \
1592 VALGRIND_ALIGN_STACK \
1593 "subl $4, %%esp\n\t" \
1594 "pushl 44(%%eax)\n\t" \
1595 "pushl 40(%%eax)\n\t" \
1596 "pushl 36(%%eax)\n\t" \
1597 "pushl 32(%%eax)\n\t" \
1598 "pushl 28(%%eax)\n\t" \
1599 "pushl 24(%%eax)\n\t" \
1600 "pushl 20(%%eax)\n\t" \
1601 "pushl 16(%%eax)\n\t" \
1602 "pushl 12(%%eax)\n\t" \
1603 "pushl 8(%%eax)\n\t" \
1604 "pushl 4(%%eax)\n\t" \
1605 "movl (%%eax), %%eax\n\t" \
1606 VALGRIND_CALL_NOREDIR_EAX \
1607 VALGRIND_RESTORE_STACK \
1608 : "=a" (_res) \
1609 : "a" (&_argvec[0]) \
1610 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1611 ); \
1612 lval = (__typeof__(lval)) _res; \
1613 } while (0)
1614
1615 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1616 arg6,arg7,arg8,arg9,arg10, \
1617 arg11,arg12) \
1618 do { \
1619 volatile OrigFn _orig = (orig); \
1620 volatile unsigned long _argvec[13]; \
1621 volatile unsigned long _res; \
1622 _argvec[0] = (unsigned long)_orig.nraddr; \
1623 _argvec[1] = (unsigned long)(arg1); \
1624 _argvec[2] = (unsigned long)(arg2); \
1625 _argvec[3] = (unsigned long)(arg3); \
1626 _argvec[4] = (unsigned long)(arg4); \
1627 _argvec[5] = (unsigned long)(arg5); \
1628 _argvec[6] = (unsigned long)(arg6); \
1629 _argvec[7] = (unsigned long)(arg7); \
1630 _argvec[8] = (unsigned long)(arg8); \
1631 _argvec[9] = (unsigned long)(arg9); \
1632 _argvec[10] = (unsigned long)(arg10); \
1633 _argvec[11] = (unsigned long)(arg11); \
1634 _argvec[12] = (unsigned long)(arg12); \
1635 __asm__ volatile( \
1636 VALGRIND_ALIGN_STACK \
1637 "pushl 48(%%eax)\n\t" \
1638 "pushl 44(%%eax)\n\t" \
1639 "pushl 40(%%eax)\n\t" \
1640 "pushl 36(%%eax)\n\t" \
1641 "pushl 32(%%eax)\n\t" \
1642 "pushl 28(%%eax)\n\t" \
1643 "pushl 24(%%eax)\n\t" \
1644 "pushl 20(%%eax)\n\t" \
1645 "pushl 16(%%eax)\n\t" \
1646 "pushl 12(%%eax)\n\t" \
1647 "pushl 8(%%eax)\n\t" \
1648 "pushl 4(%%eax)\n\t" \
1649 "movl (%%eax), %%eax\n\t" \
1650 VALGRIND_CALL_NOREDIR_EAX \
1651 VALGRIND_RESTORE_STACK \
1652 : "=a" (_res) \
1653 : "a" (&_argvec[0]) \
1654 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1655 ); \
1656 lval = (__typeof__(lval)) _res; \
1657 } while (0)
1658
1659 #endif

#if defined(PLAT_amd64_linux)  ||  defined(PLAT_amd64_darwin)     \
    ||  defined(PLAT_amd64_solaris) || defined(PLAT_amd64_freebsd)

/* Caller-saved registers trashed by the hidden call; %rax is the
   explicit output of each asm block below and so is not listed. */
#define __CALLER_SAVED_REGS "rcx", "rdx", "rsi",                  \
                            "rdi", "r8", "r9", "r10", "r11"

#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
#  define __FRAME_POINTER                                         \
      ,"r"(__builtin_dwarf_cfa())
#  define VALGRIND_CFI_PROLOGUE                                   \
      "movq %%rbp, %%r15\n\t"                                     \
      "movq %2, %%rbp\n\t"                                        \
      ".cfi_remember_state\n\t"                                   \
      ".cfi_def_cfa rbp, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "movq %%r15, %%rbp\n\t"                                     \
      ".cfi_restore_state\n\t"
#else
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE
#  define VALGRIND_CFI_EPILOGUE
#endif
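
/* The CFI prologue/epilogue above keep DWARF unwind information
   consistent across the hidden call: %rbp is temporarily pointed at
   the caller's CFA (passed in via __FRAME_POINTER) and the .cfi
   directives describe that, so stack traces taken inside the callee
   still unwind through the wrapper.  %r15 holds the saved %rbp, which
   is why it appears in the clobber lists below. */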

#define VALGRIND_ALIGN_STACK                                      \
      "movq %%rsp,%%r14\n\t"                                      \
      "andq $0xfffffffffffffff0,%%rsp\n\t"
#define VALGRIND_RESTORE_STACK                                    \
      "movq %%r14,%%rsp\n\t"
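
/* The original %rsp is parked in %r14 while the stack is realigned to
   16 bytes (hence %r14 in the clobber lists).  Each CALL_FN_* macro
   below also drops %rsp by 128 bytes before pushing arguments, so the
   hidden call cannot scribble on the caller's red zone (the 128 bytes
   below %rsp that the AMD64 ELF ABI lets leaf code use). */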
1778 #define CALL_FN_W_v(lval, orig) \
1779 do { \
1780 volatile OrigFn _orig = (orig); \
1781 volatile unsigned long _argvec[1]; \
1782 volatile unsigned long _res; \
1783 _argvec[0] = (unsigned long)_orig.nraddr; \
1784 __asm__ volatile( \
1785 VALGRIND_CFI_PROLOGUE \
1786 VALGRIND_ALIGN_STACK \
1787 "subq $128,%%rsp\n\t" \
1788 "movq (%%rax), %%rax\n\t" \
1789 VALGRIND_CALL_NOREDIR_RAX \
1790 VALGRIND_RESTORE_STACK \
1791 VALGRIND_CFI_EPILOGUE \
1792 : "=a" (_res) \
1793 : "a" (&_argvec[0]) __FRAME_POINTER \
1794 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1795 ); \
1796 lval = (__typeof__(lval)) _res; \
1797 } while (0)
1798
1799 #define CALL_FN_W_W(lval, orig, arg1) \
1800 do { \
1801 volatile OrigFn _orig = (orig); \
1802 volatile unsigned long _argvec[2]; \
1803 volatile unsigned long _res; \
1804 _argvec[0] = (unsigned long)_orig.nraddr; \
1805 _argvec[1] = (unsigned long)(arg1); \
1806 __asm__ volatile( \
1807 VALGRIND_CFI_PROLOGUE \
1808 VALGRIND_ALIGN_STACK \
1809 "subq $128,%%rsp\n\t" \
1810 "movq 8(%%rax), %%rdi\n\t" \
1811 "movq (%%rax), %%rax\n\t" \
1812 VALGRIND_CALL_NOREDIR_RAX \
1813 VALGRIND_RESTORE_STACK \
1814 VALGRIND_CFI_EPILOGUE \
1815 : "=a" (_res) \
1816 : "a" (&_argvec[0]) __FRAME_POINTER \
1817 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1818 ); \
1819 lval = (__typeof__(lval)) _res; \
1820 } while (0)
1821
1822 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1823 do { \
1824 volatile OrigFn _orig = (orig); \
1825 volatile unsigned long _argvec[3]; \
1826 volatile unsigned long _res; \
1827 _argvec[0] = (unsigned long)_orig.nraddr; \
1828 _argvec[1] = (unsigned long)(arg1); \
1829 _argvec[2] = (unsigned long)(arg2); \
1830 __asm__ volatile( \
1831 VALGRIND_CFI_PROLOGUE \
1832 VALGRIND_ALIGN_STACK \
1833 "subq $128,%%rsp\n\t" \
1834 "movq 16(%%rax), %%rsi\n\t" \
1835 "movq 8(%%rax), %%rdi\n\t" \
1836 "movq (%%rax), %%rax\n\t" \
1837 VALGRIND_CALL_NOREDIR_RAX \
1838 VALGRIND_RESTORE_STACK \
1839 VALGRIND_CFI_EPILOGUE \
1840 : "=a" (_res) \
1841 : "a" (&_argvec[0]) __FRAME_POINTER \
1842 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1843 ); \
1844 lval = (__typeof__(lval)) _res; \
1845 } while (0)
1846
1847 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1848 do { \
1849 volatile OrigFn _orig = (orig); \
1850 volatile unsigned long _argvec[4]; \
1851 volatile unsigned long _res; \
1852 _argvec[0] = (unsigned long)_orig.nraddr; \
1853 _argvec[1] = (unsigned long)(arg1); \
1854 _argvec[2] = (unsigned long)(arg2); \
1855 _argvec[3] = (unsigned long)(arg3); \
1856 __asm__ volatile( \
1857 VALGRIND_CFI_PROLOGUE \
1858 VALGRIND_ALIGN_STACK \
1859 "subq $128,%%rsp\n\t" \
1860 "movq 24(%%rax), %%rdx\n\t" \
1861 "movq 16(%%rax), %%rsi\n\t" \
1862 "movq 8(%%rax), %%rdi\n\t" \
1863 "movq (%%rax), %%rax\n\t" \
1864 VALGRIND_CALL_NOREDIR_RAX \
1865 VALGRIND_RESTORE_STACK \
1866 VALGRIND_CFI_EPILOGUE \
1867 : "=a" (_res) \
1868 : "a" (&_argvec[0]) __FRAME_POINTER \
1869 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1870 ); \
1871 lval = (__typeof__(lval)) _res; \
1872 } while (0)
1873
1874 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1875 do { \
1876 volatile OrigFn _orig = (orig); \
1877 volatile unsigned long _argvec[5]; \
1878 volatile unsigned long _res; \
1879 _argvec[0] = (unsigned long)_orig.nraddr; \
1880 _argvec[1] = (unsigned long)(arg1); \
1881 _argvec[2] = (unsigned long)(arg2); \
1882 _argvec[3] = (unsigned long)(arg3); \
1883 _argvec[4] = (unsigned long)(arg4); \
1884 __asm__ volatile( \
1885 VALGRIND_CFI_PROLOGUE \
1886 VALGRIND_ALIGN_STACK \
1887 "subq $128,%%rsp\n\t" \
1888 "movq 32(%%rax), %%rcx\n\t" \
1889 "movq 24(%%rax), %%rdx\n\t" \
1890 "movq 16(%%rax), %%rsi\n\t" \
1891 "movq 8(%%rax), %%rdi\n\t" \
1892 "movq (%%rax), %%rax\n\t" \
1893 VALGRIND_CALL_NOREDIR_RAX \
1894 VALGRIND_RESTORE_STACK \
1895 VALGRIND_CFI_EPILOGUE \
1896 : "=a" (_res) \
1897 : "a" (&_argvec[0]) __FRAME_POINTER \
1898 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1899 ); \
1900 lval = (__typeof__(lval)) _res; \
1901 } while (0)
1902
1903 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1904 do { \
1905 volatile OrigFn _orig = (orig); \
1906 volatile unsigned long _argvec[6]; \
1907 volatile unsigned long _res; \
1908 _argvec[0] = (unsigned long)_orig.nraddr; \
1909 _argvec[1] = (unsigned long)(arg1); \
1910 _argvec[2] = (unsigned long)(arg2); \
1911 _argvec[3] = (unsigned long)(arg3); \
1912 _argvec[4] = (unsigned long)(arg4); \
1913 _argvec[5] = (unsigned long)(arg5); \
1914 __asm__ volatile( \
1915 VALGRIND_CFI_PROLOGUE \
1916 VALGRIND_ALIGN_STACK \
1917 "subq $128,%%rsp\n\t" \
1918 "movq 40(%%rax), %%r8\n\t" \
1919 "movq 32(%%rax), %%rcx\n\t" \
1920 "movq 24(%%rax), %%rdx\n\t" \
1921 "movq 16(%%rax), %%rsi\n\t" \
1922 "movq 8(%%rax), %%rdi\n\t" \
1923 "movq (%%rax), %%rax\n\t" \
1924 VALGRIND_CALL_NOREDIR_RAX \
1925 VALGRIND_RESTORE_STACK \
1926 VALGRIND_CFI_EPILOGUE \
1927 : "=a" (_res) \
1928 : "a" (&_argvec[0]) __FRAME_POINTER \
1929 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1930 ); \
1931 lval = (__typeof__(lval)) _res; \
1932 } while (0)
1933
1934 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1935 do { \
1936 volatile OrigFn _orig = (orig); \
1937 volatile unsigned long _argvec[7]; \
1938 volatile unsigned long _res; \
1939 _argvec[0] = (unsigned long)_orig.nraddr; \
1940 _argvec[1] = (unsigned long)(arg1); \
1941 _argvec[2] = (unsigned long)(arg2); \
1942 _argvec[3] = (unsigned long)(arg3); \
1943 _argvec[4] = (unsigned long)(arg4); \
1944 _argvec[5] = (unsigned long)(arg5); \
1945 _argvec[6] = (unsigned long)(arg6); \
1946 __asm__ volatile( \
1947 VALGRIND_CFI_PROLOGUE \
1948 VALGRIND_ALIGN_STACK \
1949 "subq $128,%%rsp\n\t" \
1950 "movq 48(%%rax), %%r9\n\t" \
1951 "movq 40(%%rax), %%r8\n\t" \
1952 "movq 32(%%rax), %%rcx\n\t" \
1953 "movq 24(%%rax), %%rdx\n\t" \
1954 "movq 16(%%rax), %%rsi\n\t" \
1955 "movq 8(%%rax), %%rdi\n\t" \
1956 "movq (%%rax), %%rax\n\t" \
1957 VALGRIND_CALL_NOREDIR_RAX \
1958 VALGRIND_RESTORE_STACK \
1959 VALGRIND_CFI_EPILOGUE \
1960 : "=a" (_res) \
1961 : "a" (&_argvec[0]) __FRAME_POINTER \
1962 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1963 ); \
1964 lval = (__typeof__(lval)) _res; \
1965 } while (0)
1966
1967 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1968 arg7) \
1969 do { \
1970 volatile OrigFn _orig = (orig); \
1971 volatile unsigned long _argvec[8]; \
1972 volatile unsigned long _res; \
1973 _argvec[0] = (unsigned long)_orig.nraddr; \
1974 _argvec[1] = (unsigned long)(arg1); \
1975 _argvec[2] = (unsigned long)(arg2); \
1976 _argvec[3] = (unsigned long)(arg3); \
1977 _argvec[4] = (unsigned long)(arg4); \
1978 _argvec[5] = (unsigned long)(arg5); \
1979 _argvec[6] = (unsigned long)(arg6); \
1980 _argvec[7] = (unsigned long)(arg7); \
1981 __asm__ volatile( \
1982 VALGRIND_CFI_PROLOGUE \
1983 VALGRIND_ALIGN_STACK \
1984 "subq $136,%%rsp\n\t" \
1985 "pushq 56(%%rax)\n\t" \
1986 "movq 48(%%rax), %%r9\n\t" \
1987 "movq 40(%%rax), %%r8\n\t" \
1988 "movq 32(%%rax), %%rcx\n\t" \
1989 "movq 24(%%rax), %%rdx\n\t" \
1990 "movq 16(%%rax), %%rsi\n\t" \
1991 "movq 8(%%rax), %%rdi\n\t" \
1992 "movq (%%rax), %%rax\n\t" \
1993 VALGRIND_CALL_NOREDIR_RAX \
1994 VALGRIND_RESTORE_STACK \
1995 VALGRIND_CFI_EPILOGUE \
1996 : "=a" (_res) \
1997 : "a" (&_argvec[0]) __FRAME_POINTER \
1998 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1999 ); \
2000 lval = (__typeof__(lval)) _res; \
2001 } while (0)
2002
2003 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2004 arg7,arg8) \
2005 do { \
2006 volatile OrigFn _orig = (orig); \
2007 volatile unsigned long _argvec[9]; \
2008 volatile unsigned long _res; \
2009 _argvec[0] = (unsigned long)_orig.nraddr; \
2010 _argvec[1] = (unsigned long)(arg1); \
2011 _argvec[2] = (unsigned long)(arg2); \
2012 _argvec[3] = (unsigned long)(arg3); \
2013 _argvec[4] = (unsigned long)(arg4); \
2014 _argvec[5] = (unsigned long)(arg5); \
2015 _argvec[6] = (unsigned long)(arg6); \
2016 _argvec[7] = (unsigned long)(arg7); \
2017 _argvec[8] = (unsigned long)(arg8); \
2018 __asm__ volatile( \
2019 VALGRIND_CFI_PROLOGUE \
2020 VALGRIND_ALIGN_STACK \
2021 "subq $128,%%rsp\n\t" \
2022 "pushq 64(%%rax)\n\t" \
2023 "pushq 56(%%rax)\n\t" \
2024 "movq 48(%%rax), %%r9\n\t" \
2025 "movq 40(%%rax), %%r8\n\t" \
2026 "movq 32(%%rax), %%rcx\n\t" \
2027 "movq 24(%%rax), %%rdx\n\t" \
2028 "movq 16(%%rax), %%rsi\n\t" \
2029 "movq 8(%%rax), %%rdi\n\t" \
2030 "movq (%%rax), %%rax\n\t" \
2031 VALGRIND_CALL_NOREDIR_RAX \
2032 VALGRIND_RESTORE_STACK \
2033 VALGRIND_CFI_EPILOGUE \
2034 : "=a" (_res) \
2035 : "a" (&_argvec[0]) __FRAME_POINTER \
2036 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2037 ); \
2038 lval = (__typeof__(lval)) _res; \
2039 } while (0)
2040
2041 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2042 arg7,arg8,arg9) \
2043 do { \
2044 volatile OrigFn _orig = (orig); \
2045 volatile unsigned long _argvec[10]; \
2046 volatile unsigned long _res; \
2047 _argvec[0] = (unsigned long)_orig.nraddr; \
2048 _argvec[1] = (unsigned long)(arg1); \
2049 _argvec[2] = (unsigned long)(arg2); \
2050 _argvec[3] = (unsigned long)(arg3); \
2051 _argvec[4] = (unsigned long)(arg4); \
2052 _argvec[5] = (unsigned long)(arg5); \
2053 _argvec[6] = (unsigned long)(arg6); \
2054 _argvec[7] = (unsigned long)(arg7); \
2055 _argvec[8] = (unsigned long)(arg8); \
2056 _argvec[9] = (unsigned long)(arg9); \
2057 __asm__ volatile( \
2058 VALGRIND_CFI_PROLOGUE \
2059 VALGRIND_ALIGN_STACK \
2060 "subq $136,%%rsp\n\t" \
2061 "pushq 72(%%rax)\n\t" \
2062 "pushq 64(%%rax)\n\t" \
2063 "pushq 56(%%rax)\n\t" \
2064 "movq 48(%%rax), %%r9\n\t" \
2065 "movq 40(%%rax), %%r8\n\t" \
2066 "movq 32(%%rax), %%rcx\n\t" \
2067 "movq 24(%%rax), %%rdx\n\t" \
2068 "movq 16(%%rax), %%rsi\n\t" \
2069 "movq 8(%%rax), %%rdi\n\t" \
2070 "movq (%%rax), %%rax\n\t" \
2071 VALGRIND_CALL_NOREDIR_RAX \
2072 VALGRIND_RESTORE_STACK \
2073 VALGRIND_CFI_EPILOGUE \
2074 : "=a" (_res) \
2075 : "a" (&_argvec[0]) __FRAME_POINTER \
2076 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2077 ); \
2078 lval = (__typeof__(lval)) _res; \
2079 } while (0)
2080
2081 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2082 arg7,arg8,arg9,arg10) \
2083 do { \
2084 volatile OrigFn _orig = (orig); \
2085 volatile unsigned long _argvec[11]; \
2086 volatile unsigned long _res; \
2087 _argvec[0] = (unsigned long)_orig.nraddr; \
2088 _argvec[1] = (unsigned long)(arg1); \
2089 _argvec[2] = (unsigned long)(arg2); \
2090 _argvec[3] = (unsigned long)(arg3); \
2091 _argvec[4] = (unsigned long)(arg4); \
2092 _argvec[5] = (unsigned long)(arg5); \
2093 _argvec[6] = (unsigned long)(arg6); \
2094 _argvec[7] = (unsigned long)(arg7); \
2095 _argvec[8] = (unsigned long)(arg8); \
2096 _argvec[9] = (unsigned long)(arg9); \
2097 _argvec[10] = (unsigned long)(arg10); \
2098 __asm__ volatile( \
2099 VALGRIND_CFI_PROLOGUE \
2100 VALGRIND_ALIGN_STACK \
2101 "subq $128,%%rsp\n\t" \
2102 "pushq 80(%%rax)\n\t" \
2103 "pushq 72(%%rax)\n\t" \
2104 "pushq 64(%%rax)\n\t" \
2105 "pushq 56(%%rax)\n\t" \
2106 "movq 48(%%rax), %%r9\n\t" \
2107 "movq 40(%%rax), %%r8\n\t" \
2108 "movq 32(%%rax), %%rcx\n\t" \
2109 "movq 24(%%rax), %%rdx\n\t" \
2110 "movq 16(%%rax), %%rsi\n\t" \
2111 "movq 8(%%rax), %%rdi\n\t" \
2112 "movq (%%rax), %%rax\n\t" \
2113 VALGRIND_CALL_NOREDIR_RAX \
2114 VALGRIND_RESTORE_STACK \
2115 VALGRIND_CFI_EPILOGUE \
2116 : "=a" (_res) \
2117 : "a" (&_argvec[0]) __FRAME_POINTER \
2118 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2119 ); \
2120 lval = (__typeof__(lval)) _res; \
2121 } while (0)
2122
2123 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2124 arg7,arg8,arg9,arg10,arg11) \
2125 do { \
2126 volatile OrigFn _orig = (orig); \
2127 volatile unsigned long _argvec[12]; \
2128 volatile unsigned long _res; \
2129 _argvec[0] = (unsigned long)_orig.nraddr; \
2130 _argvec[1] = (unsigned long)(arg1); \
2131 _argvec[2] = (unsigned long)(arg2); \
2132 _argvec[3] = (unsigned long)(arg3); \
2133 _argvec[4] = (unsigned long)(arg4); \
2134 _argvec[5] = (unsigned long)(arg5); \
2135 _argvec[6] = (unsigned long)(arg6); \
2136 _argvec[7] = (unsigned long)(arg7); \
2137 _argvec[8] = (unsigned long)(arg8); \
2138 _argvec[9] = (unsigned long)(arg9); \
2139 _argvec[10] = (unsigned long)(arg10); \
2140 _argvec[11] = (unsigned long)(arg11); \
2141 __asm__ volatile( \
2142 VALGRIND_CFI_PROLOGUE \
2143 VALGRIND_ALIGN_STACK \
2144 "subq $136,%%rsp\n\t" \
2145 "pushq 88(%%rax)\n\t" \
2146 "pushq 80(%%rax)\n\t" \
2147 "pushq 72(%%rax)\n\t" \
2148 "pushq 64(%%rax)\n\t" \
2149 "pushq 56(%%rax)\n\t" \
2150 "movq 48(%%rax), %%r9\n\t" \
2151 "movq 40(%%rax), %%r8\n\t" \
2152 "movq 32(%%rax), %%rcx\n\t" \
2153 "movq 24(%%rax), %%rdx\n\t" \
2154 "movq 16(%%rax), %%rsi\n\t" \
2155 "movq 8(%%rax), %%rdi\n\t" \
2156 "movq (%%rax), %%rax\n\t" \
2157 VALGRIND_CALL_NOREDIR_RAX \
2158 VALGRIND_RESTORE_STACK \
2159 VALGRIND_CFI_EPILOGUE \
2160 : "=a" (_res) \
2161 : "a" (&_argvec[0]) __FRAME_POINTER \
2162 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2163 ); \
2164 lval = (__typeof__(lval)) _res; \
2165 } while (0)
2166
2167 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2168 arg7,arg8,arg9,arg10,arg11,arg12) \
2169 do { \
2170 volatile OrigFn _orig = (orig); \
2171 volatile unsigned long _argvec[13]; \
2172 volatile unsigned long _res; \
2173 _argvec[0] = (unsigned long)_orig.nraddr; \
2174 _argvec[1] = (unsigned long)(arg1); \
2175 _argvec[2] = (unsigned long)(arg2); \
2176 _argvec[3] = (unsigned long)(arg3); \
2177 _argvec[4] = (unsigned long)(arg4); \
2178 _argvec[5] = (unsigned long)(arg5); \
2179 _argvec[6] = (unsigned long)(arg6); \
2180 _argvec[7] = (unsigned long)(arg7); \
2181 _argvec[8] = (unsigned long)(arg8); \
2182 _argvec[9] = (unsigned long)(arg9); \
2183 _argvec[10] = (unsigned long)(arg10); \
2184 _argvec[11] = (unsigned long)(arg11); \
2185 _argvec[12] = (unsigned long)(arg12); \
2186 __asm__ volatile( \
2187 VALGRIND_CFI_PROLOGUE \
2188 VALGRIND_ALIGN_STACK \
2189 "subq $128,%%rsp\n\t" \
2190 "pushq 96(%%rax)\n\t" \
2191 "pushq 88(%%rax)\n\t" \
2192 "pushq 80(%%rax)\n\t" \
2193 "pushq 72(%%rax)\n\t" \
2194 "pushq 64(%%rax)\n\t" \
2195 "pushq 56(%%rax)\n\t" \
2196 "movq 48(%%rax), %%r9\n\t" \
2197 "movq 40(%%rax), %%r8\n\t" \
2198 "movq 32(%%rax), %%rcx\n\t" \
2199 "movq 24(%%rax), %%rdx\n\t" \
2200 "movq 16(%%rax), %%rsi\n\t" \
2201 "movq 8(%%rax), %%rdi\n\t" \
2202 "movq (%%rax), %%rax\n\t" \
2203 VALGRIND_CALL_NOREDIR_RAX \
2204 VALGRIND_RESTORE_STACK \
2205 VALGRIND_CFI_EPILOGUE \
2206 : "=a" (_res) \
2207 : "a" (&_argvec[0]) __FRAME_POINTER \
2208 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2209 ); \
2210 lval = (__typeof__(lval)) _res; \
2211 } while (0)
2212
2213 #endif
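/* A minimal usage sketch for the CALL_FN_W_* macros defined in this
   file (one family per supported platform).  It assumes a source
   file that includes this header; the wrapper-naming macro
   I_WRAP_SONAME_FNNAME_ZU, the OrigFn type and VALGRIND_GET_ORIG_FN
   are declared elsewhere in this header, and the wrapped function
   'foo' and the soname tag 'NONE' below are purely illustrative.

      int I_WRAP_SONAME_FNNAME_ZU(NONE, foo) ( int x, int y )
      {
         int    result;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);         // fetch the real entry point
         CALL_FN_W_WW(result, fn, x, y);   // call it without redirection
         return result;
      }

   CALL_FN_W_WW passes two word-sized arguments and collects a
   word-sized result; the other variants differ only in arity. */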
2214
2215
2216
2217 #if defined(PLAT_ppc32_linux)
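/* The ppc32-linux macros below follow the usual ppc32 ELF calling
   convention: the first eight word arguments travel in r3..r10, any
   further arguments are spilled onto the stack, and the result comes
   back in r3.  r11 is used as a scratch register, holding first the
   address of _argvec[] and then the target's entry address, and r28
   preserves the caller's stack pointer across the realigned call,
   hence its presence in the clobber lists. */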
2242
2243 #define __CALLER_SAVED_REGS \
2244 "lr", "ctr", "xer", \
2245 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2246 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2247 "r11", "r12", "r13"
2253
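/* VALGRIND_ALIGN_STACK saves the current stack pointer in r28 and
   then clears the low four bits of r1 (rlwinm 1,1,0,0,27), forcing
   16-byte alignment for the duration of the call;
   VALGRIND_RESTORE_STACK simply copies the saved value back. */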
2254 #define VALGRIND_ALIGN_STACK \
2255 "mr 28,1\n\t" \
2256 "rlwinm 1,1,0,0,27\n\t"
2257 #define VALGRIND_RESTORE_STACK \
2258 "mr 1,28\n\t"
2259
2260
2261
2262
2263 #define CALL_FN_W_v(lval, orig) \
2264 do { \
2265 volatile OrigFn _orig = (orig); \
2266 volatile unsigned long _argvec[1]; \
2267 volatile unsigned long _res; \
2268 _argvec[0] = (unsigned long)_orig.nraddr; \
2269 __asm__ volatile( \
2270 VALGRIND_ALIGN_STACK \
2271 "mr 11,%1\n\t" \
2272 "lwz 11,0(11)\n\t" \
2273 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2274 VALGRIND_RESTORE_STACK \
2275 "mr %0,3" \
2276 : "=r" (_res) \
2277 : "r" (&_argvec[0]) \
2278 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2279 ); \
2280 lval = (__typeof__(lval)) _res; \
2281 } while (0)
2282
2283 #define CALL_FN_W_W(lval, orig, arg1) \
2284 do { \
2285 volatile OrigFn _orig = (orig); \
2286 volatile unsigned long _argvec[2]; \
2287 volatile unsigned long _res; \
2288 _argvec[0] = (unsigned long)_orig.nraddr; \
2289 _argvec[1] = (unsigned long)arg1; \
2290 __asm__ volatile( \
2291 VALGRIND_ALIGN_STACK \
2292 "mr 11,%1\n\t" \
2293 "lwz 3,4(11)\n\t" \
2294 "lwz 11,0(11)\n\t" \
2295 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2296 VALGRIND_RESTORE_STACK \
2297 "mr %0,3" \
2298 : "=r" (_res) \
2299 : "r" (&_argvec[0]) \
2300 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2301 ); \
2302 lval = (__typeof__(lval)) _res; \
2303 } while (0)
2304
2305 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2306 do { \
2307 volatile OrigFn _orig = (orig); \
2308 volatile unsigned long _argvec[3]; \
2309 volatile unsigned long _res; \
2310 _argvec[0] = (unsigned long)_orig.nraddr; \
2311 _argvec[1] = (unsigned long)arg1; \
2312 _argvec[2] = (unsigned long)arg2; \
2313 __asm__ volatile( \
2314 VALGRIND_ALIGN_STACK \
2315 "mr 11,%1\n\t" \
2316 "lwz 3,4(11)\n\t" \
2317 "lwz 4,8(11)\n\t" \
2318 "lwz 11,0(11)\n\t" \
2319 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2320 VALGRIND_RESTORE_STACK \
2321 "mr %0,3" \
2322 : "=r" (_res) \
2323 : "r" (&_argvec[0]) \
2324 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2325 ); \
2326 lval = (__typeof__(lval)) _res; \
2327 } while (0)
2328
2329 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2330 do { \
2331 volatile OrigFn _orig = (orig); \
2332 volatile unsigned long _argvec[4]; \
2333 volatile unsigned long _res; \
2334 _argvec[0] = (unsigned long)_orig.nraddr; \
2335 _argvec[1] = (unsigned long)arg1; \
2336 _argvec[2] = (unsigned long)arg2; \
2337 _argvec[3] = (unsigned long)arg3; \
2338 __asm__ volatile( \
2339 VALGRIND_ALIGN_STACK \
2340 "mr 11,%1\n\t" \
2341 "lwz 3,4(11)\n\t" \
2342 "lwz 4,8(11)\n\t" \
2343 "lwz 5,12(11)\n\t" \
2344 "lwz 11,0(11)\n\t" \
2345 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2346 VALGRIND_RESTORE_STACK \
2347 "mr %0,3" \
2348 : "=r" (_res) \
2349 : "r" (&_argvec[0]) \
2350 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2351 ); \
2352 lval = (__typeof__(lval)) _res; \
2353 } while (0)
2354
2355 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2356 do { \
2357 volatile OrigFn _orig = (orig); \
2358 volatile unsigned long _argvec[5]; \
2359 volatile unsigned long _res; \
2360 _argvec[0] = (unsigned long)_orig.nraddr; \
2361 _argvec[1] = (unsigned long)arg1; \
2362 _argvec[2] = (unsigned long)arg2; \
2363 _argvec[3] = (unsigned long)arg3; \
2364 _argvec[4] = (unsigned long)arg4; \
2365 __asm__ volatile( \
2366 VALGRIND_ALIGN_STACK \
2367 "mr 11,%1\n\t" \
2368 "lwz 3,4(11)\n\t" \
2369 "lwz 4,8(11)\n\t" \
2370 "lwz 5,12(11)\n\t" \
2371 "lwz 6,16(11)\n\t" \
2372 "lwz 11,0(11)\n\t" \
2373 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2374 VALGRIND_RESTORE_STACK \
2375 "mr %0,3" \
2376 : "=r" (_res) \
2377 : "r" (&_argvec[0]) \
2378 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2379 ); \
2380 lval = (__typeof__(lval)) _res; \
2381 } while (0)
2382
2383 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2384 do { \
2385 volatile OrigFn _orig = (orig); \
2386 volatile unsigned long _argvec[6]; \
2387 volatile unsigned long _res; \
2388 _argvec[0] = (unsigned long)_orig.nraddr; \
2389 _argvec[1] = (unsigned long)arg1; \
2390 _argvec[2] = (unsigned long)arg2; \
2391 _argvec[3] = (unsigned long)arg3; \
2392 _argvec[4] = (unsigned long)arg4; \
2393 _argvec[5] = (unsigned long)arg5; \
2394 __asm__ volatile( \
2395 VALGRIND_ALIGN_STACK \
2396 "mr 11,%1\n\t" \
2397 "lwz 3,4(11)\n\t" \
2398 "lwz 4,8(11)\n\t" \
2399 "lwz 5,12(11)\n\t" \
2400 "lwz 6,16(11)\n\t" \
2401 "lwz 7,20(11)\n\t" \
2402 "lwz 11,0(11)\n\t" \
2403 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2404 VALGRIND_RESTORE_STACK \
2405 "mr %0,3" \
2406 : "=r" (_res) \
2407 : "r" (&_argvec[0]) \
2408 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2409 ); \
2410 lval = (__typeof__(lval)) _res; \
2411 } while (0)
2412
2413 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2414 do { \
2415 volatile OrigFn _orig = (orig); \
2416 volatile unsigned long _argvec[7]; \
2417 volatile unsigned long _res; \
2418 _argvec[0] = (unsigned long)_orig.nraddr; \
2419 _argvec[1] = (unsigned long)arg1; \
2420 _argvec[2] = (unsigned long)arg2; \
2421 _argvec[3] = (unsigned long)arg3; \
2422 _argvec[4] = (unsigned long)arg4; \
2423 _argvec[5] = (unsigned long)arg5; \
2424 _argvec[6] = (unsigned long)arg6; \
2425 __asm__ volatile( \
2426 VALGRIND_ALIGN_STACK \
2427 "mr 11,%1\n\t" \
2428 "lwz 3,4(11)\n\t" \
2429 "lwz 4,8(11)\n\t" \
2430 "lwz 5,12(11)\n\t" \
2431 "lwz 6,16(11)\n\t" \
2432 "lwz 7,20(11)\n\t" \
2433 "lwz 8,24(11)\n\t" \
2434 "lwz 11,0(11)\n\t" \
2435 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2436 VALGRIND_RESTORE_STACK \
2437 "mr %0,3" \
2438 : "=r" (_res) \
2439 : "r" (&_argvec[0]) \
2440 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2441 ); \
2442 lval = (__typeof__(lval)) _res; \
2443 } while (0)
2444
2445 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2446 arg7) \
2447 do { \
2448 volatile OrigFn _orig = (orig); \
2449 volatile unsigned long _argvec[8]; \
2450 volatile unsigned long _res; \
2451 _argvec[0] = (unsigned long)_orig.nraddr; \
2452 _argvec[1] = (unsigned long)arg1; \
2453 _argvec[2] = (unsigned long)arg2; \
2454 _argvec[3] = (unsigned long)arg3; \
2455 _argvec[4] = (unsigned long)arg4; \
2456 _argvec[5] = (unsigned long)arg5; \
2457 _argvec[6] = (unsigned long)arg6; \
2458 _argvec[7] = (unsigned long)arg7; \
2459 __asm__ volatile( \
2460 VALGRIND_ALIGN_STACK \
2461 "mr 11,%1\n\t" \
2462 "lwz 3,4(11)\n\t" \
2463 "lwz 4,8(11)\n\t" \
2464 "lwz 5,12(11)\n\t" \
2465 "lwz 6,16(11)\n\t" \
2466 "lwz 7,20(11)\n\t" \
2467 "lwz 8,24(11)\n\t" \
2468 "lwz 9,28(11)\n\t" \
2469 "lwz 11,0(11)\n\t" \
2470 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2471 VALGRIND_RESTORE_STACK \
2472 "mr %0,3" \
2473 : "=r" (_res) \
2474 : "r" (&_argvec[0]) \
2475 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2476 ); \
2477 lval = (__typeof__(lval)) _res; \
2478 } while (0)
2479
2480 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2481 arg7,arg8) \
2482 do { \
2483 volatile OrigFn _orig = (orig); \
2484 volatile unsigned long _argvec[9]; \
2485 volatile unsigned long _res; \
2486 _argvec[0] = (unsigned long)_orig.nraddr; \
2487 _argvec[1] = (unsigned long)arg1; \
2488 _argvec[2] = (unsigned long)arg2; \
2489 _argvec[3] = (unsigned long)arg3; \
2490 _argvec[4] = (unsigned long)arg4; \
2491 _argvec[5] = (unsigned long)arg5; \
2492 _argvec[6] = (unsigned long)arg6; \
2493 _argvec[7] = (unsigned long)arg7; \
2494 _argvec[8] = (unsigned long)arg8; \
2495 __asm__ volatile( \
2496 VALGRIND_ALIGN_STACK \
2497 "mr 11,%1\n\t" \
2498 "lwz 3,4(11)\n\t" \
2499 "lwz 4,8(11)\n\t" \
2500 "lwz 5,12(11)\n\t" \
2501 "lwz 6,16(11)\n\t" \
2502 "lwz 7,20(11)\n\t" \
2503 "lwz 8,24(11)\n\t" \
2504 "lwz 9,28(11)\n\t" \
2505 "lwz 10,32(11)\n\t" \
2506 "lwz 11,0(11)\n\t" \
2507 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2508 VALGRIND_RESTORE_STACK \
2509 "mr %0,3" \
2510 : "=r" (_res) \
2511 : "r" (&_argvec[0]) \
2512 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2513 ); \
2514 lval = (__typeof__(lval)) _res; \
2515 } while (0)
2516
2517 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2518 arg7,arg8,arg9) \
2519 do { \
2520 volatile OrigFn _orig = (orig); \
2521 volatile unsigned long _argvec[10]; \
2522 volatile unsigned long _res; \
2523 _argvec[0] = (unsigned long)_orig.nraddr; \
2524 _argvec[1] = (unsigned long)arg1; \
2525 _argvec[2] = (unsigned long)arg2; \
2526 _argvec[3] = (unsigned long)arg3; \
2527 _argvec[4] = (unsigned long)arg4; \
2528 _argvec[5] = (unsigned long)arg5; \
2529 _argvec[6] = (unsigned long)arg6; \
2530 _argvec[7] = (unsigned long)arg7; \
2531 _argvec[8] = (unsigned long)arg8; \
2532 _argvec[9] = (unsigned long)arg9; \
2533 __asm__ volatile( \
2534 VALGRIND_ALIGN_STACK \
2535 "mr 11,%1\n\t" \
2536 "addi 1,1,-16\n\t" \
2537 \
2538 "lwz 3,36(11)\n\t" \
2539 "stw 3,8(1)\n\t" \
2540 \
2541 "lwz 3,4(11)\n\t" \
2542 "lwz 4,8(11)\n\t" \
2543 "lwz 5,12(11)\n\t" \
2544 "lwz 6,16(11)\n\t" \
2545 "lwz 7,20(11)\n\t" \
2546 "lwz 8,24(11)\n\t" \
2547 "lwz 9,28(11)\n\t" \
2548 "lwz 10,32(11)\n\t" \
2549 "lwz 11,0(11)\n\t" \
2550 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2551 VALGRIND_RESTORE_STACK \
2552 "mr %0,3" \
2553 : "=r" (_res) \
2554 : "r" (&_argvec[0]) \
2555 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2556 ); \
2557 lval = (__typeof__(lval)) _res; \
2558 } while (0)
2559
2560 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2561 arg7,arg8,arg9,arg10) \
2562 do { \
2563 volatile OrigFn _orig = (orig); \
2564 volatile unsigned long _argvec[11]; \
2565 volatile unsigned long _res; \
2566 _argvec[0] = (unsigned long)_orig.nraddr; \
2567 _argvec[1] = (unsigned long)arg1; \
2568 _argvec[2] = (unsigned long)arg2; \
2569 _argvec[3] = (unsigned long)arg3; \
2570 _argvec[4] = (unsigned long)arg4; \
2571 _argvec[5] = (unsigned long)arg5; \
2572 _argvec[6] = (unsigned long)arg6; \
2573 _argvec[7] = (unsigned long)arg7; \
2574 _argvec[8] = (unsigned long)arg8; \
2575 _argvec[9] = (unsigned long)arg9; \
2576 _argvec[10] = (unsigned long)arg10; \
2577 __asm__ volatile( \
2578 VALGRIND_ALIGN_STACK \
2579 "mr 11,%1\n\t" \
2580 "addi 1,1,-16\n\t" \
2581 \
2582 "lwz 3,40(11)\n\t" \
2583 "stw 3,12(1)\n\t" \
2584 \
2585 "lwz 3,36(11)\n\t" \
2586 "stw 3,8(1)\n\t" \
2587 \
2588 "lwz 3,4(11)\n\t" \
2589 "lwz 4,8(11)\n\t" \
2590 "lwz 5,12(11)\n\t" \
2591 "lwz 6,16(11)\n\t" \
2592 "lwz 7,20(11)\n\t" \
2593 "lwz 8,24(11)\n\t" \
2594 "lwz 9,28(11)\n\t" \
2595 "lwz 10,32(11)\n\t" \
2596 "lwz 11,0(11)\n\t" \
2597 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2598 VALGRIND_RESTORE_STACK \
2599 "mr %0,3" \
2600 : "=r" (_res) \
2601 : "r" (&_argvec[0]) \
2602 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2603 ); \
2604 lval = (__typeof__(lval)) _res; \
2605 } while (0)
2606
2607 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2608 arg7,arg8,arg9,arg10,arg11) \
2609 do { \
2610 volatile OrigFn _orig = (orig); \
2611 volatile unsigned long _argvec[12]; \
2612 volatile unsigned long _res; \
2613 _argvec[0] = (unsigned long)_orig.nraddr; \
2614 _argvec[1] = (unsigned long)arg1; \
2615 _argvec[2] = (unsigned long)arg2; \
2616 _argvec[3] = (unsigned long)arg3; \
2617 _argvec[4] = (unsigned long)arg4; \
2618 _argvec[5] = (unsigned long)arg5; \
2619 _argvec[6] = (unsigned long)arg6; \
2620 _argvec[7] = (unsigned long)arg7; \
2621 _argvec[8] = (unsigned long)arg8; \
2622 _argvec[9] = (unsigned long)arg9; \
2623 _argvec[10] = (unsigned long)arg10; \
2624 _argvec[11] = (unsigned long)arg11; \
2625 __asm__ volatile( \
2626 VALGRIND_ALIGN_STACK \
2627 "mr 11,%1\n\t" \
2628 "addi 1,1,-32\n\t" \
2629 \
2630 "lwz 3,44(11)\n\t" \
2631 "stw 3,16(1)\n\t" \
2632 \
2633 "lwz 3,40(11)\n\t" \
2634 "stw 3,12(1)\n\t" \
2635 \
2636 "lwz 3,36(11)\n\t" \
2637 "stw 3,8(1)\n\t" \
2638 \
2639 "lwz 3,4(11)\n\t" \
2640 "lwz 4,8(11)\n\t" \
2641 "lwz 5,12(11)\n\t" \
2642 "lwz 6,16(11)\n\t" \
2643 "lwz 7,20(11)\n\t" \
2644 "lwz 8,24(11)\n\t" \
2645 "lwz 9,28(11)\n\t" \
2646 "lwz 10,32(11)\n\t" \
2647 "lwz 11,0(11)\n\t" \
2648 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2649 VALGRIND_RESTORE_STACK \
2650 "mr %0,3" \
2651 : "=r" (_res) \
2652 : "r" (&_argvec[0]) \
2653 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2654 ); \
2655 lval = (__typeof__(lval)) _res; \
2656 } while (0)
2657
2658 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2659 arg7,arg8,arg9,arg10,arg11,arg12) \
2660 do { \
2661 volatile OrigFn _orig = (orig); \
2662 volatile unsigned long _argvec[13]; \
2663 volatile unsigned long _res; \
2664 _argvec[0] = (unsigned long)_orig.nraddr; \
2665 _argvec[1] = (unsigned long)arg1; \
2666 _argvec[2] = (unsigned long)arg2; \
2667 _argvec[3] = (unsigned long)arg3; \
2668 _argvec[4] = (unsigned long)arg4; \
2669 _argvec[5] = (unsigned long)arg5; \
2670 _argvec[6] = (unsigned long)arg6; \
2671 _argvec[7] = (unsigned long)arg7; \
2672 _argvec[8] = (unsigned long)arg8; \
2673 _argvec[9] = (unsigned long)arg9; \
2674 _argvec[10] = (unsigned long)arg10; \
2675 _argvec[11] = (unsigned long)arg11; \
2676 _argvec[12] = (unsigned long)arg12; \
2677 __asm__ volatile( \
2678 VALGRIND_ALIGN_STACK \
2679 "mr 11,%1\n\t" \
2680 "addi 1,1,-32\n\t" \
2681 \
2682 "lwz 3,48(11)\n\t" \
2683 "stw 3,20(1)\n\t" \
2684 \
2685 "lwz 3,44(11)\n\t" \
2686 "stw 3,16(1)\n\t" \
2687 \
2688 "lwz 3,40(11)\n\t" \
2689 "stw 3,12(1)\n\t" \
2690 \
2691 "lwz 3,36(11)\n\t" \
2692 "stw 3,8(1)\n\t" \
2693 \
2694 "lwz 3,4(11)\n\t" \
2695 "lwz 4,8(11)\n\t" \
2696 "lwz 5,12(11)\n\t" \
2697 "lwz 6,16(11)\n\t" \
2698 "lwz 7,20(11)\n\t" \
2699 "lwz 8,24(11)\n\t" \
2700 "lwz 9,28(11)\n\t" \
2701 "lwz 10,32(11)\n\t" \
2702 "lwz 11,0(11)\n\t" \
2703 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2704 VALGRIND_RESTORE_STACK \
2705 "mr %0,3" \
2706 : "=r" (_res) \
2707 : "r" (&_argvec[0]) \
2708 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2709 ); \
2710 lval = (__typeof__(lval)) _res; \
2711 } while (0)
2712
2713 #endif
2714
2715
2716
2717 #if defined(PLAT_ppc64be_linux)
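/* The ppc64be (ELFv1) macros below also have to cope with the TOC:
   _argvec[1] carries the r2 (TOC pointer) value associated with the
   target and _argvec[2] its entry address, and the asm is handed
   &_argvec[2] in r11.  The caller's r2 is stashed at -16(r11), the
   target's TOC is loaded from -8(r11) and its entry address from
   0(r11), and the caller's r2 is restored after the call.  The first
   eight word arguments go in r3..r10, any further arguments in the
   stack parameter save area, and the result is returned in r3; r28
   preserves the caller's stack pointer. */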
2718
2719
2720
2721
2722 #define __CALLER_SAVED_REGS \
2723 "lr", "ctr", "xer", \
2724 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2725 "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2726 "r11", "r12", "r13"
2732
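/* As on ppc32, the stack pointer is saved in r28 and rounded down to
   a 16-byte boundary (rldicr 1,1,0,59 clears the low four bits)
   before the call, then restored afterwards. */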
2733 #define VALGRIND_ALIGN_STACK \
2734 "mr 28,1\n\t" \
2735 "rldicr 1,1,0,59\n\t"
2736 #define VALGRIND_RESTORE_STACK \
2737 "mr 1,28\n\t"
2738
2739
2740
2741
2742 #define CALL_FN_W_v(lval, orig) \
2743 do { \
2744 volatile OrigFn _orig = (orig); \
2745 volatile unsigned long _argvec[3+0]; \
2746 volatile unsigned long _res; \
2747 \
2748 _argvec[1] = (unsigned long)_orig.r2; \
2749 _argvec[2] = (unsigned long)_orig.nraddr; \
2750 __asm__ volatile( \
2751 VALGRIND_ALIGN_STACK \
2752 "mr 11,%1\n\t" \
2753 "std 2,-16(11)\n\t" \
2754 "ld 2,-8(11)\n\t" \
2755 "ld 11, 0(11)\n\t" \
2756 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2757 "mr 11,%1\n\t" \
2758 "mr %0,3\n\t" \
2759 "ld 2,-16(11)\n\t" \
2760 VALGRIND_RESTORE_STACK \
2761 : "=r" (_res) \
2762 : "r" (&_argvec[2]) \
2763 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2764 ); \
2765 lval = (__typeof__(lval)) _res; \
2766 } while (0)
2767
2768 #define CALL_FN_W_W(lval, orig, arg1) \
2769 do { \
2770 volatile OrigFn _orig = (orig); \
2771 volatile unsigned long _argvec[3+1]; \
2772 volatile unsigned long _res; \
2773 \
2774 _argvec[1] = (unsigned long)_orig.r2; \
2775 _argvec[2] = (unsigned long)_orig.nraddr; \
2776 _argvec[2+1] = (unsigned long)arg1; \
2777 __asm__ volatile( \
2778 VALGRIND_ALIGN_STACK \
2779 "mr 11,%1\n\t" \
2780 "std 2,-16(11)\n\t" \
2781 "ld 2,-8(11)\n\t" \
2782 "ld 3, 8(11)\n\t" \
2783 "ld 11, 0(11)\n\t" \
2784 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2785 "mr 11,%1\n\t" \
2786 "mr %0,3\n\t" \
2787 "ld 2,-16(11)\n\t" \
2788 VALGRIND_RESTORE_STACK \
2789 : "=r" (_res) \
2790 : "r" (&_argvec[2]) \
2791 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2792 ); \
2793 lval = (__typeof__(lval)) _res; \
2794 } while (0)
2795
2796 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2797 do { \
2798 volatile OrigFn _orig = (orig); \
2799 volatile unsigned long _argvec[3+2]; \
2800 volatile unsigned long _res; \
2801 \
2802 _argvec[1] = (unsigned long)_orig.r2; \
2803 _argvec[2] = (unsigned long)_orig.nraddr; \
2804 _argvec[2+1] = (unsigned long)arg1; \
2805 _argvec[2+2] = (unsigned long)arg2; \
2806 __asm__ volatile( \
2807 VALGRIND_ALIGN_STACK \
2808 "mr 11,%1\n\t" \
2809 "std 2,-16(11)\n\t" \
2810 "ld 2,-8(11)\n\t" \
2811 "ld 3, 8(11)\n\t" \
2812 "ld 4, 16(11)\n\t" \
2813 "ld 11, 0(11)\n\t" \
2814 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2815 "mr 11,%1\n\t" \
2816 "mr %0,3\n\t" \
2817 "ld 2,-16(11)\n\t" \
2818 VALGRIND_RESTORE_STACK \
2819 : "=r" (_res) \
2820 : "r" (&_argvec[2]) \
2821 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2822 ); \
2823 lval = (__typeof__(lval)) _res; \
2824 } while (0)
2825
2826 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2827 do { \
2828 volatile OrigFn _orig = (orig); \
2829 volatile unsigned long _argvec[3+3]; \
2830 volatile unsigned long _res; \
2831 \
2832 _argvec[1] = (unsigned long)_orig.r2; \
2833 _argvec[2] = (unsigned long)_orig.nraddr; \
2834 _argvec[2+1] = (unsigned long)arg1; \
2835 _argvec[2+2] = (unsigned long)arg2; \
2836 _argvec[2+3] = (unsigned long)arg3; \
2837 __asm__ volatile( \
2838 VALGRIND_ALIGN_STACK \
2839 "mr 11,%1\n\t" \
2840 "std 2,-16(11)\n\t" \
2841 "ld 2,-8(11)\n\t" \
2842 "ld 3, 8(11)\n\t" \
2843 "ld 4, 16(11)\n\t" \
2844 "ld 5, 24(11)\n\t" \
2845 "ld 11, 0(11)\n\t" \
2846 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2847 "mr 11,%1\n\t" \
2848 "mr %0,3\n\t" \
2849 "ld 2,-16(11)\n\t" \
2850 VALGRIND_RESTORE_STACK \
2851 : "=r" (_res) \
2852 : "r" (&_argvec[2]) \
2853 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2854 ); \
2855 lval = (__typeof__(lval)) _res; \
2856 } while (0)
2857
2858 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2859 do { \
2860 volatile OrigFn _orig = (orig); \
2861 volatile unsigned long _argvec[3+4]; \
2862 volatile unsigned long _res; \
2863 \
2864 _argvec[1] = (unsigned long)_orig.r2; \
2865 _argvec[2] = (unsigned long)_orig.nraddr; \
2866 _argvec[2+1] = (unsigned long)arg1; \
2867 _argvec[2+2] = (unsigned long)arg2; \
2868 _argvec[2+3] = (unsigned long)arg3; \
2869 _argvec[2+4] = (unsigned long)arg4; \
2870 __asm__ volatile( \
2871 VALGRIND_ALIGN_STACK \
2872 "mr 11,%1\n\t" \
2873 "std 2,-16(11)\n\t" \
2874 "ld 2,-8(11)\n\t" \
2875 "ld 3, 8(11)\n\t" \
2876 "ld 4, 16(11)\n\t" \
2877 "ld 5, 24(11)\n\t" \
2878 "ld 6, 32(11)\n\t" \
2879 "ld 11, 0(11)\n\t" \
2880 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2881 "mr 11,%1\n\t" \
2882 "mr %0,3\n\t" \
2883 "ld 2,-16(11)\n\t" \
2884 VALGRIND_RESTORE_STACK \
2885 : "=r" (_res) \
2886 : "r" (&_argvec[2]) \
2887 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2888 ); \
2889 lval = (__typeof__(lval)) _res; \
2890 } while (0)
2891
2892 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2893 do { \
2894 volatile OrigFn _orig = (orig); \
2895 volatile unsigned long _argvec[3+5]; \
2896 volatile unsigned long _res; \
2897 \
2898 _argvec[1] = (unsigned long)_orig.r2; \
2899 _argvec[2] = (unsigned long)_orig.nraddr; \
2900 _argvec[2+1] = (unsigned long)arg1; \
2901 _argvec[2+2] = (unsigned long)arg2; \
2902 _argvec[2+3] = (unsigned long)arg3; \
2903 _argvec[2+4] = (unsigned long)arg4; \
2904 _argvec[2+5] = (unsigned long)arg5; \
2905 __asm__ volatile( \
2906 VALGRIND_ALIGN_STACK \
2907 "mr 11,%1\n\t" \
2908 "std 2,-16(11)\n\t" \
2909 "ld 2,-8(11)\n\t" \
2910 "ld 3, 8(11)\n\t" \
2911 "ld 4, 16(11)\n\t" \
2912 "ld 5, 24(11)\n\t" \
2913 "ld 6, 32(11)\n\t" \
2914 "ld 7, 40(11)\n\t" \
2915 "ld 11, 0(11)\n\t" \
2916 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2917 "mr 11,%1\n\t" \
2918 "mr %0,3\n\t" \
2919 "ld 2,-16(11)\n\t" \
2920 VALGRIND_RESTORE_STACK \
2921 : "=r" (_res) \
2922 : "r" (&_argvec[2]) \
2923 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2924 ); \
2925 lval = (__typeof__(lval)) _res; \
2926 } while (0)
2927
2928 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2929 do { \
2930 volatile OrigFn _orig = (orig); \
2931 volatile unsigned long _argvec[3+6]; \
2932 volatile unsigned long _res; \
2933 \
2934 _argvec[1] = (unsigned long)_orig.r2; \
2935 _argvec[2] = (unsigned long)_orig.nraddr; \
2936 _argvec[2+1] = (unsigned long)arg1; \
2937 _argvec[2+2] = (unsigned long)arg2; \
2938 _argvec[2+3] = (unsigned long)arg3; \
2939 _argvec[2+4] = (unsigned long)arg4; \
2940 _argvec[2+5] = (unsigned long)arg5; \
2941 _argvec[2+6] = (unsigned long)arg6; \
2942 __asm__ volatile( \
2943 VALGRIND_ALIGN_STACK \
2944 "mr 11,%1\n\t" \
2945 "std 2,-16(11)\n\t" \
2946 "ld 2,-8(11)\n\t" \
2947 "ld 3, 8(11)\n\t" \
2948 "ld 4, 16(11)\n\t" \
2949 "ld 5, 24(11)\n\t" \
2950 "ld 6, 32(11)\n\t" \
2951 "ld 7, 40(11)\n\t" \
2952 "ld 8, 48(11)\n\t" \
2953 "ld 11, 0(11)\n\t" \
2954 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2955 "mr 11,%1\n\t" \
2956 "mr %0,3\n\t" \
2957 "ld 2,-16(11)\n\t" \
2958 VALGRIND_RESTORE_STACK \
2959 : "=r" (_res) \
2960 : "r" (&_argvec[2]) \
2961 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2962 ); \
2963 lval = (__typeof__(lval)) _res; \
2964 } while (0)
2965
2966 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2967 arg7) \
2968 do { \
2969 volatile OrigFn _orig = (orig); \
2970 volatile unsigned long _argvec[3+7]; \
2971 volatile unsigned long _res; \
2972 \
2973 _argvec[1] = (unsigned long)_orig.r2; \
2974 _argvec[2] = (unsigned long)_orig.nraddr; \
2975 _argvec[2+1] = (unsigned long)arg1; \
2976 _argvec[2+2] = (unsigned long)arg2; \
2977 _argvec[2+3] = (unsigned long)arg3; \
2978 _argvec[2+4] = (unsigned long)arg4; \
2979 _argvec[2+5] = (unsigned long)arg5; \
2980 _argvec[2+6] = (unsigned long)arg6; \
2981 _argvec[2+7] = (unsigned long)arg7; \
2982 __asm__ volatile( \
2983 VALGRIND_ALIGN_STACK \
2984 "mr 11,%1\n\t" \
2985 "std 2,-16(11)\n\t" \
2986 "ld 2,-8(11)\n\t" \
2987 "ld 3, 8(11)\n\t" \
2988 "ld 4, 16(11)\n\t" \
2989 "ld 5, 24(11)\n\t" \
2990 "ld 6, 32(11)\n\t" \
2991 "ld 7, 40(11)\n\t" \
2992 "ld 8, 48(11)\n\t" \
2993 "ld 9, 56(11)\n\t" \
2994 "ld 11, 0(11)\n\t" \
2995 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2996 "mr 11,%1\n\t" \
2997 "mr %0,3\n\t" \
2998 "ld 2,-16(11)\n\t" \
2999 VALGRIND_RESTORE_STACK \
3000 : "=r" (_res) \
3001 : "r" (&_argvec[2]) \
3002 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3003 ); \
3004 lval = (__typeof__(lval)) _res; \
3005 } while (0)
3006
3007 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3008 arg7,arg8) \
3009 do { \
3010 volatile OrigFn _orig = (orig); \
3011 volatile unsigned long _argvec[3+8]; \
3012 volatile unsigned long _res; \
3013 \
3014 _argvec[1] = (unsigned long)_orig.r2; \
3015 _argvec[2] = (unsigned long)_orig.nraddr; \
3016 _argvec[2+1] = (unsigned long)arg1; \
3017 _argvec[2+2] = (unsigned long)arg2; \
3018 _argvec[2+3] = (unsigned long)arg3; \
3019 _argvec[2+4] = (unsigned long)arg4; \
3020 _argvec[2+5] = (unsigned long)arg5; \
3021 _argvec[2+6] = (unsigned long)arg6; \
3022 _argvec[2+7] = (unsigned long)arg7; \
3023 _argvec[2+8] = (unsigned long)arg8; \
3024 __asm__ volatile( \
3025 VALGRIND_ALIGN_STACK \
3026 "mr 11,%1\n\t" \
3027 "std 2,-16(11)\n\t" \
3028 "ld 2,-8(11)\n\t" \
3029 "ld 3, 8(11)\n\t" \
3030 "ld 4, 16(11)\n\t" \
3031 "ld 5, 24(11)\n\t" \
3032 "ld 6, 32(11)\n\t" \
3033 "ld 7, 40(11)\n\t" \
3034 "ld 8, 48(11)\n\t" \
3035 "ld 9, 56(11)\n\t" \
3036 "ld 10, 64(11)\n\t" \
3037 "ld 11, 0(11)\n\t" \
3038 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3039 "mr 11,%1\n\t" \
3040 "mr %0,3\n\t" \
3041 "ld 2,-16(11)\n\t" \
3042 VALGRIND_RESTORE_STACK \
3043 : "=r" (_res) \
3044 : "r" (&_argvec[2]) \
3045 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3046 ); \
3047 lval = (__typeof__(lval)) _res; \
3048 } while (0)
3049
3050 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3051 arg7,arg8,arg9) \
3052 do { \
3053 volatile OrigFn _orig = (orig); \
3054 volatile unsigned long _argvec[3+9]; \
3055 volatile unsigned long _res; \
3056 \
3057 _argvec[1] = (unsigned long)_orig.r2; \
3058 _argvec[2] = (unsigned long)_orig.nraddr; \
3059 _argvec[2+1] = (unsigned long)arg1; \
3060 _argvec[2+2] = (unsigned long)arg2; \
3061 _argvec[2+3] = (unsigned long)arg3; \
3062 _argvec[2+4] = (unsigned long)arg4; \
3063 _argvec[2+5] = (unsigned long)arg5; \
3064 _argvec[2+6] = (unsigned long)arg6; \
3065 _argvec[2+7] = (unsigned long)arg7; \
3066 _argvec[2+8] = (unsigned long)arg8; \
3067 _argvec[2+9] = (unsigned long)arg9; \
3068 __asm__ volatile( \
3069 VALGRIND_ALIGN_STACK \
3070 "mr 11,%1\n\t" \
3071 "std 2,-16(11)\n\t" \
3072 "ld 2,-8(11)\n\t" \
3073 "addi 1,1,-128\n\t" \
3074 \
3075 "ld 3,72(11)\n\t" \
3076 "std 3,112(1)\n\t" \
3077 \
3078 "ld 3, 8(11)\n\t" \
3079 "ld 4, 16(11)\n\t" \
3080 "ld 5, 24(11)\n\t" \
3081 "ld 6, 32(11)\n\t" \
3082 "ld 7, 40(11)\n\t" \
3083 "ld 8, 48(11)\n\t" \
3084 "ld 9, 56(11)\n\t" \
3085 "ld 10, 64(11)\n\t" \
3086 "ld 11, 0(11)\n\t" \
3087 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3088 "mr 11,%1\n\t" \
3089 "mr %0,3\n\t" \
3090 "ld 2,-16(11)\n\t" \
3091 VALGRIND_RESTORE_STACK \
3092 : "=r" (_res) \
3093 : "r" (&_argvec[2]) \
3094 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3095 ); \
3096 lval = (__typeof__(lval)) _res; \
3097 } while (0)
3098
3099 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3100 arg7,arg8,arg9,arg10) \
3101 do { \
3102 volatile OrigFn _orig = (orig); \
3103 volatile unsigned long _argvec[3+10]; \
3104 volatile unsigned long _res; \
3105 \
3106 _argvec[1] = (unsigned long)_orig.r2; \
3107 _argvec[2] = (unsigned long)_orig.nraddr; \
3108 _argvec[2+1] = (unsigned long)arg1; \
3109 _argvec[2+2] = (unsigned long)arg2; \
3110 _argvec[2+3] = (unsigned long)arg3; \
3111 _argvec[2+4] = (unsigned long)arg4; \
3112 _argvec[2+5] = (unsigned long)arg5; \
3113 _argvec[2+6] = (unsigned long)arg6; \
3114 _argvec[2+7] = (unsigned long)arg7; \
3115 _argvec[2+8] = (unsigned long)arg8; \
3116 _argvec[2+9] = (unsigned long)arg9; \
3117 _argvec[2+10] = (unsigned long)arg10; \
3118 __asm__ volatile( \
3119 VALGRIND_ALIGN_STACK \
3120 "mr 11,%1\n\t" \
3121 "std 2,-16(11)\n\t" \
3122 "ld 2,-8(11)\n\t" \
3123 "addi 1,1,-128\n\t" \
3124 \
3125 "ld 3,80(11)\n\t" \
3126 "std 3,120(1)\n\t" \
3127 \
3128 "ld 3,72(11)\n\t" \
3129 "std 3,112(1)\n\t" \
3130 \
3131 "ld 3, 8(11)\n\t" \
3132 "ld 4, 16(11)\n\t" \
3133 "ld 5, 24(11)\n\t" \
3134 "ld 6, 32(11)\n\t" \
3135 "ld 7, 40(11)\n\t" \
3136 "ld 8, 48(11)\n\t" \
3137 "ld 9, 56(11)\n\t" \
3138 "ld 10, 64(11)\n\t" \
3139 "ld 11, 0(11)\n\t" \
3140 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3141 "mr 11,%1\n\t" \
3142 "mr %0,3\n\t" \
3143 "ld 2,-16(11)\n\t" \
3144 VALGRIND_RESTORE_STACK \
3145 : "=r" (_res) \
3146 : "r" (&_argvec[2]) \
3147 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3148 ); \
3149 lval = (__typeof__(lval)) _res; \
3150 } while (0)
3151
3152 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3153 arg7,arg8,arg9,arg10,arg11) \
3154 do { \
3155 volatile OrigFn _orig = (orig); \
3156 volatile unsigned long _argvec[3+11]; \
3157 volatile unsigned long _res; \
3158 \
3159 _argvec[1] = (unsigned long)_orig.r2; \
3160 _argvec[2] = (unsigned long)_orig.nraddr; \
3161 _argvec[2+1] = (unsigned long)arg1; \
3162 _argvec[2+2] = (unsigned long)arg2; \
3163 _argvec[2+3] = (unsigned long)arg3; \
3164 _argvec[2+4] = (unsigned long)arg4; \
3165 _argvec[2+5] = (unsigned long)arg5; \
3166 _argvec[2+6] = (unsigned long)arg6; \
3167 _argvec[2+7] = (unsigned long)arg7; \
3168 _argvec[2+8] = (unsigned long)arg8; \
3169 _argvec[2+9] = (unsigned long)arg9; \
3170 _argvec[2+10] = (unsigned long)arg10; \
3171 _argvec[2+11] = (unsigned long)arg11; \
3172 __asm__ volatile( \
3173 VALGRIND_ALIGN_STACK \
3174 "mr 11,%1\n\t" \
3175 "std 2,-16(11)\n\t" \
3176 "ld 2,-8(11)\n\t" \
3177 "addi 1,1,-144\n\t" \
3178 \
3179 "ld 3,88(11)\n\t" \
3180 "std 3,128(1)\n\t" \
3181 \
3182 "ld 3,80(11)\n\t" \
3183 "std 3,120(1)\n\t" \
3184 \
3185 "ld 3,72(11)\n\t" \
3186 "std 3,112(1)\n\t" \
3187 \
3188 "ld 3, 8(11)\n\t" \
3189 "ld 4, 16(11)\n\t" \
3190 "ld 5, 24(11)\n\t" \
3191 "ld 6, 32(11)\n\t" \
3192 "ld 7, 40(11)\n\t" \
3193 "ld 8, 48(11)\n\t" \
3194 "ld 9, 56(11)\n\t" \
3195 "ld 10, 64(11)\n\t" \
3196 "ld 11, 0(11)\n\t" \
3197 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3198 "mr 11,%1\n\t" \
3199 "mr %0,3\n\t" \
3200 "ld 2,-16(11)\n\t" \
3201 VALGRIND_RESTORE_STACK \
3202 : "=r" (_res) \
3203 : "r" (&_argvec[2]) \
3204 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3205 ); \
3206 lval = (__typeof__(lval)) _res; \
3207 } while (0)
3208
3209 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3210 arg7,arg8,arg9,arg10,arg11,arg12) \
3211 do { \
3212 volatile OrigFn _orig = (orig); \
3213 volatile unsigned long _argvec[3+12]; \
3214 volatile unsigned long _res; \
3215 \
3216 _argvec[1] = (unsigned long)_orig.r2; \
3217 _argvec[2] = (unsigned long)_orig.nraddr; \
3218 _argvec[2+1] = (unsigned long)arg1; \
3219 _argvec[2+2] = (unsigned long)arg2; \
3220 _argvec[2+3] = (unsigned long)arg3; \
3221 _argvec[2+4] = (unsigned long)arg4; \
3222 _argvec[2+5] = (unsigned long)arg5; \
3223 _argvec[2+6] = (unsigned long)arg6; \
3224 _argvec[2+7] = (unsigned long)arg7; \
3225 _argvec[2+8] = (unsigned long)arg8; \
3226 _argvec[2+9] = (unsigned long)arg9; \
3227 _argvec[2+10] = (unsigned long)arg10; \
3228 _argvec[2+11] = (unsigned long)arg11; \
3229 _argvec[2+12] = (unsigned long)arg12; \
3230 __asm__ volatile( \
3231 VALGRIND_ALIGN_STACK \
3232 "mr 11,%1\n\t" \
3233 "std 2,-16(11)\n\t" \
3234 "ld 2,-8(11)\n\t" \
3235 "addi 1,1,-144\n\t" \
3236 \
3237 "ld 3,96(11)\n\t" \
3238 "std 3,136(1)\n\t" \
3239 \
3240 "ld 3,88(11)\n\t" \
3241 "std 3,128(1)\n\t" \
3242 \
3243 "ld 3,80(11)\n\t" \
3244 "std 3,120(1)\n\t" \
3245 \
3246 "ld 3,72(11)\n\t" \
3247 "std 3,112(1)\n\t" \
3248 \
3249 "ld 3, 8(11)\n\t" \
3250 "ld 4, 16(11)\n\t" \
3251 "ld 5, 24(11)\n\t" \
3252 "ld 6, 32(11)\n\t" \
3253 "ld 7, 40(11)\n\t" \
3254 "ld 8, 48(11)\n\t" \
3255 "ld 9, 56(11)\n\t" \
3256 "ld 10, 64(11)\n\t" \
3257 "ld 11, 0(11)\n\t" \
3258 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3259 "mr 11,%1\n\t" \
3260 "mr %0,3\n\t" \
3261 "ld 2,-16(11)\n\t" \
3262 VALGRIND_RESTORE_STACK \
3263 : "=r" (_res) \
3264 : "r" (&_argvec[2]) \
3265 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3266 ); \
3267 lval = (__typeof__(lval)) _res; \
3268 } while (0)
3269
3270 #endif
3271
3272
3273 #if defined(PLAT_ppc64le_linux)
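/* The ppc64le (ELFv2) variants mirror the ppc64be ones, with one
   important difference: the target's entry address is moved through
   r12 rather than r11, because the ELFv2 ABI expects r12 to hold the
   entry point of the called function at the point of call.  The
   argument and result registers (r3..r10, result in r3), the TOC
   save/restore around the call, and the use of r28 to preserve the
   stack pointer are otherwise the same. */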
3274
3275
3276
3277
3278 #define __CALLER_SAVED_REGS \
3279 "lr", "ctr", "xer", \
3280 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
3281 "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
3282 "r11", "r12", "r13"
3288
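/* Stack alignment works as on the other ppc variants: the stack
   pointer is saved in r28, rounded down to a 16-byte boundary with
   rldicr 1,1,0,59, and put back after the call. */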
3289 #define VALGRIND_ALIGN_STACK \
3290 "mr 28,1\n\t" \
3291 "rldicr 1,1,0,59\n\t"
3292 #define VALGRIND_RESTORE_STACK \
3293 "mr 1,28\n\t"
3294
3295
3296
3297
3298 #define CALL_FN_W_v(lval, orig) \
3299 do { \
3300 volatile OrigFn _orig = (orig); \
3301 volatile unsigned long _argvec[3+0]; \
3302 volatile unsigned long _res; \
3303 \
3304 _argvec[1] = (unsigned long)_orig.r2; \
3305 _argvec[2] = (unsigned long)_orig.nraddr; \
3306 __asm__ volatile( \
3307 VALGRIND_ALIGN_STACK \
3308 "mr 12,%1\n\t" \
3309 "std 2,-16(12)\n\t" \
3310 "ld 2,-8(12)\n\t" \
3311 "ld 12, 0(12)\n\t" \
3312 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3313 "mr 12,%1\n\t" \
3314 "mr %0,3\n\t" \
3315 "ld 2,-16(12)\n\t" \
3316 VALGRIND_RESTORE_STACK \
3317 : "=r" (_res) \
3318 : "r" (&_argvec[2]) \
3319 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3320 ); \
3321 lval = (__typeof__(lval)) _res; \
3322 } while (0)
3323
3324 #define CALL_FN_W_W(lval, orig, arg1) \
3325 do { \
3326 volatile OrigFn _orig = (orig); \
3327 volatile unsigned long _argvec[3+1]; \
3328 volatile unsigned long _res; \
3329 \
3330 _argvec[1] = (unsigned long)_orig.r2; \
3331 _argvec[2] = (unsigned long)_orig.nraddr; \
3332 _argvec[2+1] = (unsigned long)arg1; \
3333 __asm__ volatile( \
3334 VALGRIND_ALIGN_STACK \
3335 "mr 12,%1\n\t" \
3336 "std 2,-16(12)\n\t" \
3337 "ld 2,-8(12)\n\t" \
3338 "ld 3, 8(12)\n\t" \
3339 "ld 12, 0(12)\n\t" \
3340 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3341 "mr 12,%1\n\t" \
3342 "mr %0,3\n\t" \
3343 "ld 2,-16(12)\n\t" \
3344 VALGRIND_RESTORE_STACK \
3345 : "=r" (_res) \
3346 : "r" (&_argvec[2]) \
3347 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3348 ); \
3349 lval = (__typeof__(lval)) _res; \
3350 } while (0)
3351
3352 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3353 do { \
3354 volatile OrigFn _orig = (orig); \
3355 volatile unsigned long _argvec[3+2]; \
3356 volatile unsigned long _res; \
3357 \
3358 _argvec[1] = (unsigned long)_orig.r2; \
3359 _argvec[2] = (unsigned long)_orig.nraddr; \
3360 _argvec[2+1] = (unsigned long)arg1; \
3361 _argvec[2+2] = (unsigned long)arg2; \
3362 __asm__ volatile( \
3363 VALGRIND_ALIGN_STACK \
3364 "mr 12,%1\n\t" \
3365 "std 2,-16(12)\n\t" \
3366 "ld 2,-8(12)\n\t" \
3367 "ld 3, 8(12)\n\t" \
3368 "ld 4, 16(12)\n\t" \
3369 "ld 12, 0(12)\n\t" \
3370 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3371 "mr 12,%1\n\t" \
3372 "mr %0,3\n\t" \
3373 "ld 2,-16(12)\n\t" \
3374 VALGRIND_RESTORE_STACK \
3375 : "=r" (_res) \
3376 : "r" (&_argvec[2]) \
3377 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3378 ); \
3379 lval = (__typeof__(lval)) _res; \
3380 } while (0)
3381
3382 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3383 do { \
3384 volatile OrigFn _orig = (orig); \
3385 volatile unsigned long _argvec[3+3]; \
3386 volatile unsigned long _res; \
3387 \
3388 _argvec[1] = (unsigned long)_orig.r2; \
3389 _argvec[2] = (unsigned long)_orig.nraddr; \
3390 _argvec[2+1] = (unsigned long)arg1; \
3391 _argvec[2+2] = (unsigned long)arg2; \
3392 _argvec[2+3] = (unsigned long)arg3; \
3393 __asm__ volatile( \
3394 VALGRIND_ALIGN_STACK \
3395 "mr 12,%1\n\t" \
3396 "std 2,-16(12)\n\t" \
3397 "ld 2,-8(12)\n\t" \
3398 "ld 3, 8(12)\n\t" \
3399 "ld 4, 16(12)\n\t" \
3400 "ld 5, 24(12)\n\t" \
3401 "ld 12, 0(12)\n\t" \
3402 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3403 "mr 12,%1\n\t" \
3404 "mr %0,3\n\t" \
3405 "ld 2,-16(12)\n\t" \
3406 VALGRIND_RESTORE_STACK \
3407 : "=r" (_res) \
3408 : "r" (&_argvec[2]) \
3409 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3410 ); \
3411 lval = (__typeof__(lval)) _res; \
3412 } while (0)
3413
3414 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3415 do { \
3416 volatile OrigFn _orig = (orig); \
3417 volatile unsigned long _argvec[3+4]; \
3418 volatile unsigned long _res; \
3419 \
3420 _argvec[1] = (unsigned long)_orig.r2; \
3421 _argvec[2] = (unsigned long)_orig.nraddr; \
3422 _argvec[2+1] = (unsigned long)arg1; \
3423 _argvec[2+2] = (unsigned long)arg2; \
3424 _argvec[2+3] = (unsigned long)arg3; \
3425 _argvec[2+4] = (unsigned long)arg4; \
3426 __asm__ volatile( \
3427 VALGRIND_ALIGN_STACK \
3428 "mr 12,%1\n\t" \
3429 "std 2,-16(12)\n\t" \
3430 "ld 2,-8(12)\n\t" \
3431 "ld 3, 8(12)\n\t" \
3432 "ld 4, 16(12)\n\t" \
3433 "ld 5, 24(12)\n\t" \
3434 "ld 6, 32(12)\n\t" \
3435 "ld 12, 0(12)\n\t" \
3436 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3437 "mr 12,%1\n\t" \
3438 "mr %0,3\n\t" \
3439 "ld 2,-16(12)\n\t" \
3440 VALGRIND_RESTORE_STACK \
3441 : "=r" (_res) \
3442 : "r" (&_argvec[2]) \
3443 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3444 ); \
3445 lval = (__typeof__(lval)) _res; \
3446 } while (0)
3447
3448 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3449 do { \
3450 volatile OrigFn _orig = (orig); \
3451 volatile unsigned long _argvec[3+5]; \
3452 volatile unsigned long _res; \
3453 \
3454 _argvec[1] = (unsigned long)_orig.r2; \
3455 _argvec[2] = (unsigned long)_orig.nraddr; \
3456 _argvec[2+1] = (unsigned long)arg1; \
3457 _argvec[2+2] = (unsigned long)arg2; \
3458 _argvec[2+3] = (unsigned long)arg3; \
3459 _argvec[2+4] = (unsigned long)arg4; \
3460 _argvec[2+5] = (unsigned long)arg5; \
3461 __asm__ volatile( \
3462 VALGRIND_ALIGN_STACK \
3463 "mr 12,%1\n\t" \
3464 "std 2,-16(12)\n\t" \
3465 "ld 2,-8(12)\n\t" \
3466 "ld 3, 8(12)\n\t" \
3467 "ld 4, 16(12)\n\t" \
3468 "ld 5, 24(12)\n\t" \
3469 "ld 6, 32(12)\n\t" \
3470 "ld 7, 40(12)\n\t" \
3471 "ld 12, 0(12)\n\t" \
3472 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3473 "mr 12,%1\n\t" \
3474 "mr %0,3\n\t" \
3475 "ld 2,-16(12)\n\t" \
3476 VALGRIND_RESTORE_STACK \
3477 : "=r" (_res) \
3478 : "r" (&_argvec[2]) \
3479 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3480 ); \
3481 lval = (__typeof__(lval)) _res; \
3482 } while (0)
3483
3484 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3485 do { \
3486 volatile OrigFn _orig = (orig); \
3487 volatile unsigned long _argvec[3+6]; \
3488 volatile unsigned long _res; \
3489 \
3490 _argvec[1] = (unsigned long)_orig.r2; \
3491 _argvec[2] = (unsigned long)_orig.nraddr; \
3492 _argvec[2+1] = (unsigned long)arg1; \
3493 _argvec[2+2] = (unsigned long)arg2; \
3494 _argvec[2+3] = (unsigned long)arg3; \
3495 _argvec[2+4] = (unsigned long)arg4; \
3496 _argvec[2+5] = (unsigned long)arg5; \
3497 _argvec[2+6] = (unsigned long)arg6; \
3498 __asm__ volatile( \
3499 VALGRIND_ALIGN_STACK \
3500 "mr 12,%1\n\t" \
3501 "std 2,-16(12)\n\t" \
3502 "ld 2,-8(12)\n\t" \
3503 "ld 3, 8(12)\n\t" \
3504 "ld 4, 16(12)\n\t" \
3505 "ld 5, 24(12)\n\t" \
3506 "ld 6, 32(12)\n\t" \
3507 "ld 7, 40(12)\n\t" \
3508 "ld 8, 48(12)\n\t" \
3509 "ld 12, 0(12)\n\t" \
3510 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3511 "mr 12,%1\n\t" \
3512 "mr %0,3\n\t" \
3513 "ld 2,-16(12)\n\t" \
3514 VALGRIND_RESTORE_STACK \
3515 : "=r" (_res) \
3516 : "r" (&_argvec[2]) \
3517 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3518 ); \
3519 lval = (__typeof__(lval)) _res; \
3520 } while (0)
3521
3522 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3523 arg7) \
3524 do { \
3525 volatile OrigFn _orig = (orig); \
3526 volatile unsigned long _argvec[3+7]; \
3527 volatile unsigned long _res; \
3528 \
3529 _argvec[1] = (unsigned long)_orig.r2; \
3530 _argvec[2] = (unsigned long)_orig.nraddr; \
3531 _argvec[2+1] = (unsigned long)arg1; \
3532 _argvec[2+2] = (unsigned long)arg2; \
3533 _argvec[2+3] = (unsigned long)arg3; \
3534 _argvec[2+4] = (unsigned long)arg4; \
3535 _argvec[2+5] = (unsigned long)arg5; \
3536 _argvec[2+6] = (unsigned long)arg6; \
3537 _argvec[2+7] = (unsigned long)arg7; \
3538 __asm__ volatile( \
3539 VALGRIND_ALIGN_STACK \
3540 "mr 12,%1\n\t" \
3541 "std 2,-16(12)\n\t" \
3542 "ld 2,-8(12)\n\t" \
3543 "ld 3, 8(12)\n\t" \
3544 "ld 4, 16(12)\n\t" \
3545 "ld 5, 24(12)\n\t" \
3546 "ld 6, 32(12)\n\t" \
3547 "ld 7, 40(12)\n\t" \
3548 "ld 8, 48(12)\n\t" \
3549 "ld 9, 56(12)\n\t" \
3550 "ld 12, 0(12)\n\t" \
3551 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3552 "mr 12,%1\n\t" \
3553 "mr %0,3\n\t" \
3554 "ld 2,-16(12)\n\t" \
3555 VALGRIND_RESTORE_STACK \
3556 : "=r" (_res) \
3557 : "r" (&_argvec[2]) \
3558 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3559 ); \
3560 lval = (__typeof__(lval)) _res; \
3561 } while (0)
3562
3563 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3564 arg7,arg8) \
3565 do { \
3566 volatile OrigFn _orig = (orig); \
3567 volatile unsigned long _argvec[3+8]; \
3568 volatile unsigned long _res; \
3569 \
3570 _argvec[1] = (unsigned long)_orig.r2; \
3571 _argvec[2] = (unsigned long)_orig.nraddr; \
3572 _argvec[2+1] = (unsigned long)arg1; \
3573 _argvec[2+2] = (unsigned long)arg2; \
3574 _argvec[2+3] = (unsigned long)arg3; \
3575 _argvec[2+4] = (unsigned long)arg4; \
3576 _argvec[2+5] = (unsigned long)arg5; \
3577 _argvec[2+6] = (unsigned long)arg6; \
3578 _argvec[2+7] = (unsigned long)arg7; \
3579 _argvec[2+8] = (unsigned long)arg8; \
3580 __asm__ volatile( \
3581 VALGRIND_ALIGN_STACK \
3582 "mr 12,%1\n\t" \
3583 "std 2,-16(12)\n\t" \
3584 "ld 2,-8(12)\n\t" \
3585 "ld 3, 8(12)\n\t" \
3586 "ld 4, 16(12)\n\t" \
3587 "ld 5, 24(12)\n\t" \
3588 "ld 6, 32(12)\n\t" \
3589 "ld 7, 40(12)\n\t" \
3590 "ld 8, 48(12)\n\t" \
3591 "ld 9, 56(12)\n\t" \
3592 "ld 10, 64(12)\n\t" \
3593 "ld 12, 0(12)\n\t" \
3594 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3595 "mr 12,%1\n\t" \
3596 "mr %0,3\n\t" \
3597 "ld 2,-16(12)\n\t" \
3598 VALGRIND_RESTORE_STACK \
3599 : "=r" (_res) \
3600 : "r" (&_argvec[2]) \
3601 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3602 ); \
3603 lval = (__typeof__(lval)) _res; \
3604 } while (0)
3605
3606 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3607 arg7,arg8,arg9) \
3608 do { \
3609 volatile OrigFn _orig = (orig); \
3610 volatile unsigned long _argvec[3+9]; \
3611 volatile unsigned long _res; \
3612 \
3613 _argvec[1] = (unsigned long)_orig.r2; \
3614 _argvec[2] = (unsigned long)_orig.nraddr; \
3615 _argvec[2+1] = (unsigned long)arg1; \
3616 _argvec[2+2] = (unsigned long)arg2; \
3617 _argvec[2+3] = (unsigned long)arg3; \
3618 _argvec[2+4] = (unsigned long)arg4; \
3619 _argvec[2+5] = (unsigned long)arg5; \
3620 _argvec[2+6] = (unsigned long)arg6; \
3621 _argvec[2+7] = (unsigned long)arg7; \
3622 _argvec[2+8] = (unsigned long)arg8; \
3623 _argvec[2+9] = (unsigned long)arg9; \
3624 __asm__ volatile( \
3625 VALGRIND_ALIGN_STACK \
3626 "mr 12,%1\n\t" \
3627 "std 2,-16(12)\n\t" \
3628 "ld 2,-8(12)\n\t" \
3629 "addi 1,1,-128\n\t" \
3630 \
3631 "ld 3,72(12)\n\t" \
3632 "std 3,96(1)\n\t" \
3633 \
3634 "ld 3, 8(12)\n\t" \
3635 "ld 4, 16(12)\n\t" \
3636 "ld 5, 24(12)\n\t" \
3637 "ld 6, 32(12)\n\t" \
3638 "ld 7, 40(12)\n\t" \
3639 "ld 8, 48(12)\n\t" \
3640 "ld 9, 56(12)\n\t" \
3641 "ld 10, 64(12)\n\t" \
3642 "ld 12, 0(12)\n\t" \
3643 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3644 "mr 12,%1\n\t" \
3645 "mr %0,3\n\t" \
3646 "ld 2,-16(12)\n\t" \
3647 VALGRIND_RESTORE_STACK \
3648 : "=r" (_res) \
3649 : "r" (&_argvec[2]) \
3650 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3651 ); \
3652 lval = (__typeof__(lval)) _res; \
3653 } while (0)
3654
3655 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3656 arg7,arg8,arg9,arg10) \
3657 do { \
3658 volatile OrigFn _orig = (orig); \
3659 volatile unsigned long _argvec[3+10]; \
3660 volatile unsigned long _res; \
3661 \
3662 _argvec[1] = (unsigned long)_orig.r2; \
3663 _argvec[2] = (unsigned long)_orig.nraddr; \
3664 _argvec[2+1] = (unsigned long)arg1; \
3665 _argvec[2+2] = (unsigned long)arg2; \
3666 _argvec[2+3] = (unsigned long)arg3; \
3667 _argvec[2+4] = (unsigned long)arg4; \
3668 _argvec[2+5] = (unsigned long)arg5; \
3669 _argvec[2+6] = (unsigned long)arg6; \
3670 _argvec[2+7] = (unsigned long)arg7; \
3671 _argvec[2+8] = (unsigned long)arg8; \
3672 _argvec[2+9] = (unsigned long)arg9; \
3673 _argvec[2+10] = (unsigned long)arg10; \
3674 __asm__ volatile( \
3675 VALGRIND_ALIGN_STACK \
3676 "mr 12,%1\n\t" \
3677 "std 2,-16(12)\n\t" \
3678 "ld 2,-8(12)\n\t" \
3679 "addi 1,1,-128\n\t" \
3680 \
3681 "ld 3,80(12)\n\t" \
3682 "std 3,104(1)\n\t" \
3683 \
3684 "ld 3,72(12)\n\t" \
3685 "std 3,96(1)\n\t" \
3686 \
3687 "ld 3, 8(12)\n\t" \
3688 "ld 4, 16(12)\n\t" \
3689 "ld 5, 24(12)\n\t" \
3690 "ld 6, 32(12)\n\t" \
3691 "ld 7, 40(12)\n\t" \
3692 "ld 8, 48(12)\n\t" \
3693 "ld 9, 56(12)\n\t" \
3694 "ld 10, 64(12)\n\t" \
3695 "ld 12, 0(12)\n\t" \
3696 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3697 "mr 12,%1\n\t" \
3698 "mr %0,3\n\t" \
3699 "ld 2,-16(12)\n\t" \
3700 VALGRIND_RESTORE_STACK \
3701 : "=r" (_res) \
3702 : "r" (&_argvec[2]) \
3703 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3704 ); \
3705 lval = (__typeof__(lval)) _res; \
3706 } while (0)
3707
3708 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3709 arg7,arg8,arg9,arg10,arg11) \
3710 do { \
3711 volatile OrigFn _orig = (orig); \
3712 volatile unsigned long _argvec[3+11]; \
3713 volatile unsigned long _res; \
3714 \
3715 _argvec[1] = (unsigned long)_orig.r2; \
3716 _argvec[2] = (unsigned long)_orig.nraddr; \
3717 _argvec[2+1] = (unsigned long)arg1; \
3718 _argvec[2+2] = (unsigned long)arg2; \
3719 _argvec[2+3] = (unsigned long)arg3; \
3720 _argvec[2+4] = (unsigned long)arg4; \
3721 _argvec[2+5] = (unsigned long)arg5; \
3722 _argvec[2+6] = (unsigned long)arg6; \
3723 _argvec[2+7] = (unsigned long)arg7; \
3724 _argvec[2+8] = (unsigned long)arg8; \
3725 _argvec[2+9] = (unsigned long)arg9; \
3726 _argvec[2+10] = (unsigned long)arg10; \
3727 _argvec[2+11] = (unsigned long)arg11; \
3728 __asm__ volatile( \
3729 VALGRIND_ALIGN_STACK \
3730 "mr 12,%1\n\t" \
3731 "std 2,-16(12)\n\t" \
3732 "ld 2,-8(12)\n\t" \
3733 "addi 1,1,-144\n\t" \
3734 \
3735 "ld 3,88(12)\n\t" \
3736 "std 3,112(1)\n\t" \
3737 \
3738 "ld 3,80(12)\n\t" \
3739 "std 3,104(1)\n\t" \
3740 \
3741 "ld 3,72(12)\n\t" \
3742 "std 3,96(1)\n\t" \
3743 \
3744 "ld 3, 8(12)\n\t" \
3745 "ld 4, 16(12)\n\t" \
3746 "ld 5, 24(12)\n\t" \
3747 "ld 6, 32(12)\n\t" \
3748 "ld 7, 40(12)\n\t" \
3749 "ld 8, 48(12)\n\t" \
3750 "ld 9, 56(12)\n\t" \
3751 "ld 10, 64(12)\n\t" \
3752 "ld 12, 0(12)\n\t" \
3753 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3754 "mr 12,%1\n\t" \
3755 "mr %0,3\n\t" \
3756 "ld 2,-16(12)\n\t" \
3757 VALGRIND_RESTORE_STACK \
3758 : "=r" (_res) \
3759 : "r" (&_argvec[2]) \
3760 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3761 ); \
3762 lval = (__typeof__(lval)) _res; \
3763 } while (0)
3764
3765 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3766 arg7,arg8,arg9,arg10,arg11,arg12) \
3767 do { \
3768 volatile OrigFn _orig = (orig); \
3769 volatile unsigned long _argvec[3+12]; \
3770 volatile unsigned long _res; \
3771 \
3772 _argvec[1] = (unsigned long)_orig.r2; \
3773 _argvec[2] = (unsigned long)_orig.nraddr; \
3774 _argvec[2+1] = (unsigned long)arg1; \
3775 _argvec[2+2] = (unsigned long)arg2; \
3776 _argvec[2+3] = (unsigned long)arg3; \
3777 _argvec[2+4] = (unsigned long)arg4; \
3778 _argvec[2+5] = (unsigned long)arg5; \
3779 _argvec[2+6] = (unsigned long)arg6; \
3780 _argvec[2+7] = (unsigned long)arg7; \
3781 _argvec[2+8] = (unsigned long)arg8; \
3782 _argvec[2+9] = (unsigned long)arg9; \
3783 _argvec[2+10] = (unsigned long)arg10; \
3784 _argvec[2+11] = (unsigned long)arg11; \
3785 _argvec[2+12] = (unsigned long)arg12; \
3786 __asm__ volatile( \
3787 VALGRIND_ALIGN_STACK \
3788 "mr 12,%1\n\t" \
3789 "std 2,-16(12)\n\t" \
3790 "ld 2,-8(12)\n\t" \
3791 "addi 1,1,-144\n\t" \
3792 \
3793 "ld 3,96(12)\n\t" \
3794 "std 3,120(1)\n\t" \
3795 \
3796 "ld 3,88(12)\n\t" \
3797 "std 3,112(1)\n\t" \
3798 \
3799 "ld 3,80(12)\n\t" \
3800 "std 3,104(1)\n\t" \
3801 \
3802 "ld 3,72(12)\n\t" \
3803 "std 3,96(1)\n\t" \
3804 \
3805 "ld 3, 8(12)\n\t" \
3806 "ld 4, 16(12)\n\t" \
3807 "ld 5, 24(12)\n\t" \
3808 "ld 6, 32(12)\n\t" \
3809 "ld 7, 40(12)\n\t" \
3810 "ld 8, 48(12)\n\t" \
3811 "ld 9, 56(12)\n\t" \
3812 "ld 10, 64(12)\n\t" \
3813 "ld 12, 0(12)\n\t" \
3814 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3815 "mr 12,%1\n\t" \
3816 "mr %0,3\n\t" \
3817 "ld 2,-16(12)\n\t" \
3818 VALGRIND_RESTORE_STACK \
3819 : "=r" (_res) \
3820 : "r" (&_argvec[2]) \
3821 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3822 ); \
3823 lval = (__typeof__(lval)) _res; \
3824 } while (0)
3825
3826 #endif
3827
3828
3829
3830 #if defined(PLAT_arm_linux)
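/* On arm-linux the first four word arguments are passed in r0..r3,
   any further arguments on the stack, and the result is returned in
   r0.  r4 is used to hold the target's entry address for the
   branch-and-link, and r10 preserves the original stack pointer
   while the stack is temporarily realigned. */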
3831
3832
3833 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14"
3847
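/* VALGRIND_ALIGN_STACK here parks the current sp in r10 and clears
   its low three bits (bic r4, r4, #7) so the callee sees an 8-byte
   aligned stack; the extra "sub sp, sp, #4" in the odd-count cases
   keeps that alignment once the stacked arguments have been pushed.
   VALGRIND_RESTORE_STACK copies r10 back into sp. */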
3848 #define VALGRIND_ALIGN_STACK \
3849 "mov r10, sp\n\t" \
3850 "mov r4, sp\n\t" \
3851 "bic r4, r4, #7\n\t" \
3852 "mov sp, r4\n\t"
3853 #define VALGRIND_RESTORE_STACK \
3854 "mov sp, r10\n\t"
3855
3856
3857
3858
3859 #define CALL_FN_W_v(lval, orig) \
3860 do { \
3861 volatile OrigFn _orig = (orig); \
3862 volatile unsigned long _argvec[1]; \
3863 volatile unsigned long _res; \
3864 _argvec[0] = (unsigned long)_orig.nraddr; \
3865 __asm__ volatile( \
3866 VALGRIND_ALIGN_STACK \
3867 "ldr r4, [%1] \n\t" \
3868 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3869 VALGRIND_RESTORE_STACK \
3870 "mov %0, r0\n" \
3871 : "=r" (_res) \
3872 : "0" (&_argvec[0]) \
3873 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3874 ); \
3875 lval = (__typeof__(lval)) _res; \
3876 } while (0)
3877
3878 #define CALL_FN_W_W(lval, orig, arg1) \
3879 do { \
3880 volatile OrigFn _orig = (orig); \
3881 volatile unsigned long _argvec[2]; \
3882 volatile unsigned long _res; \
3883 _argvec[0] = (unsigned long)_orig.nraddr; \
3884 _argvec[1] = (unsigned long)(arg1); \
3885 __asm__ volatile( \
3886 VALGRIND_ALIGN_STACK \
3887 "ldr r0, [%1, #4] \n\t" \
3888 "ldr r4, [%1] \n\t" \
3889 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3890 VALGRIND_RESTORE_STACK \
3891 "mov %0, r0\n" \
3892 : "=r" (_res) \
3893 : "0" (&_argvec[0]) \
3894 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3895 ); \
3896 lval = (__typeof__(lval)) _res; \
3897 } while (0)
3898
3899 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3900 do { \
3901 volatile OrigFn _orig = (orig); \
3902 volatile unsigned long _argvec[3]; \
3903 volatile unsigned long _res; \
3904 _argvec[0] = (unsigned long)_orig.nraddr; \
3905 _argvec[1] = (unsigned long)(arg1); \
3906 _argvec[2] = (unsigned long)(arg2); \
3907 __asm__ volatile( \
3908 VALGRIND_ALIGN_STACK \
3909 "ldr r0, [%1, #4] \n\t" \
3910 "ldr r1, [%1, #8] \n\t" \
3911 "ldr r4, [%1] \n\t" \
3912 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3913 VALGRIND_RESTORE_STACK \
3914 "mov %0, r0\n" \
3915 : "=r" (_res) \
3916 : "0" (&_argvec[0]) \
3917 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3918 ); \
3919 lval = (__typeof__(lval)) _res; \
3920 } while (0)
3921
3922 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3923 do { \
3924 volatile OrigFn _orig = (orig); \
3925 volatile unsigned long _argvec[4]; \
3926 volatile unsigned long _res; \
3927 _argvec[0] = (unsigned long)_orig.nraddr; \
3928 _argvec[1] = (unsigned long)(arg1); \
3929 _argvec[2] = (unsigned long)(arg2); \
3930 _argvec[3] = (unsigned long)(arg3); \
3931 __asm__ volatile( \
3932 VALGRIND_ALIGN_STACK \
3933 "ldr r0, [%1, #4] \n\t" \
3934 "ldr r1, [%1, #8] \n\t" \
3935 "ldr r2, [%1, #12] \n\t" \
3936 "ldr r4, [%1] \n\t" \
3937 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3938 VALGRIND_RESTORE_STACK \
3939 "mov %0, r0\n" \
3940 : "=r" (_res) \
3941 : "0" (&_argvec[0]) \
3942 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3943 ); \
3944 lval = (__typeof__(lval)) _res; \
3945 } while (0)
3946
3947 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3948 do { \
3949 volatile OrigFn _orig = (orig); \
3950 volatile unsigned long _argvec[5]; \
3951 volatile unsigned long _res; \
3952 _argvec[0] = (unsigned long)_orig.nraddr; \
3953 _argvec[1] = (unsigned long)(arg1); \
3954 _argvec[2] = (unsigned long)(arg2); \
3955 _argvec[3] = (unsigned long)(arg3); \
3956 _argvec[4] = (unsigned long)(arg4); \
3957 __asm__ volatile( \
3958 VALGRIND_ALIGN_STACK \
3959 "ldr r0, [%1, #4] \n\t" \
3960 "ldr r1, [%1, #8] \n\t" \
3961 "ldr r2, [%1, #12] \n\t" \
3962 "ldr r3, [%1, #16] \n\t" \
3963 "ldr r4, [%1] \n\t" \
3964 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3965 VALGRIND_RESTORE_STACK \
3966 "mov %0, r0" \
3967 : "=r" (_res) \
3968 : "0" (&_argvec[0]) \
3969 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3970 ); \
3971 lval = (__typeof__(lval)) _res; \
3972 } while (0)
3973
3974 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3975 do { \
3976 volatile OrigFn _orig = (orig); \
3977 volatile unsigned long _argvec[6]; \
3978 volatile unsigned long _res; \
3979 _argvec[0] = (unsigned long)_orig.nraddr; \
3980 _argvec[1] = (unsigned long)(arg1); \
3981 _argvec[2] = (unsigned long)(arg2); \
3982 _argvec[3] = (unsigned long)(arg3); \
3983 _argvec[4] = (unsigned long)(arg4); \
3984 _argvec[5] = (unsigned long)(arg5); \
3985 __asm__ volatile( \
3986 VALGRIND_ALIGN_STACK \
3987 "sub sp, sp, #4 \n\t" \
3988 "ldr r0, [%1, #20] \n\t" \
3989 "push {r0} \n\t" \
3990 "ldr r0, [%1, #4] \n\t" \
3991 "ldr r1, [%1, #8] \n\t" \
3992 "ldr r2, [%1, #12] \n\t" \
3993 "ldr r3, [%1, #16] \n\t" \
3994 "ldr r4, [%1] \n\t" \
3995 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3996 VALGRIND_RESTORE_STACK \
3997 "mov %0, r0" \
3998 : "=r" (_res) \
3999 : "0" (&_argvec[0]) \
4000 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4001 ); \
4002 lval = (__typeof__(lval)) _res; \
4003 } while (0)
4004
4005 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4006 do { \
4007 volatile OrigFn _orig = (orig); \
4008 volatile unsigned long _argvec[7]; \
4009 volatile unsigned long _res; \
4010 _argvec[0] = (unsigned long)_orig.nraddr; \
4011 _argvec[1] = (unsigned long)(arg1); \
4012 _argvec[2] = (unsigned long)(arg2); \
4013 _argvec[3] = (unsigned long)(arg3); \
4014 _argvec[4] = (unsigned long)(arg4); \
4015 _argvec[5] = (unsigned long)(arg5); \
4016 _argvec[6] = (unsigned long)(arg6); \
4017 __asm__ volatile( \
4018 VALGRIND_ALIGN_STACK \
4019 "ldr r0, [%1, #20] \n\t" \
4020 "ldr r1, [%1, #24] \n\t" \
4021 "push {r0, r1} \n\t" \
4022 "ldr r0, [%1, #4] \n\t" \
4023 "ldr r1, [%1, #8] \n\t" \
4024 "ldr r2, [%1, #12] \n\t" \
4025 "ldr r3, [%1, #16] \n\t" \
4026 "ldr r4, [%1] \n\t" \
4027 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4028 VALGRIND_RESTORE_STACK \
4029 "mov %0, r0" \
4030 : "=r" (_res) \
4031 : "0" (&_argvec[0]) \
4032 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4033 ); \
4034 lval = (__typeof__(lval)) _res; \
4035 } while (0)
4036
4037 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4038 arg7) \
4039 do { \
4040 volatile OrigFn _orig = (orig); \
4041 volatile unsigned long _argvec[8]; \
4042 volatile unsigned long _res; \
4043 _argvec[0] = (unsigned long)_orig.nraddr; \
4044 _argvec[1] = (unsigned long)(arg1); \
4045 _argvec[2] = (unsigned long)(arg2); \
4046 _argvec[3] = (unsigned long)(arg3); \
4047 _argvec[4] = (unsigned long)(arg4); \
4048 _argvec[5] = (unsigned long)(arg5); \
4049 _argvec[6] = (unsigned long)(arg6); \
4050 _argvec[7] = (unsigned long)(arg7); \
4051 __asm__ volatile( \
4052 VALGRIND_ALIGN_STACK \
4053 "sub sp, sp, #4 \n\t" \
4054 "ldr r0, [%1, #20] \n\t" \
4055 "ldr r1, [%1, #24] \n\t" \
4056 "ldr r2, [%1, #28] \n\t" \
4057 "push {r0, r1, r2} \n\t" \
4058 "ldr r0, [%1, #4] \n\t" \
4059 "ldr r1, [%1, #8] \n\t" \
4060 "ldr r2, [%1, #12] \n\t" \
4061 "ldr r3, [%1, #16] \n\t" \
4062 "ldr r4, [%1] \n\t" \
4063 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4064 VALGRIND_RESTORE_STACK \
4065 "mov %0, r0" \
4066 : "=r" (_res) \
4067 : "0" (&_argvec[0]) \
4068 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4069 ); \
4070 lval = (__typeof__(lval)) _res; \
4071 } while (0)
4072
4073 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4074 arg7,arg8) \
4075 do { \
4076 volatile OrigFn _orig = (orig); \
4077 volatile unsigned long _argvec[9]; \
4078 volatile unsigned long _res; \
4079 _argvec[0] = (unsigned long)_orig.nraddr; \
4080 _argvec[1] = (unsigned long)(arg1); \
4081 _argvec[2] = (unsigned long)(arg2); \
4082 _argvec[3] = (unsigned long)(arg3); \
4083 _argvec[4] = (unsigned long)(arg4); \
4084 _argvec[5] = (unsigned long)(arg5); \
4085 _argvec[6] = (unsigned long)(arg6); \
4086 _argvec[7] = (unsigned long)(arg7); \
4087 _argvec[8] = (unsigned long)(arg8); \
4088 __asm__ volatile( \
4089 VALGRIND_ALIGN_STACK \
4090 "ldr r0, [%1, #20] \n\t" \
4091 "ldr r1, [%1, #24] \n\t" \
4092 "ldr r2, [%1, #28] \n\t" \
4093 "ldr r3, [%1, #32] \n\t" \
4094 "push {r0, r1, r2, r3} \n\t" \
4095 "ldr r0, [%1, #4] \n\t" \
4096 "ldr r1, [%1, #8] \n\t" \
4097 "ldr r2, [%1, #12] \n\t" \
4098 "ldr r3, [%1, #16] \n\t" \
4099 "ldr r4, [%1] \n\t" \
4100 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4101 VALGRIND_RESTORE_STACK \
4102 "mov %0, r0" \
4103 : "=r" (_res) \
4104 : "0" (&_argvec[0]) \
4105 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4106 ); \
4107 lval = (__typeof__(lval)) _res; \
4108 } while (0)
4109
4110 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4111 arg7,arg8,arg9) \
4112 do { \
4113 volatile OrigFn _orig = (orig); \
4114 volatile unsigned long _argvec[10]; \
4115 volatile unsigned long _res; \
4116 _argvec[0] = (unsigned long)_orig.nraddr; \
4117 _argvec[1] = (unsigned long)(arg1); \
4118 _argvec[2] = (unsigned long)(arg2); \
4119 _argvec[3] = (unsigned long)(arg3); \
4120 _argvec[4] = (unsigned long)(arg4); \
4121 _argvec[5] = (unsigned long)(arg5); \
4122 _argvec[6] = (unsigned long)(arg6); \
4123 _argvec[7] = (unsigned long)(arg7); \
4124 _argvec[8] = (unsigned long)(arg8); \
4125 _argvec[9] = (unsigned long)(arg9); \
4126 __asm__ volatile( \
4127 VALGRIND_ALIGN_STACK \
4128 "sub sp, sp, #4 \n\t" \
4129 "ldr r0, [%1, #20] \n\t" \
4130 "ldr r1, [%1, #24] \n\t" \
4131 "ldr r2, [%1, #28] \n\t" \
4132 "ldr r3, [%1, #32] \n\t" \
4133 "ldr r4, [%1, #36] \n\t" \
4134 "push {r0, r1, r2, r3, r4} \n\t" \
4135 "ldr r0, [%1, #4] \n\t" \
4136 "ldr r1, [%1, #8] \n\t" \
4137 "ldr r2, [%1, #12] \n\t" \
4138 "ldr r3, [%1, #16] \n\t" \
4139 "ldr r4, [%1] \n\t" \
4140 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4141 VALGRIND_RESTORE_STACK \
4142 "mov %0, r0" \
4143 : "=r" (_res) \
4144 : "0" (&_argvec[0]) \
4145 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4146 ); \
4147 lval = (__typeof__(lval)) _res; \
4148 } while (0)
4149
4150 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4151 arg7,arg8,arg9,arg10) \
4152 do { \
4153 volatile OrigFn _orig = (orig); \
4154 volatile unsigned long _argvec[11]; \
4155 volatile unsigned long _res; \
4156 _argvec[0] = (unsigned long)_orig.nraddr; \
4157 _argvec[1] = (unsigned long)(arg1); \
4158 _argvec[2] = (unsigned long)(arg2); \
4159 _argvec[3] = (unsigned long)(arg3); \
4160 _argvec[4] = (unsigned long)(arg4); \
4161 _argvec[5] = (unsigned long)(arg5); \
4162 _argvec[6] = (unsigned long)(arg6); \
4163 _argvec[7] = (unsigned long)(arg7); \
4164 _argvec[8] = (unsigned long)(arg8); \
4165 _argvec[9] = (unsigned long)(arg9); \
4166 _argvec[10] = (unsigned long)(arg10); \
4167 __asm__ volatile( \
4168 VALGRIND_ALIGN_STACK \
4169 "ldr r0, [%1, #40] \n\t" \
4170 "push {r0} \n\t" \
4171 "ldr r0, [%1, #20] \n\t" \
4172 "ldr r1, [%1, #24] \n\t" \
4173 "ldr r2, [%1, #28] \n\t" \
4174 "ldr r3, [%1, #32] \n\t" \
4175 "ldr r4, [%1, #36] \n\t" \
4176 "push {r0, r1, r2, r3, r4} \n\t" \
4177 "ldr r0, [%1, #4] \n\t" \
4178 "ldr r1, [%1, #8] \n\t" \
4179 "ldr r2, [%1, #12] \n\t" \
4180 "ldr r3, [%1, #16] \n\t" \
4181 "ldr r4, [%1] \n\t" \
4182 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4183 VALGRIND_RESTORE_STACK \
4184 "mov %0, r0" \
4185 : "=r" (_res) \
4186 : "0" (&_argvec[0]) \
4187 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4188 ); \
4189 lval = (__typeof__(lval)) _res; \
4190 } while (0)
4191
4192 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4193 arg6,arg7,arg8,arg9,arg10, \
4194 arg11) \
4195 do { \
4196 volatile OrigFn _orig = (orig); \
4197 volatile unsigned long _argvec[12]; \
4198 volatile unsigned long _res; \
4199 _argvec[0] = (unsigned long)_orig.nraddr; \
4200 _argvec[1] = (unsigned long)(arg1); \
4201 _argvec[2] = (unsigned long)(arg2); \
4202 _argvec[3] = (unsigned long)(arg3); \
4203 _argvec[4] = (unsigned long)(arg4); \
4204 _argvec[5] = (unsigned long)(arg5); \
4205 _argvec[6] = (unsigned long)(arg6); \
4206 _argvec[7] = (unsigned long)(arg7); \
4207 _argvec[8] = (unsigned long)(arg8); \
4208 _argvec[9] = (unsigned long)(arg9); \
4209 _argvec[10] = (unsigned long)(arg10); \
4210 _argvec[11] = (unsigned long)(arg11); \
4211 __asm__ volatile( \
4212 VALGRIND_ALIGN_STACK \
4213 "sub sp, sp, #4 \n\t" \
4214 "ldr r0, [%1, #40] \n\t" \
4215 "ldr r1, [%1, #44] \n\t" \
4216 "push {r0, r1} \n\t" \
4217 "ldr r0, [%1, #20] \n\t" \
4218 "ldr r1, [%1, #24] \n\t" \
4219 "ldr r2, [%1, #28] \n\t" \
4220 "ldr r3, [%1, #32] \n\t" \
4221 "ldr r4, [%1, #36] \n\t" \
4222 "push {r0, r1, r2, r3, r4} \n\t" \
4223 "ldr r0, [%1, #4] \n\t" \
4224 "ldr r1, [%1, #8] \n\t" \
4225 "ldr r2, [%1, #12] \n\t" \
4226 "ldr r3, [%1, #16] \n\t" \
4227 "ldr r4, [%1] \n\t" \
4228 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4229 VALGRIND_RESTORE_STACK \
4230 "mov %0, r0" \
4231 : "=r" (_res) \
4232 : "0" (&_argvec[0]) \
4233 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4234 ); \
4235 lval = (__typeof__(lval)) _res; \
4236 } while (0)
4237
4238 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4239 arg6,arg7,arg8,arg9,arg10, \
4240 arg11,arg12) \
4241 do { \
4242 volatile OrigFn _orig = (orig); \
4243 volatile unsigned long _argvec[13]; \
4244 volatile unsigned long _res; \
4245 _argvec[0] = (unsigned long)_orig.nraddr; \
4246 _argvec[1] = (unsigned long)(arg1); \
4247 _argvec[2] = (unsigned long)(arg2); \
4248 _argvec[3] = (unsigned long)(arg3); \
4249 _argvec[4] = (unsigned long)(arg4); \
4250 _argvec[5] = (unsigned long)(arg5); \
4251 _argvec[6] = (unsigned long)(arg6); \
4252 _argvec[7] = (unsigned long)(arg7); \
4253 _argvec[8] = (unsigned long)(arg8); \
4254 _argvec[9] = (unsigned long)(arg9); \
4255 _argvec[10] = (unsigned long)(arg10); \
4256 _argvec[11] = (unsigned long)(arg11); \
4257 _argvec[12] = (unsigned long)(arg12); \
4258 __asm__ volatile( \
4259 VALGRIND_ALIGN_STACK \
4260 "ldr r0, [%1, #40] \n\t" \
4261 "ldr r1, [%1, #44] \n\t" \
4262 "ldr r2, [%1, #48] \n\t" \
4263 "push {r0, r1, r2} \n\t" \
4264 "ldr r0, [%1, #20] \n\t" \
4265 "ldr r1, [%1, #24] \n\t" \
4266 "ldr r2, [%1, #28] \n\t" \
4267 "ldr r3, [%1, #32] \n\t" \
4268 "ldr r4, [%1, #36] \n\t" \
4269 "push {r0, r1, r2, r3, r4} \n\t" \
4270 "ldr r0, [%1, #4] \n\t" \
4271 "ldr r1, [%1, #8] \n\t" \
4272 "ldr r2, [%1, #12] \n\t" \
4273 "ldr r3, [%1, #16] \n\t" \
4274 "ldr r4, [%1] \n\t" \
4275 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4276 VALGRIND_RESTORE_STACK \
4277 "mov %0, r0" \
4278 : "=r" (_res) \
4279 : "0" (&_argvec[0]) \
4280 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4281 ); \
4282 lval = (__typeof__(lval)) _res; \
4283 } while (0)
4284
4285 #endif
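/* Illustrative sketch (not part of the original header): a function
   wrapper typically combines these CALL_FN_* macros with
   VALGRIND_GET_ORIG_FN, for example

      int I_WRAP_SONAME_FNNAME_ZU(libcZdsoZa, foo)(int x)
      {
         int    r;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);   // fetch address of the real foo
         CALL_FN_W_W(r, fn, x);      // call it without redirection
         return r;
      }

   where "foo" and the z-encoded soname "libcZdsoZa" (libc.so*) are
   placeholder names chosen only for this example. */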
4286
4287
4288
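/* arm64 (AArch64) variants, used for both Linux and FreeBSD.  These
   macros load the first eight integer arguments from _argvec[1..8]
   into x0-x7, store any further arguments into a stack area reserved
   with "sub sp, sp, #...", load the target address from _argvec[0]
   into x8, and make the hidden call through
   VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8.  The result is returned in
   x0. */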
4289 #if defined(PLAT_arm64_linux) || defined(PLAT_arm64_freebsd)
4290
4291
4292 #define __CALLER_SAVED_REGS \
4293 "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9", \
4294 "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", \
4295 "x18", "x19", "x20", "x30", \
4296 "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", \
4297 "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", \
4298 "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", \
4299 "v26", "v27", "v28", "v29", "v30", "v31"
4300
4301
4302
4303 #define VALGRIND_ALIGN_STACK \
4304 "mov x21, sp\n\t" \
4305 "bic sp, x21, #15\n\t"
4306 #define VALGRIND_RESTORE_STACK \
4307 "mov sp, x21\n\t"
4308
4309
4310
4311
4312 #define CALL_FN_W_v(lval, orig) \
4313 do { \
4314 volatile OrigFn _orig = (orig); \
4315 volatile unsigned long _argvec[1]; \
4316 volatile unsigned long _res; \
4317 _argvec[0] = (unsigned long)_orig.nraddr; \
4318 __asm__ volatile( \
4319 VALGRIND_ALIGN_STACK \
4320 "ldr x8, [%1] \n\t" \
4321 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4322 VALGRIND_RESTORE_STACK \
4323 "mov %0, x0\n" \
4324 : "=r" (_res) \
4325 : "0" (&_argvec[0]) \
4326 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4327 ); \
4328 lval = (__typeof__(lval)) _res; \
4329 } while (0)
4330
4331 #define CALL_FN_W_W(lval, orig, arg1) \
4332 do { \
4333 volatile OrigFn _orig = (orig); \
4334 volatile unsigned long _argvec[2]; \
4335 volatile unsigned long _res; \
4336 _argvec[0] = (unsigned long)_orig.nraddr; \
4337 _argvec[1] = (unsigned long)(arg1); \
4338 __asm__ volatile( \
4339 VALGRIND_ALIGN_STACK \
4340 "ldr x0, [%1, #8] \n\t" \
4341 "ldr x8, [%1] \n\t" \
4342 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4343 VALGRIND_RESTORE_STACK \
4344 "mov %0, x0\n" \
4345 : "=r" (_res) \
4346 : "0" (&_argvec[0]) \
4347 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4348 ); \
4349 lval = (__typeof__(lval)) _res; \
4350 } while (0)
4351
4352 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
4353 do { \
4354 volatile OrigFn _orig = (orig); \
4355 volatile unsigned long _argvec[3]; \
4356 volatile unsigned long _res; \
4357 _argvec[0] = (unsigned long)_orig.nraddr; \
4358 _argvec[1] = (unsigned long)(arg1); \
4359 _argvec[2] = (unsigned long)(arg2); \
4360 __asm__ volatile( \
4361 VALGRIND_ALIGN_STACK \
4362 "ldr x0, [%1, #8] \n\t" \
4363 "ldr x1, [%1, #16] \n\t" \
4364 "ldr x8, [%1] \n\t" \
4365 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4366 VALGRIND_RESTORE_STACK \
4367 "mov %0, x0\n" \
4368 : "=r" (_res) \
4369 : "0" (&_argvec[0]) \
4370 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4371 ); \
4372 lval = (__typeof__(lval)) _res; \
4373 } while (0)
4374
4375 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
4376 do { \
4377 volatile OrigFn _orig = (orig); \
4378 volatile unsigned long _argvec[4]; \
4379 volatile unsigned long _res; \
4380 _argvec[0] = (unsigned long)_orig.nraddr; \
4381 _argvec[1] = (unsigned long)(arg1); \
4382 _argvec[2] = (unsigned long)(arg2); \
4383 _argvec[3] = (unsigned long)(arg3); \
4384 __asm__ volatile( \
4385 VALGRIND_ALIGN_STACK \
4386 "ldr x0, [%1, #8] \n\t" \
4387 "ldr x1, [%1, #16] \n\t" \
4388 "ldr x2, [%1, #24] \n\t" \
4389 "ldr x8, [%1] \n\t" \
4390 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4391 VALGRIND_RESTORE_STACK \
4392 "mov %0, x0\n" \
4393 : "=r" (_res) \
4394 : "0" (&_argvec[0]) \
4395 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4396 ); \
4397 lval = (__typeof__(lval)) _res; \
4398 } while (0)
4399
4400 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
4401 do { \
4402 volatile OrigFn _orig = (orig); \
4403 volatile unsigned long _argvec[5]; \
4404 volatile unsigned long _res; \
4405 _argvec[0] = (unsigned long)_orig.nraddr; \
4406 _argvec[1] = (unsigned long)(arg1); \
4407 _argvec[2] = (unsigned long)(arg2); \
4408 _argvec[3] = (unsigned long)(arg3); \
4409 _argvec[4] = (unsigned long)(arg4); \
4410 __asm__ volatile( \
4411 VALGRIND_ALIGN_STACK \
4412 "ldr x0, [%1, #8] \n\t" \
4413 "ldr x1, [%1, #16] \n\t" \
4414 "ldr x2, [%1, #24] \n\t" \
4415 "ldr x3, [%1, #32] \n\t" \
4416 "ldr x8, [%1] \n\t" \
4417 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4418 VALGRIND_RESTORE_STACK \
4419 "mov %0, x0" \
4420 : "=r" (_res) \
4421 : "0" (&_argvec[0]) \
4422 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4423 ); \
4424 lval = (__typeof__(lval)) _res; \
4425 } while (0)
4426
4427 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
4428 do { \
4429 volatile OrigFn _orig = (orig); \
4430 volatile unsigned long _argvec[6]; \
4431 volatile unsigned long _res; \
4432 _argvec[0] = (unsigned long)_orig.nraddr; \
4433 _argvec[1] = (unsigned long)(arg1); \
4434 _argvec[2] = (unsigned long)(arg2); \
4435 _argvec[3] = (unsigned long)(arg3); \
4436 _argvec[4] = (unsigned long)(arg4); \
4437 _argvec[5] = (unsigned long)(arg5); \
4438 __asm__ volatile( \
4439 VALGRIND_ALIGN_STACK \
4440 "ldr x0, [%1, #8] \n\t" \
4441 "ldr x1, [%1, #16] \n\t" \
4442 "ldr x2, [%1, #24] \n\t" \
4443 "ldr x3, [%1, #32] \n\t" \
4444 "ldr x4, [%1, #40] \n\t" \
4445 "ldr x8, [%1] \n\t" \
4446 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4447 VALGRIND_RESTORE_STACK \
4448 "mov %0, x0" \
4449 : "=r" (_res) \
4450 : "0" (&_argvec[0]) \
4451 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4452 ); \
4453 lval = (__typeof__(lval)) _res; \
4454 } while (0)
4455
4456 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4457 do { \
4458 volatile OrigFn _orig = (orig); \
4459 volatile unsigned long _argvec[7]; \
4460 volatile unsigned long _res; \
4461 _argvec[0] = (unsigned long)_orig.nraddr; \
4462 _argvec[1] = (unsigned long)(arg1); \
4463 _argvec[2] = (unsigned long)(arg2); \
4464 _argvec[3] = (unsigned long)(arg3); \
4465 _argvec[4] = (unsigned long)(arg4); \
4466 _argvec[5] = (unsigned long)(arg5); \
4467 _argvec[6] = (unsigned long)(arg6); \
4468 __asm__ volatile( \
4469 VALGRIND_ALIGN_STACK \
4470 "ldr x0, [%1, #8] \n\t" \
4471 "ldr x1, [%1, #16] \n\t" \
4472 "ldr x2, [%1, #24] \n\t" \
4473 "ldr x3, [%1, #32] \n\t" \
4474 "ldr x4, [%1, #40] \n\t" \
4475 "ldr x5, [%1, #48] \n\t" \
4476 "ldr x8, [%1] \n\t" \
4477 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4478 VALGRIND_RESTORE_STACK \
4479 "mov %0, x0" \
4480 : "=r" (_res) \
4481 : "0" (&_argvec[0]) \
4482 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4483 ); \
4484 lval = (__typeof__(lval)) _res; \
4485 } while (0)
4486
4487 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4488 arg7) \
4489 do { \
4490 volatile OrigFn _orig = (orig); \
4491 volatile unsigned long _argvec[8]; \
4492 volatile unsigned long _res; \
4493 _argvec[0] = (unsigned long)_orig.nraddr; \
4494 _argvec[1] = (unsigned long)(arg1); \
4495 _argvec[2] = (unsigned long)(arg2); \
4496 _argvec[3] = (unsigned long)(arg3); \
4497 _argvec[4] = (unsigned long)(arg4); \
4498 _argvec[5] = (unsigned long)(arg5); \
4499 _argvec[6] = (unsigned long)(arg6); \
4500 _argvec[7] = (unsigned long)(arg7); \
4501 __asm__ volatile( \
4502 VALGRIND_ALIGN_STACK \
4503 "ldr x0, [%1, #8] \n\t" \
4504 "ldr x1, [%1, #16] \n\t" \
4505 "ldr x2, [%1, #24] \n\t" \
4506 "ldr x3, [%1, #32] \n\t" \
4507 "ldr x4, [%1, #40] \n\t" \
4508 "ldr x5, [%1, #48] \n\t" \
4509 "ldr x6, [%1, #56] \n\t" \
4510 "ldr x8, [%1] \n\t" \
4511 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4512 VALGRIND_RESTORE_STACK \
4513 "mov %0, x0" \
4514 : "=r" (_res) \
4515 : "0" (&_argvec[0]) \
4516 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4517 ); \
4518 lval = (__typeof__(lval)) _res; \
4519 } while (0)
4520
4521 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4522 arg7,arg8) \
4523 do { \
4524 volatile OrigFn _orig = (orig); \
4525 volatile unsigned long _argvec[9]; \
4526 volatile unsigned long _res; \
4527 _argvec[0] = (unsigned long)_orig.nraddr; \
4528 _argvec[1] = (unsigned long)(arg1); \
4529 _argvec[2] = (unsigned long)(arg2); \
4530 _argvec[3] = (unsigned long)(arg3); \
4531 _argvec[4] = (unsigned long)(arg4); \
4532 _argvec[5] = (unsigned long)(arg5); \
4533 _argvec[6] = (unsigned long)(arg6); \
4534 _argvec[7] = (unsigned long)(arg7); \
4535 _argvec[8] = (unsigned long)(arg8); \
4536 __asm__ volatile( \
4537 VALGRIND_ALIGN_STACK \
4538 "ldr x0, [%1, #8] \n\t" \
4539 "ldr x1, [%1, #16] \n\t" \
4540 "ldr x2, [%1, #24] \n\t" \
4541 "ldr x3, [%1, #32] \n\t" \
4542 "ldr x4, [%1, #40] \n\t" \
4543 "ldr x5, [%1, #48] \n\t" \
4544 "ldr x6, [%1, #56] \n\t" \
4545 "ldr x7, [%1, #64] \n\t" \
4546 "ldr x8, [%1] \n\t" \
4547 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4548 VALGRIND_RESTORE_STACK \
4549 "mov %0, x0" \
4550 : "=r" (_res) \
4551 : "0" (&_argvec[0]) \
4552 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4553 ); \
4554 lval = (__typeof__(lval)) _res; \
4555 } while (0)
4556
4557 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4558 arg7,arg8,arg9) \
4559 do { \
4560 volatile OrigFn _orig = (orig); \
4561 volatile unsigned long _argvec[10]; \
4562 volatile unsigned long _res; \
4563 _argvec[0] = (unsigned long)_orig.nraddr; \
4564 _argvec[1] = (unsigned long)(arg1); \
4565 _argvec[2] = (unsigned long)(arg2); \
4566 _argvec[3] = (unsigned long)(arg3); \
4567 _argvec[4] = (unsigned long)(arg4); \
4568 _argvec[5] = (unsigned long)(arg5); \
4569 _argvec[6] = (unsigned long)(arg6); \
4570 _argvec[7] = (unsigned long)(arg7); \
4571 _argvec[8] = (unsigned long)(arg8); \
4572 _argvec[9] = (unsigned long)(arg9); \
4573 __asm__ volatile( \
4574 VALGRIND_ALIGN_STACK \
4575 "sub sp, sp, #0x20 \n\t" \
4576 "ldr x0, [%1, #8] \n\t" \
4577 "ldr x1, [%1, #16] \n\t" \
4578 "ldr x2, [%1, #24] \n\t" \
4579 "ldr x3, [%1, #32] \n\t" \
4580 "ldr x4, [%1, #40] \n\t" \
4581 "ldr x5, [%1, #48] \n\t" \
4582 "ldr x6, [%1, #56] \n\t" \
4583 "ldr x7, [%1, #64] \n\t" \
4584 "ldr x8, [%1, #72] \n\t" \
4585 "str x8, [sp, #0] \n\t" \
4586 "ldr x8, [%1] \n\t" \
4587 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4588 VALGRIND_RESTORE_STACK \
4589 "mov %0, x0" \
4590 : "=r" (_res) \
4591 : "0" (&_argvec[0]) \
4592 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4593 ); \
4594 lval = (__typeof__(lval)) _res; \
4595 } while (0)
4596
4597 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4598 arg7,arg8,arg9,arg10) \
4599 do { \
4600 volatile OrigFn _orig = (orig); \
4601 volatile unsigned long _argvec[11]; \
4602 volatile unsigned long _res; \
4603 _argvec[0] = (unsigned long)_orig.nraddr; \
4604 _argvec[1] = (unsigned long)(arg1); \
4605 _argvec[2] = (unsigned long)(arg2); \
4606 _argvec[3] = (unsigned long)(arg3); \
4607 _argvec[4] = (unsigned long)(arg4); \
4608 _argvec[5] = (unsigned long)(arg5); \
4609 _argvec[6] = (unsigned long)(arg6); \
4610 _argvec[7] = (unsigned long)(arg7); \
4611 _argvec[8] = (unsigned long)(arg8); \
4612 _argvec[9] = (unsigned long)(arg9); \
4613 _argvec[10] = (unsigned long)(arg10); \
4614 __asm__ volatile( \
4615 VALGRIND_ALIGN_STACK \
4616 "sub sp, sp, #0x20 \n\t" \
4617 "ldr x0, [%1, #8] \n\t" \
4618 "ldr x1, [%1, #16] \n\t" \
4619 "ldr x2, [%1, #24] \n\t" \
4620 "ldr x3, [%1, #32] \n\t" \
4621 "ldr x4, [%1, #40] \n\t" \
4622 "ldr x5, [%1, #48] \n\t" \
4623 "ldr x6, [%1, #56] \n\t" \
4624 "ldr x7, [%1, #64] \n\t" \
4625 "ldr x8, [%1, #72] \n\t" \
4626 "str x8, [sp, #0] \n\t" \
4627 "ldr x8, [%1, #80] \n\t" \
4628 "str x8, [sp, #8] \n\t" \
4629 "ldr x8, [%1] \n\t" \
4630 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4631 VALGRIND_RESTORE_STACK \
4632 "mov %0, x0" \
4633 : "=r" (_res) \
4634 : "0" (&_argvec[0]) \
4635 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4636 ); \
4637 lval = (__typeof__(lval)) _res; \
4638 } while (0)
4639
4640 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4641 arg7,arg8,arg9,arg10,arg11) \
4642 do { \
4643 volatile OrigFn _orig = (orig); \
4644 volatile unsigned long _argvec[12]; \
4645 volatile unsigned long _res; \
4646 _argvec[0] = (unsigned long)_orig.nraddr; \
4647 _argvec[1] = (unsigned long)(arg1); \
4648 _argvec[2] = (unsigned long)(arg2); \
4649 _argvec[3] = (unsigned long)(arg3); \
4650 _argvec[4] = (unsigned long)(arg4); \
4651 _argvec[5] = (unsigned long)(arg5); \
4652 _argvec[6] = (unsigned long)(arg6); \
4653 _argvec[7] = (unsigned long)(arg7); \
4654 _argvec[8] = (unsigned long)(arg8); \
4655 _argvec[9] = (unsigned long)(arg9); \
4656 _argvec[10] = (unsigned long)(arg10); \
4657 _argvec[11] = (unsigned long)(arg11); \
4658 __asm__ volatile( \
4659 VALGRIND_ALIGN_STACK \
4660 "sub sp, sp, #0x30 \n\t" \
4661 "ldr x0, [%1, #8] \n\t" \
4662 "ldr x1, [%1, #16] \n\t" \
4663 "ldr x2, [%1, #24] \n\t" \
4664 "ldr x3, [%1, #32] \n\t" \
4665 "ldr x4, [%1, #40] \n\t" \
4666 "ldr x5, [%1, #48] \n\t" \
4667 "ldr x6, [%1, #56] \n\t" \
4668 "ldr x7, [%1, #64] \n\t" \
4669 "ldr x8, [%1, #72] \n\t" \
4670 "str x8, [sp, #0] \n\t" \
4671 "ldr x8, [%1, #80] \n\t" \
4672 "str x8, [sp, #8] \n\t" \
4673 "ldr x8, [%1, #88] \n\t" \
4674 "str x8, [sp, #16] \n\t" \
4675 "ldr x8, [%1] \n\t" \
4676 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4677 VALGRIND_RESTORE_STACK \
4678 "mov %0, x0" \
4679 : "=r" (_res) \
4680 : "0" (&_argvec[0]) \
4681 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4682 ); \
4683 lval = (__typeof__(lval)) _res; \
4684 } while (0)
4685
4686 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4687 arg7,arg8,arg9,arg10,arg11, \
4688 arg12) \
4689 do { \
4690 volatile OrigFn _orig = (orig); \
4691 volatile unsigned long _argvec[13]; \
4692 volatile unsigned long _res; \
4693 _argvec[0] = (unsigned long)_orig.nraddr; \
4694 _argvec[1] = (unsigned long)(arg1); \
4695 _argvec[2] = (unsigned long)(arg2); \
4696 _argvec[3] = (unsigned long)(arg3); \
4697 _argvec[4] = (unsigned long)(arg4); \
4698 _argvec[5] = (unsigned long)(arg5); \
4699 _argvec[6] = (unsigned long)(arg6); \
4700 _argvec[7] = (unsigned long)(arg7); \
4701 _argvec[8] = (unsigned long)(arg8); \
4702 _argvec[9] = (unsigned long)(arg9); \
4703 _argvec[10] = (unsigned long)(arg10); \
4704 _argvec[11] = (unsigned long)(arg11); \
4705 _argvec[12] = (unsigned long)(arg12); \
4706 __asm__ volatile( \
4707 VALGRIND_ALIGN_STACK \
4708 "sub sp, sp, #0x30 \n\t" \
4709 "ldr x0, [%1, #8] \n\t" \
4710 "ldr x1, [%1, #16] \n\t" \
4711 "ldr x2, [%1, #24] \n\t" \
4712 "ldr x3, [%1, #32] \n\t" \
4713 "ldr x4, [%1, #40] \n\t" \
4714 "ldr x5, [%1, #48] \n\t" \
4715 "ldr x6, [%1, #56] \n\t" \
4716 "ldr x7, [%1, #64] \n\t" \
4717 "ldr x8, [%1, #72] \n\t" \
4718 "str x8, [sp, #0] \n\t" \
4719 "ldr x8, [%1, #80] \n\t" \
4720 "str x8, [sp, #8] \n\t" \
4721 "ldr x8, [%1, #88] \n\t" \
4722 "str x8, [sp, #16] \n\t" \
4723 "ldr x8, [%1, #96] \n\t" \
4724 "str x8, [sp, #24] \n\t" \
4725 "ldr x8, [%1] \n\t" \
4726 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4727 VALGRIND_RESTORE_STACK \
4728 "mov %0, x0" \
4729 : "=r" (_res) \
4730 : "0" (&_argvec[0]) \
4731 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4732 ); \
4733 lval = (__typeof__(lval)) _res; \
4734 } while (0)
4735
4736 #endif
4737
4738
4739
4740 #if defined(PLAT_s390x_linux)
4741
4742
4743
4744
4745
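/* s390x-linux variants.  When the compiler emits DWARF CFI for inline
   asm (__GCC_HAVE_DWARF2_CFI_ASM), the prologue/epilogue below switch
   the CFA register to r11 (saving the caller's r11 in r7 and restoring
   it afterwards) so that unwinding still works across the hidden call;
   otherwise the prologue only copies the argument-vector pointer into
   r1. */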
4746 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
4747 # define __FRAME_POINTER \
4748 ,"d"(__builtin_dwarf_cfa())
4749 # define VALGRIND_CFI_PROLOGUE \
4750 ".cfi_remember_state\n\t" \
4751 "lgr 1,%1\n\t" \
4752 "lgr 7,11\n\t" \
4753 "lgr 11,%2\n\t" \
4754 ".cfi_def_cfa 11, 0\n\t"
4755 # define VALGRIND_CFI_EPILOGUE \
4756 "lgr 11, 7\n\t" \
4757 ".cfi_restore_state\n\t"
4758 #else
4759 # define __FRAME_POINTER
4760 # define VALGRIND_CFI_PROLOGUE \
4761 "lgr 1,%1\n\t"
4762 # define VALGRIND_CFI_EPILOGUE
4763 #endif
4764
4765
4766
4767
4768
4769
4770
4771
4772
4773
4774 #if defined(__VX__) || defined(__S390_VX__)
4775 #define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
4776 "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", \
4777 "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15", \
4778 "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23", \
4779 "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31"
4780 #else
4781 #define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
4782 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7"
4783 #endif
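/* The clobber list above names the vector registers when the vector
   facility is available (__VX__ / __S390_VX__) and f0-f7 otherwise.
   In the call macros below, r1 points at _argvec, the first five
   arguments are loaded into r2-r6, further arguments are copied with
   "mvc" to offsets 160, 168, ... above the new stack pointer (just
   past the 160-byte register save area each macro reserves), the
   target address is loaded into r1, and the result comes back in
   r2. */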
4784
4785
4786
4787
4788
4789
4790
4791
4792
4793
4794 #define CALL_FN_W_v(lval, orig) \
4795 do { \
4796 volatile OrigFn _orig = (orig); \
4797 volatile unsigned long _argvec[1]; \
4798 volatile unsigned long _res; \
4799 _argvec[0] = (unsigned long)_orig.nraddr; \
4800 __asm__ volatile( \
4801 VALGRIND_CFI_PROLOGUE \
4802 "aghi 15,-160\n\t" \
4803 "lg 1, 0(1)\n\t" \
4804 VALGRIND_CALL_NOREDIR_R1 \
4805 "aghi 15,160\n\t" \
4806 VALGRIND_CFI_EPILOGUE \
4807 "lgr %0, 2\n\t" \
4808 : "=d" (_res) \
4809 : "d" (&_argvec[0]) __FRAME_POINTER \
4810 : "cc", "memory", __CALLER_SAVED_REGS,"7" \
4811 ); \
4812 lval = (__typeof__(lval)) _res; \
4813 } while (0)
4814
4815
4816 #define CALL_FN_W_W(lval, orig, arg1) \
4817 do { \
4818 volatile OrigFn _orig = (orig); \
4819 volatile unsigned long _argvec[2]; \
4820 volatile unsigned long _res; \
4821 _argvec[0] = (unsigned long)_orig.nraddr; \
4822 _argvec[1] = (unsigned long)arg1; \
4823 __asm__ volatile( \
4824 VALGRIND_CFI_PROLOGUE \
4825 "aghi 15,-160\n\t" \
4826 "lg 2, 8(1)\n\t" \
4827 "lg 1, 0(1)\n\t" \
4828 VALGRIND_CALL_NOREDIR_R1 \
4829 "aghi 15,160\n\t" \
4830 VALGRIND_CFI_EPILOGUE \
4831 "lgr %0, 2\n\t" \
4832 : "=d" (_res) \
4833 : "a" (&_argvec[0]) __FRAME_POINTER \
4834 : "cc", "memory", __CALLER_SAVED_REGS,"7" \
4835 ); \
4836 lval = (__typeof__(lval)) _res; \
4837 } while (0)
4838
4839 #define CALL_FN_W_WW(lval, orig, arg1, arg2) \
4840 do { \
4841 volatile OrigFn _orig = (orig); \
4842 volatile unsigned long _argvec[3]; \
4843 volatile unsigned long _res; \
4844 _argvec[0] = (unsigned long)_orig.nraddr; \
4845 _argvec[1] = (unsigned long)arg1; \
4846 _argvec[2] = (unsigned long)arg2; \
4847 __asm__ volatile( \
4848 VALGRIND_CFI_PROLOGUE \
4849 "aghi 15,-160\n\t" \
4850 "lg 2, 8(1)\n\t" \
4851 "lg 3,16(1)\n\t" \
4852 "lg 1, 0(1)\n\t" \
4853 VALGRIND_CALL_NOREDIR_R1 \
4854 "aghi 15,160\n\t" \
4855 VALGRIND_CFI_EPILOGUE \
4856 "lgr %0, 2\n\t" \
4857 : "=d" (_res) \
4858 : "a" (&_argvec[0]) __FRAME_POINTER \
4859 : "cc", "memory", __CALLER_SAVED_REGS,"7" \
4860 ); \
4861 lval = (__typeof__(lval)) _res; \
4862 } while (0)
4863
4864 #define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
4865 do { \
4866 volatile OrigFn _orig = (orig); \
4867 volatile unsigned long _argvec[4]; \
4868 volatile unsigned long _res; \
4869 _argvec[0] = (unsigned long)_orig.nraddr; \
4870 _argvec[1] = (unsigned long)arg1; \
4871 _argvec[2] = (unsigned long)arg2; \
4872 _argvec[3] = (unsigned long)arg3; \
4873 __asm__ volatile( \
4874 VALGRIND_CFI_PROLOGUE \
4875 "aghi 15,-160\n\t" \
4876 "lg 2, 8(1)\n\t" \
4877 "lg 3,16(1)\n\t" \
4878 "lg 4,24(1)\n\t" \
4879 "lg 1, 0(1)\n\t" \
4880 VALGRIND_CALL_NOREDIR_R1 \
4881 "aghi 15,160\n\t" \
4882 VALGRIND_CFI_EPILOGUE \
4883 "lgr %0, 2\n\t" \
4884 : "=d" (_res) \
4885 : "a" (&_argvec[0]) __FRAME_POINTER \
4886 : "cc", "memory", __CALLER_SAVED_REGS,"7" \
4887 ); \
4888 lval = (__typeof__(lval)) _res; \
4889 } while (0)
4890
4891 #define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
4892 do { \
4893 volatile OrigFn _orig = (orig); \
4894 volatile unsigned long _argvec[5]; \
4895 volatile unsigned long _res; \
4896 _argvec[0] = (unsigned long)_orig.nraddr; \
4897 _argvec[1] = (unsigned long)arg1; \
4898 _argvec[2] = (unsigned long)arg2; \
4899 _argvec[3] = (unsigned long)arg3; \
4900 _argvec[4] = (unsigned long)arg4; \
4901 __asm__ volatile( \
4902 VALGRIND_CFI_PROLOGUE \
4903 "aghi 15,-160\n\t" \
4904 "lg 2, 8(1)\n\t" \
4905 "lg 3,16(1)\n\t" \
4906 "lg 4,24(1)\n\t" \
4907 "lg 5,32(1)\n\t" \
4908 "lg 1, 0(1)\n\t" \
4909 VALGRIND_CALL_NOREDIR_R1 \
4910 "aghi 15,160\n\t" \
4911 VALGRIND_CFI_EPILOGUE \
4912 "lgr %0, 2\n\t" \
4913 : "=d" (_res) \
4914 : "a" (&_argvec[0]) __FRAME_POINTER \
4915 : "cc", "memory", __CALLER_SAVED_REGS,"7" \
4916 ); \
4917 lval = (__typeof__(lval)) _res; \
4918 } while (0)
4919
4920 #define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
4921 do { \
4922 volatile OrigFn _orig = (orig); \
4923 volatile unsigned long _argvec[6]; \
4924 volatile unsigned long _res; \
4925 _argvec[0] = (unsigned long)_orig.nraddr; \
4926 _argvec[1] = (unsigned long)arg1; \
4927 _argvec[2] = (unsigned long)arg2; \
4928 _argvec[3] = (unsigned long)arg3; \
4929 _argvec[4] = (unsigned long)arg4; \
4930 _argvec[5] = (unsigned long)arg5; \
4931 __asm__ volatile( \
4932 VALGRIND_CFI_PROLOGUE \
4933 "aghi 15,-160\n\t" \
4934 "lg 2, 8(1)\n\t" \
4935 "lg 3,16(1)\n\t" \
4936 "lg 4,24(1)\n\t" \
4937 "lg 5,32(1)\n\t" \
4938 "lg 6,40(1)\n\t" \
4939 "lg 1, 0(1)\n\t" \
4940 VALGRIND_CALL_NOREDIR_R1 \
4941 "aghi 15,160\n\t" \
4942 VALGRIND_CFI_EPILOGUE \
4943 "lgr %0, 2\n\t" \
4944 : "=d" (_res) \
4945 : "a" (&_argvec[0]) __FRAME_POINTER \
4946 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4947 ); \
4948 lval = (__typeof__(lval)) _res; \
4949 } while (0)
4950
4951 #define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4952 arg6) \
4953 do { \
4954 volatile OrigFn _orig = (orig); \
4955 volatile unsigned long _argvec[7]; \
4956 volatile unsigned long _res; \
4957 _argvec[0] = (unsigned long)_orig.nraddr; \
4958 _argvec[1] = (unsigned long)arg1; \
4959 _argvec[2] = (unsigned long)arg2; \
4960 _argvec[3] = (unsigned long)arg3; \
4961 _argvec[4] = (unsigned long)arg4; \
4962 _argvec[5] = (unsigned long)arg5; \
4963 _argvec[6] = (unsigned long)arg6; \
4964 __asm__ volatile( \
4965 VALGRIND_CFI_PROLOGUE \
4966 "aghi 15,-168\n\t" \
4967 "lg 2, 8(1)\n\t" \
4968 "lg 3,16(1)\n\t" \
4969 "lg 4,24(1)\n\t" \
4970 "lg 5,32(1)\n\t" \
4971 "lg 6,40(1)\n\t" \
4972 "mvc 160(8,15), 48(1)\n\t" \
4973 "lg 1, 0(1)\n\t" \
4974 VALGRIND_CALL_NOREDIR_R1 \
4975 "aghi 15,168\n\t" \
4976 VALGRIND_CFI_EPILOGUE \
4977 "lgr %0, 2\n\t" \
4978 : "=d" (_res) \
4979 : "a" (&_argvec[0]) __FRAME_POINTER \
4980 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4981 ); \
4982 lval = (__typeof__(lval)) _res; \
4983 } while (0)
4984
4985 #define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4986 arg6, arg7) \
4987 do { \
4988 volatile OrigFn _orig = (orig); \
4989 volatile unsigned long _argvec[8]; \
4990 volatile unsigned long _res; \
4991 _argvec[0] = (unsigned long)_orig.nraddr; \
4992 _argvec[1] = (unsigned long)arg1; \
4993 _argvec[2] = (unsigned long)arg2; \
4994 _argvec[3] = (unsigned long)arg3; \
4995 _argvec[4] = (unsigned long)arg4; \
4996 _argvec[5] = (unsigned long)arg5; \
4997 _argvec[6] = (unsigned long)arg6; \
4998 _argvec[7] = (unsigned long)arg7; \
4999 __asm__ volatile( \
5000 VALGRIND_CFI_PROLOGUE \
5001 "aghi 15,-176\n\t" \
5002 "lg 2, 8(1)\n\t" \
5003 "lg 3,16(1)\n\t" \
5004 "lg 4,24(1)\n\t" \
5005 "lg 5,32(1)\n\t" \
5006 "lg 6,40(1)\n\t" \
5007 "mvc 160(8,15), 48(1)\n\t" \
5008 "mvc 168(8,15), 56(1)\n\t" \
5009 "lg 1, 0(1)\n\t" \
5010 VALGRIND_CALL_NOREDIR_R1 \
5011 "aghi 15,176\n\t" \
5012 VALGRIND_CFI_EPILOGUE \
5013 "lgr %0, 2\n\t" \
5014 : "=d" (_res) \
5015 : "a" (&_argvec[0]) __FRAME_POINTER \
5016 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5017 ); \
5018 lval = (__typeof__(lval)) _res; \
5019 } while (0)
5020
5021 #define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5022 arg6, arg7 ,arg8) \
5023 do { \
5024 volatile OrigFn _orig = (orig); \
5025 volatile unsigned long _argvec[9]; \
5026 volatile unsigned long _res; \
5027 _argvec[0] = (unsigned long)_orig.nraddr; \
5028 _argvec[1] = (unsigned long)arg1; \
5029 _argvec[2] = (unsigned long)arg2; \
5030 _argvec[3] = (unsigned long)arg3; \
5031 _argvec[4] = (unsigned long)arg4; \
5032 _argvec[5] = (unsigned long)arg5; \
5033 _argvec[6] = (unsigned long)arg6; \
5034 _argvec[7] = (unsigned long)arg7; \
5035 _argvec[8] = (unsigned long)arg8; \
5036 __asm__ volatile( \
5037 VALGRIND_CFI_PROLOGUE \
5038 "aghi 15,-184\n\t" \
5039 "lg 2, 8(1)\n\t" \
5040 "lg 3,16(1)\n\t" \
5041 "lg 4,24(1)\n\t" \
5042 "lg 5,32(1)\n\t" \
5043 "lg 6,40(1)\n\t" \
5044 "mvc 160(8,15), 48(1)\n\t" \
5045 "mvc 168(8,15), 56(1)\n\t" \
5046 "mvc 176(8,15), 64(1)\n\t" \
5047 "lg 1, 0(1)\n\t" \
5048 VALGRIND_CALL_NOREDIR_R1 \
5049 "aghi 15,184\n\t" \
5050 VALGRIND_CFI_EPILOGUE \
5051 "lgr %0, 2\n\t" \
5052 : "=d" (_res) \
5053 : "a" (&_argvec[0]) __FRAME_POINTER \
5054 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5055 ); \
5056 lval = (__typeof__(lval)) _res; \
5057 } while (0)
5058
5059 #define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5060 arg6, arg7 ,arg8, arg9) \
5061 do { \
5062 volatile OrigFn _orig = (orig); \
5063 volatile unsigned long _argvec[10]; \
5064 volatile unsigned long _res; \
5065 _argvec[0] = (unsigned long)_orig.nraddr; \
5066 _argvec[1] = (unsigned long)arg1; \
5067 _argvec[2] = (unsigned long)arg2; \
5068 _argvec[3] = (unsigned long)arg3; \
5069 _argvec[4] = (unsigned long)arg4; \
5070 _argvec[5] = (unsigned long)arg5; \
5071 _argvec[6] = (unsigned long)arg6; \
5072 _argvec[7] = (unsigned long)arg7; \
5073 _argvec[8] = (unsigned long)arg8; \
5074 _argvec[9] = (unsigned long)arg9; \
5075 __asm__ volatile( \
5076 VALGRIND_CFI_PROLOGUE \
5077 "aghi 15,-192\n\t" \
5078 "lg 2, 8(1)\n\t" \
5079 "lg 3,16(1)\n\t" \
5080 "lg 4,24(1)\n\t" \
5081 "lg 5,32(1)\n\t" \
5082 "lg 6,40(1)\n\t" \
5083 "mvc 160(8,15), 48(1)\n\t" \
5084 "mvc 168(8,15), 56(1)\n\t" \
5085 "mvc 176(8,15), 64(1)\n\t" \
5086 "mvc 184(8,15), 72(1)\n\t" \
5087 "lg 1, 0(1)\n\t" \
5088 VALGRIND_CALL_NOREDIR_R1 \
5089 "aghi 15,192\n\t" \
5090 VALGRIND_CFI_EPILOGUE \
5091 "lgr %0, 2\n\t" \
5092 : "=d" (_res) \
5093 : "a" (&_argvec[0]) __FRAME_POINTER \
5094 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5095 ); \
5096 lval = (__typeof__(lval)) _res; \
5097 } while (0)
5098
5099 #define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5100 arg6, arg7 ,arg8, arg9, arg10) \
5101 do { \
5102 volatile OrigFn _orig = (orig); \
5103 volatile unsigned long _argvec[11]; \
5104 volatile unsigned long _res; \
5105 _argvec[0] = (unsigned long)_orig.nraddr; \
5106 _argvec[1] = (unsigned long)arg1; \
5107 _argvec[2] = (unsigned long)arg2; \
5108 _argvec[3] = (unsigned long)arg3; \
5109 _argvec[4] = (unsigned long)arg4; \
5110 _argvec[5] = (unsigned long)arg5; \
5111 _argvec[6] = (unsigned long)arg6; \
5112 _argvec[7] = (unsigned long)arg7; \
5113 _argvec[8] = (unsigned long)arg8; \
5114 _argvec[9] = (unsigned long)arg9; \
5115 _argvec[10] = (unsigned long)arg10; \
5116 __asm__ volatile( \
5117 VALGRIND_CFI_PROLOGUE \
5118 "aghi 15,-200\n\t" \
5119 "lg 2, 8(1)\n\t" \
5120 "lg 3,16(1)\n\t" \
5121 "lg 4,24(1)\n\t" \
5122 "lg 5,32(1)\n\t" \
5123 "lg 6,40(1)\n\t" \
5124 "mvc 160(8,15), 48(1)\n\t" \
5125 "mvc 168(8,15), 56(1)\n\t" \
5126 "mvc 176(8,15), 64(1)\n\t" \
5127 "mvc 184(8,15), 72(1)\n\t" \
5128 "mvc 192(8,15), 80(1)\n\t" \
5129 "lg 1, 0(1)\n\t" \
5130 VALGRIND_CALL_NOREDIR_R1 \
5131 "aghi 15,200\n\t" \
5132 VALGRIND_CFI_EPILOGUE \
5133 "lgr %0, 2\n\t" \
5134 : "=d" (_res) \
5135 : "a" (&_argvec[0]) __FRAME_POINTER \
5136 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5137 ); \
5138 lval = (__typeof__(lval)) _res; \
5139 } while (0)
5140
5141 #define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5142 arg6, arg7 ,arg8, arg9, arg10, arg11) \
5143 do { \
5144 volatile OrigFn _orig = (orig); \
5145 volatile unsigned long _argvec[12]; \
5146 volatile unsigned long _res; \
5147 _argvec[0] = (unsigned long)_orig.nraddr; \
5148 _argvec[1] = (unsigned long)arg1; \
5149 _argvec[2] = (unsigned long)arg2; \
5150 _argvec[3] = (unsigned long)arg3; \
5151 _argvec[4] = (unsigned long)arg4; \
5152 _argvec[5] = (unsigned long)arg5; \
5153 _argvec[6] = (unsigned long)arg6; \
5154 _argvec[7] = (unsigned long)arg7; \
5155 _argvec[8] = (unsigned long)arg8; \
5156 _argvec[9] = (unsigned long)arg9; \
5157 _argvec[10] = (unsigned long)arg10; \
5158 _argvec[11] = (unsigned long)arg11; \
5159 __asm__ volatile( \
5160 VALGRIND_CFI_PROLOGUE \
5161 "aghi 15,-208\n\t" \
5162 "lg 2, 8(1)\n\t" \
5163 "lg 3,16(1)\n\t" \
5164 "lg 4,24(1)\n\t" \
5165 "lg 5,32(1)\n\t" \
5166 "lg 6,40(1)\n\t" \
5167 "mvc 160(8,15), 48(1)\n\t" \
5168 "mvc 168(8,15), 56(1)\n\t" \
5169 "mvc 176(8,15), 64(1)\n\t" \
5170 "mvc 184(8,15), 72(1)\n\t" \
5171 "mvc 192(8,15), 80(1)\n\t" \
5172 "mvc 200(8,15), 88(1)\n\t" \
5173 "lg 1, 0(1)\n\t" \
5174 VALGRIND_CALL_NOREDIR_R1 \
5175 "aghi 15,208\n\t" \
5176 VALGRIND_CFI_EPILOGUE \
5177 "lgr %0, 2\n\t" \
5178 : "=d" (_res) \
5179 : "a" (&_argvec[0]) __FRAME_POINTER \
5180 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5181 ); \
5182 lval = (__typeof__(lval)) _res; \
5183 } while (0)
5184
5185 #define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5186 arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
5187 do { \
5188 volatile OrigFn _orig = (orig); \
5189 volatile unsigned long _argvec[13]; \
5190 volatile unsigned long _res; \
5191 _argvec[0] = (unsigned long)_orig.nraddr; \
5192 _argvec[1] = (unsigned long)arg1; \
5193 _argvec[2] = (unsigned long)arg2; \
5194 _argvec[3] = (unsigned long)arg3; \
5195 _argvec[4] = (unsigned long)arg4; \
5196 _argvec[5] = (unsigned long)arg5; \
5197 _argvec[6] = (unsigned long)arg6; \
5198 _argvec[7] = (unsigned long)arg7; \
5199 _argvec[8] = (unsigned long)arg8; \
5200 _argvec[9] = (unsigned long)arg9; \
5201 _argvec[10] = (unsigned long)arg10; \
5202 _argvec[11] = (unsigned long)arg11; \
5203 _argvec[12] = (unsigned long)arg12; \
5204 __asm__ volatile( \
5205 VALGRIND_CFI_PROLOGUE \
5206 "aghi 15,-216\n\t" \
5207 "lg 2, 8(1)\n\t" \
5208 "lg 3,16(1)\n\t" \
5209 "lg 4,24(1)\n\t" \
5210 "lg 5,32(1)\n\t" \
5211 "lg 6,40(1)\n\t" \
5212 "mvc 160(8,15), 48(1)\n\t" \
5213 "mvc 168(8,15), 56(1)\n\t" \
5214 "mvc 176(8,15), 64(1)\n\t" \
5215 "mvc 184(8,15), 72(1)\n\t" \
5216 "mvc 192(8,15), 80(1)\n\t" \
5217 "mvc 200(8,15), 88(1)\n\t" \
5218 "mvc 208(8,15), 96(1)\n\t" \
5219 "lg 1, 0(1)\n\t" \
5220 VALGRIND_CALL_NOREDIR_R1 \
5221 "aghi 15,216\n\t" \
5222 VALGRIND_CFI_EPILOGUE \
5223 "lgr %0, 2\n\t" \
5224 : "=d" (_res) \
5225 : "a" (&_argvec[0]) __FRAME_POINTER \
5226 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5227 ); \
5228 lval = (__typeof__(lval)) _res; \
5229 } while (0)
5230
5231
5232 #endif
5233
5234
5235
5236 #if defined(PLAT_mips32_linux)
5237
5238
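/* mips32 (o32) variants.  The first four arguments go in $4-$7
   (a0-a3), further arguments are stored on the stack above the 16-byte
   argument slot area, the callee address is loaded into $25 (t9) as
   the o32 PIC convention requires, $28 (gp) and $31 (ra) are saved and
   restored around the hidden call, and the result is returned in
   $2 (v0). */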
5239 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
5240 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
5241 "$25", "$31"
5242
5243
5244
5245
5246 #define CALL_FN_W_v(lval, orig) \
5247 do { \
5248 volatile OrigFn _orig = (orig); \
5249 volatile unsigned long _argvec[1]; \
5250 volatile unsigned long _res; \
5251 _argvec[0] = (unsigned long)_orig.nraddr; \
5252 __asm__ volatile( \
5253 "subu $29, $29, 8 \n\t" \
5254 "sw $28, 0($29) \n\t" \
5255 "sw $31, 4($29) \n\t" \
5256 "subu $29, $29, 16 \n\t" \
5257 "lw $25, 0(%1) \n\t" \
5258 VALGRIND_CALL_NOREDIR_T9 \
5259 "addu $29, $29, 16\n\t" \
5260 "lw $28, 0($29) \n\t" \
5261 "lw $31, 4($29) \n\t" \
5262 "addu $29, $29, 8 \n\t" \
5263 "move %0, $2\n" \
5264 : "=r" (_res) \
5265 : "0" (&_argvec[0]) \
5266 : "memory", __CALLER_SAVED_REGS \
5267 ); \
5268 lval = (__typeof__(lval)) _res; \
5269 } while (0)
5270
5271 #define CALL_FN_W_W(lval, orig, arg1) \
5272 do { \
5273 volatile OrigFn _orig = (orig); \
5274 volatile unsigned long _argvec[2]; \
5275 volatile unsigned long _res; \
5276 _argvec[0] = (unsigned long)_orig.nraddr; \
5277 _argvec[1] = (unsigned long)(arg1); \
5278 __asm__ volatile( \
5279 "subu $29, $29, 8 \n\t" \
5280 "sw $28, 0($29) \n\t" \
5281 "sw $31, 4($29) \n\t" \
5282 "subu $29, $29, 16 \n\t" \
5283 "lw $4, 4(%1) \n\t" \
5284 "lw $25, 0(%1) \n\t" \
5285 VALGRIND_CALL_NOREDIR_T9 \
5286 "addu $29, $29, 16 \n\t" \
5287 "lw $28, 0($29) \n\t" \
5288 "lw $31, 4($29) \n\t" \
5289 "addu $29, $29, 8 \n\t" \
5290 "move %0, $2\n" \
5291 : "=r" (_res) \
5292 : "0" (&_argvec[0]) \
5293 : "memory", __CALLER_SAVED_REGS \
5294 ); \
5295 lval = (__typeof__(lval)) _res; \
5296 } while (0)
5297
5298 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5299 do { \
5300 volatile OrigFn _orig = (orig); \
5301 volatile unsigned long _argvec[3]; \
5302 volatile unsigned long _res; \
5303 _argvec[0] = (unsigned long)_orig.nraddr; \
5304 _argvec[1] = (unsigned long)(arg1); \
5305 _argvec[2] = (unsigned long)(arg2); \
5306 __asm__ volatile( \
5307 "subu $29, $29, 8 \n\t" \
5308 "sw $28, 0($29) \n\t" \
5309 "sw $31, 4($29) \n\t" \
5310 "subu $29, $29, 16 \n\t" \
5311 "lw $4, 4(%1) \n\t" \
5312 "lw $5, 8(%1) \n\t" \
5313 "lw $25, 0(%1) \n\t" \
5314 VALGRIND_CALL_NOREDIR_T9 \
5315 "addu $29, $29, 16 \n\t" \
5316 "lw $28, 0($29) \n\t" \
5317 "lw $31, 4($29) \n\t" \
5318 "addu $29, $29, 8 \n\t" \
5319 "move %0, $2\n" \
5320 : "=r" (_res) \
5321 : "0" (&_argvec[0]) \
5322 : "memory", __CALLER_SAVED_REGS \
5323 ); \
5324 lval = (__typeof__(lval)) _res; \
5325 } while (0)
5326
5327 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5328 do { \
5329 volatile OrigFn _orig = (orig); \
5330 volatile unsigned long _argvec[4]; \
5331 volatile unsigned long _res; \
5332 _argvec[0] = (unsigned long)_orig.nraddr; \
5333 _argvec[1] = (unsigned long)(arg1); \
5334 _argvec[2] = (unsigned long)(arg2); \
5335 _argvec[3] = (unsigned long)(arg3); \
5336 __asm__ volatile( \
5337 "subu $29, $29, 8 \n\t" \
5338 "sw $28, 0($29) \n\t" \
5339 "sw $31, 4($29) \n\t" \
5340 "subu $29, $29, 16 \n\t" \
5341 "lw $4, 4(%1) \n\t" \
5342 "lw $5, 8(%1) \n\t" \
5343 "lw $6, 12(%1) \n\t" \
5344 "lw $25, 0(%1) \n\t" \
5345 VALGRIND_CALL_NOREDIR_T9 \
5346 "addu $29, $29, 16 \n\t" \
5347 "lw $28, 0($29) \n\t" \
5348 "lw $31, 4($29) \n\t" \
5349 "addu $29, $29, 8 \n\t" \
5350 "move %0, $2\n" \
5351 : "=r" (_res) \
5352 : "0" (&_argvec[0]) \
5353 : "memory", __CALLER_SAVED_REGS \
5354 ); \
5355 lval = (__typeof__(lval)) _res; \
5356 } while (0)
5357
5358 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5359 do { \
5360 volatile OrigFn _orig = (orig); \
5361 volatile unsigned long _argvec[5]; \
5362 volatile unsigned long _res; \
5363 _argvec[0] = (unsigned long)_orig.nraddr; \
5364 _argvec[1] = (unsigned long)(arg1); \
5365 _argvec[2] = (unsigned long)(arg2); \
5366 _argvec[3] = (unsigned long)(arg3); \
5367 _argvec[4] = (unsigned long)(arg4); \
5368 __asm__ volatile( \
5369 "subu $29, $29, 8 \n\t" \
5370 "sw $28, 0($29) \n\t" \
5371 "sw $31, 4($29) \n\t" \
5372 "subu $29, $29, 16 \n\t" \
5373 "lw $4, 4(%1) \n\t" \
5374 "lw $5, 8(%1) \n\t" \
5375 "lw $6, 12(%1) \n\t" \
5376 "lw $7, 16(%1) \n\t" \
5377 "lw $25, 0(%1) \n\t" \
5378 VALGRIND_CALL_NOREDIR_T9 \
5379 "addu $29, $29, 16 \n\t" \
5380 "lw $28, 0($29) \n\t" \
5381 "lw $31, 4($29) \n\t" \
5382 "addu $29, $29, 8 \n\t" \
5383 "move %0, $2\n" \
5384 : "=r" (_res) \
5385 : "0" (&_argvec[0]) \
5386 : "memory", __CALLER_SAVED_REGS \
5387 ); \
5388 lval = (__typeof__(lval)) _res; \
5389 } while (0)
5390
5391 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5392 do { \
5393 volatile OrigFn _orig = (orig); \
5394 volatile unsigned long _argvec[6]; \
5395 volatile unsigned long _res; \
5396 _argvec[0] = (unsigned long)_orig.nraddr; \
5397 _argvec[1] = (unsigned long)(arg1); \
5398 _argvec[2] = (unsigned long)(arg2); \
5399 _argvec[3] = (unsigned long)(arg3); \
5400 _argvec[4] = (unsigned long)(arg4); \
5401 _argvec[5] = (unsigned long)(arg5); \
5402 __asm__ volatile( \
5403 "subu $29, $29, 8 \n\t" \
5404 "sw $28, 0($29) \n\t" \
5405 "sw $31, 4($29) \n\t" \
5406 "lw $4, 20(%1) \n\t" \
5407 "subu $29, $29, 24\n\t" \
5408 "sw $4, 16($29) \n\t" \
5409 "lw $4, 4(%1) \n\t" \
5410 "lw $5, 8(%1) \n\t" \
5411 "lw $6, 12(%1) \n\t" \
5412 "lw $7, 16(%1) \n\t" \
5413 "lw $25, 0(%1) \n\t" \
5414 VALGRIND_CALL_NOREDIR_T9 \
5415 "addu $29, $29, 24 \n\t" \
5416 "lw $28, 0($29) \n\t" \
5417 "lw $31, 4($29) \n\t" \
5418 "addu $29, $29, 8 \n\t" \
5419 "move %0, $2\n" \
5420 : "=r" (_res) \
5421 : "0" (&_argvec[0]) \
5422 : "memory", __CALLER_SAVED_REGS \
5423 ); \
5424 lval = (__typeof__(lval)) _res; \
5425 } while (0)
5426 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5427 do { \
5428 volatile OrigFn _orig = (orig); \
5429 volatile unsigned long _argvec[7]; \
5430 volatile unsigned long _res; \
5431 _argvec[0] = (unsigned long)_orig.nraddr; \
5432 _argvec[1] = (unsigned long)(arg1); \
5433 _argvec[2] = (unsigned long)(arg2); \
5434 _argvec[3] = (unsigned long)(arg3); \
5435 _argvec[4] = (unsigned long)(arg4); \
5436 _argvec[5] = (unsigned long)(arg5); \
5437 _argvec[6] = (unsigned long)(arg6); \
5438 __asm__ volatile( \
5439 "subu $29, $29, 8 \n\t" \
5440 "sw $28, 0($29) \n\t" \
5441 "sw $31, 4($29) \n\t" \
5442 "lw $4, 20(%1) \n\t" \
5443 "subu $29, $29, 32\n\t" \
5444 "sw $4, 16($29) \n\t" \
5445 "lw $4, 24(%1) \n\t" \
5446 "nop\n\t" \
5447 "sw $4, 20($29) \n\t" \
5448 "lw $4, 4(%1) \n\t" \
5449 "lw $5, 8(%1) \n\t" \
5450 "lw $6, 12(%1) \n\t" \
5451 "lw $7, 16(%1) \n\t" \
5452 "lw $25, 0(%1) \n\t" \
5453 VALGRIND_CALL_NOREDIR_T9 \
5454 "addu $29, $29, 32 \n\t" \
5455 "lw $28, 0($29) \n\t" \
5456 "lw $31, 4($29) \n\t" \
5457 "addu $29, $29, 8 \n\t" \
5458 "move %0, $2\n" \
5459 : "=r" (_res) \
5460 : "0" (&_argvec[0]) \
5461 : "memory", __CALLER_SAVED_REGS \
5462 ); \
5463 lval = (__typeof__(lval)) _res; \
5464 } while (0)
5465
5466 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5467 arg7) \
5468 do { \
5469 volatile OrigFn _orig = (orig); \
5470 volatile unsigned long _argvec[8]; \
5471 volatile unsigned long _res; \
5472 _argvec[0] = (unsigned long)_orig.nraddr; \
5473 _argvec[1] = (unsigned long)(arg1); \
5474 _argvec[2] = (unsigned long)(arg2); \
5475 _argvec[3] = (unsigned long)(arg3); \
5476 _argvec[4] = (unsigned long)(arg4); \
5477 _argvec[5] = (unsigned long)(arg5); \
5478 _argvec[6] = (unsigned long)(arg6); \
5479 _argvec[7] = (unsigned long)(arg7); \
5480 __asm__ volatile( \
5481 "subu $29, $29, 8 \n\t" \
5482 "sw $28, 0($29) \n\t" \
5483 "sw $31, 4($29) \n\t" \
5484 "lw $4, 20(%1) \n\t" \
5485 "subu $29, $29, 32\n\t" \
5486 "sw $4, 16($29) \n\t" \
5487 "lw $4, 24(%1) \n\t" \
5488 "sw $4, 20($29) \n\t" \
5489 "lw $4, 28(%1) \n\t" \
5490 "sw $4, 24($29) \n\t" \
5491 "lw $4, 4(%1) \n\t" \
5492 "lw $5, 8(%1) \n\t" \
5493 "lw $6, 12(%1) \n\t" \
5494 "lw $7, 16(%1) \n\t" \
5495 "lw $25, 0(%1) \n\t" \
5496 VALGRIND_CALL_NOREDIR_T9 \
5497 "addu $29, $29, 32 \n\t" \
5498 "lw $28, 0($29) \n\t" \
5499 "lw $31, 4($29) \n\t" \
5500 "addu $29, $29, 8 \n\t" \
5501 "move %0, $2\n" \
5502 : "=r" (_res) \
5503 : "0" (&_argvec[0]) \
5504 : "memory", __CALLER_SAVED_REGS \
5505 ); \
5506 lval = (__typeof__(lval)) _res; \
5507 } while (0)
5508
5509 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5510 arg7,arg8) \
5511 do { \
5512 volatile OrigFn _orig = (orig); \
5513 volatile unsigned long _argvec[9]; \
5514 volatile unsigned long _res; \
5515 _argvec[0] = (unsigned long)_orig.nraddr; \
5516 _argvec[1] = (unsigned long)(arg1); \
5517 _argvec[2] = (unsigned long)(arg2); \
5518 _argvec[3] = (unsigned long)(arg3); \
5519 _argvec[4] = (unsigned long)(arg4); \
5520 _argvec[5] = (unsigned long)(arg5); \
5521 _argvec[6] = (unsigned long)(arg6); \
5522 _argvec[7] = (unsigned long)(arg7); \
5523 _argvec[8] = (unsigned long)(arg8); \
5524 __asm__ volatile( \
5525 "subu $29, $29, 8 \n\t" \
5526 "sw $28, 0($29) \n\t" \
5527 "sw $31, 4($29) \n\t" \
5528 "lw $4, 20(%1) \n\t" \
5529 "subu $29, $29, 40\n\t" \
5530 "sw $4, 16($29) \n\t" \
5531 "lw $4, 24(%1) \n\t" \
5532 "sw $4, 20($29) \n\t" \
5533 "lw $4, 28(%1) \n\t" \
5534 "sw $4, 24($29) \n\t" \
5535 "lw $4, 32(%1) \n\t" \
5536 "sw $4, 28($29) \n\t" \
5537 "lw $4, 4(%1) \n\t" \
5538 "lw $5, 8(%1) \n\t" \
5539 "lw $6, 12(%1) \n\t" \
5540 "lw $7, 16(%1) \n\t" \
5541 "lw $25, 0(%1) \n\t" \
5542 VALGRIND_CALL_NOREDIR_T9 \
5543 "addu $29, $29, 40 \n\t" \
5544 "lw $28, 0($29) \n\t" \
5545 "lw $31, 4($29) \n\t" \
5546 "addu $29, $29, 8 \n\t" \
5547 "move %0, $2\n" \
5548 : "=r" (_res) \
5549 : "0" (&_argvec[0]) \
5550 : "memory", __CALLER_SAVED_REGS \
5551 ); \
5552 lval = (__typeof__(lval)) _res; \
5553 } while (0)
5554
5555 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5556 arg7,arg8,arg9) \
5557 do { \
5558 volatile OrigFn _orig = (orig); \
5559 volatile unsigned long _argvec[10]; \
5560 volatile unsigned long _res; \
5561 _argvec[0] = (unsigned long)_orig.nraddr; \
5562 _argvec[1] = (unsigned long)(arg1); \
5563 _argvec[2] = (unsigned long)(arg2); \
5564 _argvec[3] = (unsigned long)(arg3); \
5565 _argvec[4] = (unsigned long)(arg4); \
5566 _argvec[5] = (unsigned long)(arg5); \
5567 _argvec[6] = (unsigned long)(arg6); \
5568 _argvec[7] = (unsigned long)(arg7); \
5569 _argvec[8] = (unsigned long)(arg8); \
5570 _argvec[9] = (unsigned long)(arg9); \
5571 __asm__ volatile( \
5572 "subu $29, $29, 8 \n\t" \
5573 "sw $28, 0($29) \n\t" \
5574 "sw $31, 4($29) \n\t" \
5575 "lw $4, 20(%1) \n\t" \
5576 "subu $29, $29, 40\n\t" \
5577 "sw $4, 16($29) \n\t" \
5578 "lw $4, 24(%1) \n\t" \
5579 "sw $4, 20($29) \n\t" \
5580 "lw $4, 28(%1) \n\t" \
5581 "sw $4, 24($29) \n\t" \
5582 "lw $4, 32(%1) \n\t" \
5583 "sw $4, 28($29) \n\t" \
5584 "lw $4, 36(%1) \n\t" \
5585 "sw $4, 32($29) \n\t" \
5586 "lw $4, 4(%1) \n\t" \
5587 "lw $5, 8(%1) \n\t" \
5588 "lw $6, 12(%1) \n\t" \
5589 "lw $7, 16(%1) \n\t" \
5590 "lw $25, 0(%1) \n\t" \
5591 VALGRIND_CALL_NOREDIR_T9 \
5592 "addu $29, $29, 40 \n\t" \
5593 "lw $28, 0($29) \n\t" \
5594 "lw $31, 4($29) \n\t" \
5595 "addu $29, $29, 8 \n\t" \
5596 "move %0, $2\n" \
5597 : "=r" (_res) \
5598 : "0" (&_argvec[0]) \
5599 : "memory", __CALLER_SAVED_REGS \
5600 ); \
5601 lval = (__typeof__(lval)) _res; \
5602 } while (0)
5603
5604 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5605 arg7,arg8,arg9,arg10) \
5606 do { \
5607 volatile OrigFn _orig = (orig); \
5608 volatile unsigned long _argvec[11]; \
5609 volatile unsigned long _res; \
5610 _argvec[0] = (unsigned long)_orig.nraddr; \
5611 _argvec[1] = (unsigned long)(arg1); \
5612 _argvec[2] = (unsigned long)(arg2); \
5613 _argvec[3] = (unsigned long)(arg3); \
5614 _argvec[4] = (unsigned long)(arg4); \
5615 _argvec[5] = (unsigned long)(arg5); \
5616 _argvec[6] = (unsigned long)(arg6); \
5617 _argvec[7] = (unsigned long)(arg7); \
5618 _argvec[8] = (unsigned long)(arg8); \
5619 _argvec[9] = (unsigned long)(arg9); \
5620 _argvec[10] = (unsigned long)(arg10); \
5621 __asm__ volatile( \
5622 "subu $29, $29, 8 \n\t" \
5623 "sw $28, 0($29) \n\t" \
5624 "sw $31, 4($29) \n\t" \
5625 "lw $4, 20(%1) \n\t" \
5626 "subu $29, $29, 48\n\t" \
5627 "sw $4, 16($29) \n\t" \
5628 "lw $4, 24(%1) \n\t" \
5629 "sw $4, 20($29) \n\t" \
5630 "lw $4, 28(%1) \n\t" \
5631 "sw $4, 24($29) \n\t" \
5632 "lw $4, 32(%1) \n\t" \
5633 "sw $4, 28($29) \n\t" \
5634 "lw $4, 36(%1) \n\t" \
5635 "sw $4, 32($29) \n\t" \
5636 "lw $4, 40(%1) \n\t" \
5637 "sw $4, 36($29) \n\t" \
5638 "lw $4, 4(%1) \n\t" \
5639 "lw $5, 8(%1) \n\t" \
5640 "lw $6, 12(%1) \n\t" \
5641 "lw $7, 16(%1) \n\t" \
5642 "lw $25, 0(%1) \n\t" \
5643 VALGRIND_CALL_NOREDIR_T9 \
5644 "addu $29, $29, 48 \n\t" \
5645 "lw $28, 0($29) \n\t" \
5646 "lw $31, 4($29) \n\t" \
5647 "addu $29, $29, 8 \n\t" \
5648 "move %0, $2\n" \
5649 : "=r" (_res) \
5650 : "0" (&_argvec[0]) \
5651 : "memory", __CALLER_SAVED_REGS \
5652 ); \
5653 lval = (__typeof__(lval)) _res; \
5654 } while (0)
5655
5656 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5657 arg6,arg7,arg8,arg9,arg10, \
5658 arg11) \
5659 do { \
5660 volatile OrigFn _orig = (orig); \
5661 volatile unsigned long _argvec[12]; \
5662 volatile unsigned long _res; \
5663 _argvec[0] = (unsigned long)_orig.nraddr; \
5664 _argvec[1] = (unsigned long)(arg1); \
5665 _argvec[2] = (unsigned long)(arg2); \
5666 _argvec[3] = (unsigned long)(arg3); \
5667 _argvec[4] = (unsigned long)(arg4); \
5668 _argvec[5] = (unsigned long)(arg5); \
5669 _argvec[6] = (unsigned long)(arg6); \
5670 _argvec[7] = (unsigned long)(arg7); \
5671 _argvec[8] = (unsigned long)(arg8); \
5672 _argvec[9] = (unsigned long)(arg9); \
5673 _argvec[10] = (unsigned long)(arg10); \
5674 _argvec[11] = (unsigned long)(arg11); \
5675 __asm__ volatile( \
5676 "subu $29, $29, 8 \n\t" \
5677 "sw $28, 0($29) \n\t" \
5678 "sw $31, 4($29) \n\t" \
5679 "lw $4, 20(%1) \n\t" \
5680 "subu $29, $29, 48\n\t" \
5681 "sw $4, 16($29) \n\t" \
5682 "lw $4, 24(%1) \n\t" \
5683 "sw $4, 20($29) \n\t" \
5684 "lw $4, 28(%1) \n\t" \
5685 "sw $4, 24($29) \n\t" \
5686 "lw $4, 32(%1) \n\t" \
5687 "sw $4, 28($29) \n\t" \
5688 "lw $4, 36(%1) \n\t" \
5689 "sw $4, 32($29) \n\t" \
5690 "lw $4, 40(%1) \n\t" \
5691 "sw $4, 36($29) \n\t" \
5692 "lw $4, 44(%1) \n\t" \
5693 "sw $4, 40($29) \n\t" \
5694 "lw $4, 4(%1) \n\t" \
5695 "lw $5, 8(%1) \n\t" \
5696 "lw $6, 12(%1) \n\t" \
5697 "lw $7, 16(%1) \n\t" \
5698 "lw $25, 0(%1) \n\t" \
5699 VALGRIND_CALL_NOREDIR_T9 \
5700 "addu $29, $29, 48 \n\t" \
5701 "lw $28, 0($29) \n\t" \
5702 "lw $31, 4($29) \n\t" \
5703 "addu $29, $29, 8 \n\t" \
5704 "move %0, $2\n" \
5705 : "=r" (_res) \
5706 : "0" (&_argvec[0]) \
5707 : "memory", __CALLER_SAVED_REGS \
5708 ); \
5709 lval = (__typeof__(lval)) _res; \
5710 } while (0)
5711
5712 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5713 arg6,arg7,arg8,arg9,arg10, \
5714 arg11,arg12) \
5715 do { \
5716 volatile OrigFn _orig = (orig); \
5717 volatile unsigned long _argvec[13]; \
5718 volatile unsigned long _res; \
5719 _argvec[0] = (unsigned long)_orig.nraddr; \
5720 _argvec[1] = (unsigned long)(arg1); \
5721 _argvec[2] = (unsigned long)(arg2); \
5722 _argvec[3] = (unsigned long)(arg3); \
5723 _argvec[4] = (unsigned long)(arg4); \
5724 _argvec[5] = (unsigned long)(arg5); \
5725 _argvec[6] = (unsigned long)(arg6); \
5726 _argvec[7] = (unsigned long)(arg7); \
5727 _argvec[8] = (unsigned long)(arg8); \
5728 _argvec[9] = (unsigned long)(arg9); \
5729 _argvec[10] = (unsigned long)(arg10); \
5730 _argvec[11] = (unsigned long)(arg11); \
5731 _argvec[12] = (unsigned long)(arg12); \
5732 __asm__ volatile( \
5733 "subu $29, $29, 8 \n\t" \
5734 "sw $28, 0($29) \n\t" \
5735 "sw $31, 4($29) \n\t" \
5736 "lw $4, 20(%1) \n\t" \
5737 "subu $29, $29, 56\n\t" \
5738 "sw $4, 16($29) \n\t" \
5739 "lw $4, 24(%1) \n\t" \
5740 "sw $4, 20($29) \n\t" \
5741 "lw $4, 28(%1) \n\t" \
5742 "sw $4, 24($29) \n\t" \
5743 "lw $4, 32(%1) \n\t" \
5744 "sw $4, 28($29) \n\t" \
5745 "lw $4, 36(%1) \n\t" \
5746 "sw $4, 32($29) \n\t" \
5747 "lw $4, 40(%1) \n\t" \
5748 "sw $4, 36($29) \n\t" \
5749 "lw $4, 44(%1) \n\t" \
5750 "sw $4, 40($29) \n\t" \
5751 "lw $4, 48(%1) \n\t" \
5752 "sw $4, 44($29) \n\t" \
5753 "lw $4, 4(%1) \n\t" \
5754 "lw $5, 8(%1) \n\t" \
5755 "lw $6, 12(%1) \n\t" \
5756 "lw $7, 16(%1) \n\t" \
5757 "lw $25, 0(%1) \n\t" \
5758 VALGRIND_CALL_NOREDIR_T9 \
5759 "addu $29, $29, 56 \n\t" \
5760 "lw $28, 0($29) \n\t" \
5761 "lw $31, 4($29) \n\t" \
5762 "addu $29, $29, 8 \n\t" \
5763 "move %0, $2\n" \
5764 : "=r" (_res) \
5765 : "r" (&_argvec[0]) \
5766 : "memory", __CALLER_SAVED_REGS \
5767 ); \
5768 lval = (__typeof__(lval)) _res; \
5769 } while (0)
5770
5771 #endif
5772
5773
5774
5775 #if defined(PLAT_nanomips_linux)
5776
5777
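/* nanomips variants.  Up to eight arguments are passed in $a0-$a7,
   the callee address goes in $t9 for the hidden call, and the result
   is returned in $a0 (hence the trailing "move %0, $a0" in each
   macro). */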
5778 #define __CALLER_SAVED_REGS "$t4", "$t5", "$a0", "$a1", "$a2", \
5779 "$a3", "$a4", "$a5", "$a6", "$a7", "$t0", "$t1", "$t2", "$t3", \
5780 "$t8","$t9", "$at"
5781
5782
5783
5784
5785 #define CALL_FN_W_v(lval, orig) \
5786 do { \
5787 volatile OrigFn _orig = (orig); \
5788 volatile unsigned long _argvec[1]; \
5789 volatile unsigned long _res; \
5790 _argvec[0] = (unsigned long)_orig.nraddr; \
5791 __asm__ volatile( \
5792 "lw $t9, 0(%1)\n\t" \
5793 VALGRIND_CALL_NOREDIR_T9 \
5794 "move %0, $a0\n" \
5795 : "=r" (_res) \
5796 : "r" (&_argvec[0]) \
5797 : "memory", __CALLER_SAVED_REGS \
5798 ); \
5799 lval = (__typeof__(lval)) _res; \
5800 } while (0)
5801
5802 #define CALL_FN_W_W(lval, orig, arg1) \
5803 do { \
5804 volatile OrigFn _orig = (orig); \
5805 volatile unsigned long _argvec[2]; \
5806 volatile unsigned long _res; \
5807 _argvec[0] = (unsigned long)_orig.nraddr; \
5808 _argvec[1] = (unsigned long)(arg1); \
5809 __asm__ volatile( \
5810 "lw $t9, 0(%1)\n\t" \
5811 "lw $a0, 4(%1)\n\t" \
5812 VALGRIND_CALL_NOREDIR_T9 \
5813 "move %0, $a0\n" \
5814 : "=r" (_res) \
5815 : "r" (&_argvec[0]) \
5816 : "memory", __CALLER_SAVED_REGS \
5817 ); \
5818 lval = (__typeof__(lval)) _res; \
5819 } while (0)
5820
5821 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5822 do { \
5823 volatile OrigFn _orig = (orig); \
5824 volatile unsigned long _argvec[3]; \
5825 volatile unsigned long _res; \
5826 _argvec[0] = (unsigned long)_orig.nraddr; \
5827 _argvec[1] = (unsigned long)(arg1); \
5828 _argvec[2] = (unsigned long)(arg2); \
5829 __asm__ volatile( \
5830 "lw $t9, 0(%1)\n\t" \
5831 "lw $a0, 4(%1)\n\t" \
5832 "lw $a1, 8(%1)\n\t" \
5833 VALGRIND_CALL_NOREDIR_T9 \
5834 "move %0, $a0\n" \
5835 : "=r" (_res) \
5836 : "r" (&_argvec[0]) \
5837 : "memory", __CALLER_SAVED_REGS \
5838 ); \
5839 lval = (__typeof__(lval)) _res; \
5840 } while (0)
5841
5842 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5843 do { \
5844 volatile OrigFn _orig = (orig); \
5845 volatile unsigned long _argvec[4]; \
5846 volatile unsigned long _res; \
5847 _argvec[0] = (unsigned long)_orig.nraddr; \
5848 _argvec[1] = (unsigned long)(arg1); \
5849 _argvec[2] = (unsigned long)(arg2); \
5850 _argvec[3] = (unsigned long)(arg3); \
5851 __asm__ volatile( \
5852 "lw $t9, 0(%1)\n\t" \
5853 "lw $a0, 4(%1)\n\t" \
5854 "lw $a1, 8(%1)\n\t" \
5855 "lw $a2,12(%1)\n\t" \
5856 VALGRIND_CALL_NOREDIR_T9 \
5857 "move %0, $a0\n" \
5858 : "=r" (_res) \
5859 : "r" (&_argvec[0]) \
5860 : "memory", __CALLER_SAVED_REGS \
5861 ); \
5862 lval = (__typeof__(lval)) _res; \
5863 } while (0)
5864
5865 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5866 do { \
5867 volatile OrigFn _orig = (orig); \
5868 volatile unsigned long _argvec[5]; \
5869 volatile unsigned long _res; \
5870 _argvec[0] = (unsigned long)_orig.nraddr; \
5871 _argvec[1] = (unsigned long)(arg1); \
5872 _argvec[2] = (unsigned long)(arg2); \
5873 _argvec[3] = (unsigned long)(arg3); \
5874 _argvec[4] = (unsigned long)(arg4); \
5875 __asm__ volatile( \
5876 "lw $t9, 0(%1)\n\t" \
5877 "lw $a0, 4(%1)\n\t" \
5878 "lw $a1, 8(%1)\n\t" \
5879 "lw $a2,12(%1)\n\t" \
5880 "lw $a3,16(%1)\n\t" \
5881 VALGRIND_CALL_NOREDIR_T9 \
5882 "move %0, $a0\n" \
5883 : "=r" (_res) \
5884 : "r" (&_argvec[0]) \
5885 : "memory", __CALLER_SAVED_REGS \
5886 ); \
5887 lval = (__typeof__(lval)) _res; \
5888 } while (0)
5889
5890 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5891 do { \
5892 volatile OrigFn _orig = (orig); \
5893 volatile unsigned long _argvec[6]; \
5894 volatile unsigned long _res; \
5895 _argvec[0] = (unsigned long)_orig.nraddr; \
5896 _argvec[1] = (unsigned long)(arg1); \
5897 _argvec[2] = (unsigned long)(arg2); \
5898 _argvec[3] = (unsigned long)(arg3); \
5899 _argvec[4] = (unsigned long)(arg4); \
5900 _argvec[5] = (unsigned long)(arg5); \
5901 __asm__ volatile( \
5902 "lw $t9, 0(%1)\n\t" \
5903 "lw $a0, 4(%1)\n\t" \
5904 "lw $a1, 8(%1)\n\t" \
5905 "lw $a2,12(%1)\n\t" \
5906 "lw $a3,16(%1)\n\t" \
5907 "lw $a4,20(%1)\n\t" \
5908 VALGRIND_CALL_NOREDIR_T9 \
5909 "move %0, $a0\n" \
5910 : "=r" (_res) \
5911 : "r" (&_argvec[0]) \
5912 : "memory", __CALLER_SAVED_REGS \
5913 ); \
5914 lval = (__typeof__(lval)) _res; \
5915 } while (0)
5916 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5917 do { \
5918 volatile OrigFn _orig = (orig); \
5919 volatile unsigned long _argvec[7]; \
5920 volatile unsigned long _res; \
5921 _argvec[0] = (unsigned long)_orig.nraddr; \
5922 _argvec[1] = (unsigned long)(arg1); \
5923 _argvec[2] = (unsigned long)(arg2); \
5924 _argvec[3] = (unsigned long)(arg3); \
5925 _argvec[4] = (unsigned long)(arg4); \
5926 _argvec[5] = (unsigned long)(arg5); \
5927 _argvec[6] = (unsigned long)(arg6); \
5928 __asm__ volatile( \
5929 "lw $t9, 0(%1)\n\t" \
5930 "lw $a0, 4(%1)\n\t" \
5931 "lw $a1, 8(%1)\n\t" \
5932 "lw $a2,12(%1)\n\t" \
5933 "lw $a3,16(%1)\n\t" \
5934 "lw $a4,20(%1)\n\t" \
5935 "lw $a5,24(%1)\n\t" \
5936 VALGRIND_CALL_NOREDIR_T9 \
5937 "move %0, $a0\n" \
5938 : "=r" (_res) \
5939 : "r" (&_argvec[0]) \
5940 : "memory", __CALLER_SAVED_REGS \
5941 ); \
5942 lval = (__typeof__(lval)) _res; \
5943 } while (0)
5944
5945 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5946 arg7) \
5947 do { \
5948 volatile OrigFn _orig = (orig); \
5949 volatile unsigned long _argvec[8]; \
5950 volatile unsigned long _res; \
5951 _argvec[0] = (unsigned long)_orig.nraddr; \
5952 _argvec[1] = (unsigned long)(arg1); \
5953 _argvec[2] = (unsigned long)(arg2); \
5954 _argvec[3] = (unsigned long)(arg3); \
5955 _argvec[4] = (unsigned long)(arg4); \
5956 _argvec[5] = (unsigned long)(arg5); \
5957 _argvec[6] = (unsigned long)(arg6); \
5958 _argvec[7] = (unsigned long)(arg7); \
5959 __asm__ volatile( \
5960 "lw $t9, 0(%1)\n\t" \
5961 "lw $a0, 4(%1)\n\t" \
5962 "lw $a1, 8(%1)\n\t" \
5963 "lw $a2,12(%1)\n\t" \
5964 "lw $a3,16(%1)\n\t" \
5965 "lw $a4,20(%1)\n\t" \
5966 "lw $a5,24(%1)\n\t" \
5967 "lw $a6,28(%1)\n\t" \
5968 VALGRIND_CALL_NOREDIR_T9 \
5969 "move %0, $a0\n" \
5970 : "=r" (_res) \
5971 : "r" (&_argvec[0]) \
5972 : "memory", __CALLER_SAVED_REGS \
5973 ); \
5974 lval = (__typeof__(lval)) _res; \
5975 } while (0)
5976
5977 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5978 arg7,arg8) \
5979 do { \
5980 volatile OrigFn _orig = (orig); \
5981 volatile unsigned long _argvec[9]; \
5982 volatile unsigned long _res; \
5983 _argvec[0] = (unsigned long)_orig.nraddr; \
5984 _argvec[1] = (unsigned long)(arg1); \
5985 _argvec[2] = (unsigned long)(arg2); \
5986 _argvec[3] = (unsigned long)(arg3); \
5987 _argvec[4] = (unsigned long)(arg4); \
5988 _argvec[5] = (unsigned long)(arg5); \
5989 _argvec[6] = (unsigned long)(arg6); \
5990 _argvec[7] = (unsigned long)(arg7); \
5991 _argvec[8] = (unsigned long)(arg8); \
5992 __asm__ volatile( \
5993 "lw $t9, 0(%1)\n\t" \
5994 "lw $a0, 4(%1)\n\t" \
5995 "lw $a1, 8(%1)\n\t" \
5996 "lw $a2,12(%1)\n\t" \
5997 "lw $a3,16(%1)\n\t" \
5998 "lw $a4,20(%1)\n\t" \
5999 "lw $a5,24(%1)\n\t" \
6000 "lw $a6,28(%1)\n\t" \
6001 "lw $a7,32(%1)\n\t" \
6002 VALGRIND_CALL_NOREDIR_T9 \
6003 "move %0, $a0\n" \
6004 : "=r" (_res) \
6005 : "r" (&_argvec[0]) \
6006 : "memory", __CALLER_SAVED_REGS \
6007 ); \
6008 lval = (__typeof__(lval)) _res; \
6009 } while (0)
6010
6011 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6012 arg7,arg8,arg9) \
6013 do { \
6014 volatile OrigFn _orig = (orig); \
6015 volatile unsigned long _argvec[10]; \
6016 volatile unsigned long _res; \
6017 _argvec[0] = (unsigned long)_orig.nraddr; \
6018 _argvec[1] = (unsigned long)(arg1); \
6019 _argvec[2] = (unsigned long)(arg2); \
6020 _argvec[3] = (unsigned long)(arg3); \
6021 _argvec[4] = (unsigned long)(arg4); \
6022 _argvec[5] = (unsigned long)(arg5); \
6023 _argvec[6] = (unsigned long)(arg6); \
6024 _argvec[7] = (unsigned long)(arg7); \
6025 _argvec[8] = (unsigned long)(arg8); \
6026 _argvec[9] = (unsigned long)(arg9); \
6027 __asm__ volatile( \
6028 "addiu $sp, $sp, -16 \n\t" \
6029 "lw $t9,36(%1) \n\t" \
6030 "sw $t9, 0($sp) \n\t" \
6031 "lw $t9, 0(%1) \n\t" \
6032 "lw $a0, 4(%1) \n\t" \
6033 "lw $a1, 8(%1) \n\t" \
6034 "lw $a2,12(%1) \n\t" \
6035 "lw $a3,16(%1) \n\t" \
6036 "lw $a4,20(%1) \n\t" \
6037 "lw $a5,24(%1) \n\t" \
6038 "lw $a6,28(%1) \n\t" \
6039 "lw $a7,32(%1) \n\t" \
6040 VALGRIND_CALL_NOREDIR_T9 \
6041 "move %0, $a0 \n\t" \
6042 "addiu $sp, $sp, 16 \n\t" \
6043 : "=r" (_res) \
6044 : "r" (&_argvec[0]) \
6045 : "memory", __CALLER_SAVED_REGS \
6046 ); \
6047 lval = (__typeof__(lval)) _res; \
6048 } while (0)
6049
6050 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6051 arg7,arg8,arg9,arg10) \
6052 do { \
6053 volatile OrigFn _orig = (orig); \
6054 volatile unsigned long _argvec[11]; \
6055 volatile unsigned long _res; \
6056 _argvec[0] = (unsigned long)_orig.nraddr; \
6057 _argvec[1] = (unsigned long)(arg1); \
6058 _argvec[2] = (unsigned long)(arg2); \
6059 _argvec[3] = (unsigned long)(arg3); \
6060 _argvec[4] = (unsigned long)(arg4); \
6061 _argvec[5] = (unsigned long)(arg5); \
6062 _argvec[6] = (unsigned long)(arg6); \
6063 _argvec[7] = (unsigned long)(arg7); \
6064 _argvec[8] = (unsigned long)(arg8); \
6065 _argvec[9] = (unsigned long)(arg9); \
6066 _argvec[10] = (unsigned long)(arg10); \
6067 __asm__ volatile( \
6068 "addiu $sp, $sp, -16 \n\t" \
6069 "lw $t9,36(%1) \n\t" \
6070 "sw $t9, 0($sp) \n\t" \
6071 "lw $t9,40(%1) \n\t" \
6072 "sw $t9, 4($sp) \n\t" \
6073 "lw $t9, 0(%1) \n\t" \
6074 "lw $a0, 4(%1) \n\t" \
6075 "lw $a1, 8(%1) \n\t" \
6076 "lw $a2,12(%1) \n\t" \
6077 "lw $a3,16(%1) \n\t" \
6078 "lw $a4,20(%1) \n\t" \
6079 "lw $a5,24(%1) \n\t" \
6080 "lw $a6,28(%1) \n\t" \
6081 "lw $a7,32(%1) \n\t" \
6082 VALGRIND_CALL_NOREDIR_T9 \
6083 "move %0, $a0 \n\t" \
6084 "addiu $sp, $sp, 16 \n\t" \
6085 : "=r" (_res) \
6086 : "r" (&_argvec[0]) \
6087 : "memory", __CALLER_SAVED_REGS \
6088 ); \
6089 lval = (__typeof__(lval)) _res; \
6090 } while (0)
6091
6092 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6093 arg6,arg7,arg8,arg9,arg10, \
6094 arg11) \
6095 do { \
6096 volatile OrigFn _orig = (orig); \
6097 volatile unsigned long _argvec[12]; \
6098 volatile unsigned long _res; \
6099 _argvec[0] = (unsigned long)_orig.nraddr; \
6100 _argvec[1] = (unsigned long)(arg1); \
6101 _argvec[2] = (unsigned long)(arg2); \
6102 _argvec[3] = (unsigned long)(arg3); \
6103 _argvec[4] = (unsigned long)(arg4); \
6104 _argvec[5] = (unsigned long)(arg5); \
6105 _argvec[6] = (unsigned long)(arg6); \
6106 _argvec[7] = (unsigned long)(arg7); \
6107 _argvec[8] = (unsigned long)(arg8); \
6108 _argvec[9] = (unsigned long)(arg9); \
6109 _argvec[10] = (unsigned long)(arg10); \
6110 _argvec[11] = (unsigned long)(arg11); \
6111 __asm__ volatile( \
6112 "addiu $sp, $sp, -16 \n\t" \
6113 "lw $t9,36(%1) \n\t" \
6114 "sw $t9, 0($sp) \n\t" \
6115 "lw $t9,40(%1) \n\t" \
6116 "sw $t9, 4($sp) \n\t" \
6117 "lw $t9,44(%1) \n\t" \
6118 "sw $t9, 8($sp) \n\t" \
6119 "lw $t9, 0(%1) \n\t" \
6120 "lw $a0, 4(%1) \n\t" \
6121 "lw $a1, 8(%1) \n\t" \
6122 "lw $a2,12(%1) \n\t" \
6123 "lw $a3,16(%1) \n\t" \
6124 "lw $a4,20(%1) \n\t" \
6125 "lw $a5,24(%1) \n\t" \
6126 "lw $a6,28(%1) \n\t" \
6127 "lw $a7,32(%1) \n\t" \
6128 VALGRIND_CALL_NOREDIR_T9 \
6129 "move %0, $a0 \n\t" \
6130 "addiu $sp, $sp, 16 \n\t" \
6131 : "=r" (_res) \
6132 : "r" (&_argvec[0]) \
6133 : "memory", __CALLER_SAVED_REGS \
6134 ); \
6135 lval = (__typeof__(lval)) _res; \
6136 } while (0)
6137
6138 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6139 arg6,arg7,arg8,arg9,arg10, \
6140 arg11,arg12) \
6141 do { \
6142 volatile OrigFn _orig = (orig); \
6143 volatile unsigned long _argvec[13]; \
6144 volatile unsigned long _res; \
6145 _argvec[0] = (unsigned long)_orig.nraddr; \
6146 _argvec[1] = (unsigned long)(arg1); \
6147 _argvec[2] = (unsigned long)(arg2); \
6148 _argvec[3] = (unsigned long)(arg3); \
6149 _argvec[4] = (unsigned long)(arg4); \
6150 _argvec[5] = (unsigned long)(arg5); \
6151 _argvec[6] = (unsigned long)(arg6); \
6152 _argvec[7] = (unsigned long)(arg7); \
6153 _argvec[8] = (unsigned long)(arg8); \
6154 _argvec[9] = (unsigned long)(arg9); \
6155 _argvec[10] = (unsigned long)(arg10); \
6156 _argvec[11] = (unsigned long)(arg11); \
6157 _argvec[12] = (unsigned long)(arg12); \
6158 __asm__ volatile( \
6159 "addiu $sp, $sp, -16 \n\t" \
6160 "lw $t9,36(%1) \n\t" \
6161 "sw $t9, 0($sp) \n\t" \
6162 "lw $t9,40(%1) \n\t" \
6163 "sw $t9, 4($sp) \n\t" \
6164 "lw $t9,44(%1) \n\t" \
6165 "sw $t9, 8($sp) \n\t" \
6166 "lw $t9,48(%1) \n\t" \
6167 "sw $t9,12($sp) \n\t" \
6168 "lw $t9, 0(%1) \n\t" \
6169 "lw $a0, 4(%1) \n\t" \
6170 "lw $a1, 8(%1) \n\t" \
6171 "lw $a2,12(%1) \n\t" \
6172 "lw $a3,16(%1) \n\t" \
6173 "lw $a4,20(%1) \n\t" \
6174 "lw $a5,24(%1) \n\t" \
6175 "lw $a6,28(%1) \n\t" \
6176 "lw $a7,32(%1) \n\t" \
6177 VALGRIND_CALL_NOREDIR_T9 \
6178 "move %0, $a0 \n\t" \
6179 "addiu $sp, $sp, 16 \n\t" \
6180 : "=r" (_res) \
6181 : "r" (&_argvec[0]) \
6182 : "memory", __CALLER_SAVED_REGS \
6183 ); \
6184 lval = (__typeof__(lval)) _res; \
6185 } while (0)
6186
6187 #endif   /* PLAT_nanomips_linux */
6188
6189
6190
6191 #if defined(PLAT_mips64_linux)
6192
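/* Dispatcher macros for the MIPS64 (n64) convention.  As the code
   below shows, up to eight arguments are passed in $4..$11 (a0..a7),
   the ninth and later arguments go into 8-byte stack slots, the callee
   address is placed in $25 (t9) as the MIPS PIC convention requires,
   and the result is read back from $2 (v0). */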
6193
6194 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
6195 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
6196 "$25", "$31"
6197
6198
6199
6200
6201 #define MIPS64_LONG2REG_CAST(x) ((long long)(long)x)
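/* The double cast above first converts the value to the native signed
   long and then widens it to 64 bits, so 32-bit quantities end up
   sign-extended into the full register image the n64 ABI expects. */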
6202
6203 #define CALL_FN_W_v(lval, orig) \
6204 do { \
6205 volatile OrigFn _orig = (orig); \
6206 volatile unsigned long long _argvec[1]; \
6207 volatile unsigned long long _res; \
6208 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6209 __asm__ volatile( \
6210 "ld $25, 0(%1)\n\t" \
6211 VALGRIND_CALL_NOREDIR_T9 \
6212 "move %0, $2\n" \
6213 : "=r" (_res) \
6214 : "0" (&_argvec[0]) \
6215 : "memory", __CALLER_SAVED_REGS \
6216 ); \
6217 lval = (__typeof__(lval)) (long)_res; \
6218 } while (0)
6219
6220 #define CALL_FN_W_W(lval, orig, arg1) \
6221 do { \
6222 volatile OrigFn _orig = (orig); \
6223 volatile unsigned long long _argvec[2]; \
6224 volatile unsigned long long _res; \
6225 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6226 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6227 __asm__ volatile( \
6228 "ld $4, 8(%1)\n\t" \
6229 "ld $25, 0(%1)\n\t" \
6230 VALGRIND_CALL_NOREDIR_T9 \
6231 "move %0, $2\n" \
6232 : "=r" (_res) \
6233 : "r" (&_argvec[0]) \
6234 : "memory", __CALLER_SAVED_REGS \
6235 ); \
6236 lval = (__typeof__(lval)) (long)_res; \
6237 } while (0)
6238
6239 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
6240 do { \
6241 volatile OrigFn _orig = (orig); \
6242 volatile unsigned long long _argvec[3]; \
6243 volatile unsigned long long _res; \
6244 _argvec[0] = _orig.nraddr; \
6245 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6246 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6247 __asm__ volatile( \
6248 "ld $4, 8(%1)\n\t" \
6249 "ld $5, 16(%1)\n\t" \
6250 "ld $25, 0(%1)\n\t" \
6251 VALGRIND_CALL_NOREDIR_T9 \
6252 "move %0, $2\n" \
6253 : "=r" (_res) \
6254 : "r" (&_argvec[0]) \
6255 : "memory", __CALLER_SAVED_REGS \
6256 ); \
6257 lval = (__typeof__(lval)) (long)_res; \
6258 } while (0)
6259
6260
6261 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
6262 do { \
6263 volatile OrigFn _orig = (orig); \
6264 volatile unsigned long long _argvec[4]; \
6265 volatile unsigned long long _res; \
6266 _argvec[0] = _orig.nraddr; \
6267 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6268 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6269 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6270 __asm__ volatile( \
6271 "ld $4, 8(%1)\n\t" \
6272 "ld $5, 16(%1)\n\t" \
6273 "ld $6, 24(%1)\n\t" \
6274 "ld $25, 0(%1)\n\t" \
6275 VALGRIND_CALL_NOREDIR_T9 \
6276 "move %0, $2\n" \
6277 : "=r" (_res) \
6278 : "r" (&_argvec[0]) \
6279 : "memory", __CALLER_SAVED_REGS \
6280 ); \
6281 lval = (__typeof__(lval)) (long)_res; \
6282 } while (0)
6283
6284 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
6285 do { \
6286 volatile OrigFn _orig = (orig); \
6287 volatile unsigned long long _argvec[5]; \
6288 volatile unsigned long long _res; \
6289 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6290 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6291 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6292 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6293 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6294 __asm__ volatile( \
6295 "ld $4, 8(%1)\n\t" \
6296 "ld $5, 16(%1)\n\t" \
6297 "ld $6, 24(%1)\n\t" \
6298 "ld $7, 32(%1)\n\t" \
6299 "ld $25, 0(%1)\n\t" \
6300 VALGRIND_CALL_NOREDIR_T9 \
6301 "move %0, $2\n" \
6302 : "=r" (_res) \
6303 : "r" (&_argvec[0]) \
6304 : "memory", __CALLER_SAVED_REGS \
6305 ); \
6306 lval = (__typeof__(lval)) (long)_res; \
6307 } while (0)
6308
6309 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
6310 do { \
6311 volatile OrigFn _orig = (orig); \
6312 volatile unsigned long long _argvec[6]; \
6313 volatile unsigned long long _res; \
6314 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6315 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6316 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6317 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6318 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6319 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6320 __asm__ volatile( \
6321 "ld $4, 8(%1)\n\t" \
6322 "ld $5, 16(%1)\n\t" \
6323 "ld $6, 24(%1)\n\t" \
6324 "ld $7, 32(%1)\n\t" \
6325 "ld $8, 40(%1)\n\t" \
6326 "ld $25, 0(%1)\n\t" \
6327 VALGRIND_CALL_NOREDIR_T9 \
6328 "move %0, $2\n" \
6329 : "=r" (_res) \
6330 : "r" (&_argvec[0]) \
6331 : "memory", __CALLER_SAVED_REGS \
6332 ); \
6333 lval = (__typeof__(lval)) (long)_res; \
6334 } while (0)
6335
6336 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
6337 do { \
6338 volatile OrigFn _orig = (orig); \
6339 volatile unsigned long long _argvec[7]; \
6340 volatile unsigned long long _res; \
6341 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6342 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6343 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6344 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6345 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6346 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6347 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6348 __asm__ volatile( \
6349 "ld $4, 8(%1)\n\t" \
6350 "ld $5, 16(%1)\n\t" \
6351 "ld $6, 24(%1)\n\t" \
6352 "ld $7, 32(%1)\n\t" \
6353 "ld $8, 40(%1)\n\t" \
6354 "ld $9, 48(%1)\n\t" \
6355 "ld $25, 0(%1)\n\t" \
6356 VALGRIND_CALL_NOREDIR_T9 \
6357 "move %0, $2\n" \
6358 : "=r" (_res) \
6359 : "r" (&_argvec[0]) \
6360 : "memory", __CALLER_SAVED_REGS \
6361 ); \
6362 lval = (__typeof__(lval)) (long)_res; \
6363 } while (0)
6364
6365 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6366 arg7) \
6367 do { \
6368 volatile OrigFn _orig = (orig); \
6369 volatile unsigned long long _argvec[8]; \
6370 volatile unsigned long long _res; \
6371 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6372 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6373 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6374 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6375 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6376 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6377 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6378 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6379 __asm__ volatile( \
6380 "ld $4, 8(%1)\n\t" \
6381 "ld $5, 16(%1)\n\t" \
6382 "ld $6, 24(%1)\n\t" \
6383 "ld $7, 32(%1)\n\t" \
6384 "ld $8, 40(%1)\n\t" \
6385 "ld $9, 48(%1)\n\t" \
6386 "ld $10, 56(%1)\n\t" \
6387 "ld $25, 0(%1) \n\t" \
6388 VALGRIND_CALL_NOREDIR_T9 \
6389 "move %0, $2\n" \
6390 : "=r" (_res) \
6391 : "r" (&_argvec[0]) \
6392 : "memory", __CALLER_SAVED_REGS \
6393 ); \
6394 lval = (__typeof__(lval)) (long)_res; \
6395 } while (0)
6396
6397 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6398 arg7,arg8) \
6399 do { \
6400 volatile OrigFn _orig = (orig); \
6401 volatile unsigned long long _argvec[9]; \
6402 volatile unsigned long long _res; \
6403 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6404 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6405 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6406 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6407 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6408 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6409 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6410 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6411 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6412 __asm__ volatile( \
6413 "ld $4, 8(%1)\n\t" \
6414 "ld $5, 16(%1)\n\t" \
6415 "ld $6, 24(%1)\n\t" \
6416 "ld $7, 32(%1)\n\t" \
6417 "ld $8, 40(%1)\n\t" \
6418 "ld $9, 48(%1)\n\t" \
6419 "ld $10, 56(%1)\n\t" \
6420 "ld $11, 64(%1)\n\t" \
6421 "ld $25, 0(%1) \n\t" \
6422 VALGRIND_CALL_NOREDIR_T9 \
6423 "move %0, $2\n" \
6424 : "=r" (_res) \
6425 : "r" (&_argvec[0]) \
6426 : "memory", __CALLER_SAVED_REGS \
6427 ); \
6428 lval = (__typeof__(lval)) (long)_res; \
6429 } while (0)
6430
6431 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6432 arg7,arg8,arg9) \
6433 do { \
6434 volatile OrigFn _orig = (orig); \
6435 volatile unsigned long long _argvec[10]; \
6436 volatile unsigned long long _res; \
6437 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6438 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6439 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6440 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6441 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6442 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6443 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6444 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6445 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6446 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6447 __asm__ volatile( \
6448 "dsubu $29, $29, 8\n\t" \
6449 "ld $4, 72(%1)\n\t" \
6450 "sd $4, 0($29)\n\t" \
6451 "ld $4, 8(%1)\n\t" \
6452 "ld $5, 16(%1)\n\t" \
6453 "ld $6, 24(%1)\n\t" \
6454 "ld $7, 32(%1)\n\t" \
6455 "ld $8, 40(%1)\n\t" \
6456 "ld $9, 48(%1)\n\t" \
6457 "ld $10, 56(%1)\n\t" \
6458 "ld $11, 64(%1)\n\t" \
6459 "ld $25, 0(%1)\n\t" \
6460 VALGRIND_CALL_NOREDIR_T9 \
6461 "daddu $29, $29, 8\n\t" \
6462 "move %0, $2\n" \
6463 : "=r" (_res) \
6464 : "r" (&_argvec[0]) \
6465 : "memory", __CALLER_SAVED_REGS \
6466 ); \
6467 lval = (__typeof__(lval)) (long)_res; \
6468 } while (0)
6469
6470 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6471 arg7,arg8,arg9,arg10) \
6472 do { \
6473 volatile OrigFn _orig = (orig); \
6474 volatile unsigned long long _argvec[11]; \
6475 volatile unsigned long long _res; \
6476 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6477 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6478 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6479 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6480 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6481 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6482 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6483 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6484 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6485 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6486 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
6487 __asm__ volatile( \
6488 "dsubu $29, $29, 16\n\t" \
6489 "ld $4, 72(%1)\n\t" \
6490 "sd $4, 0($29)\n\t" \
6491 "ld $4, 80(%1)\n\t" \
6492 "sd $4, 8($29)\n\t" \
6493 "ld $4, 8(%1)\n\t" \
6494 "ld $5, 16(%1)\n\t" \
6495 "ld $6, 24(%1)\n\t" \
6496 "ld $7, 32(%1)\n\t" \
6497 "ld $8, 40(%1)\n\t" \
6498 "ld $9, 48(%1)\n\t" \
6499 "ld $10, 56(%1)\n\t" \
6500 "ld $11, 64(%1)\n\t" \
6501 "ld $25, 0(%1)\n\t" \
6502 VALGRIND_CALL_NOREDIR_T9 \
6503 "daddu $29, $29, 16\n\t" \
6504 "move %0, $2\n" \
6505 : "=r" (_res) \
6506 : "r" (&_argvec[0]) \
6507 : "memory", __CALLER_SAVED_REGS \
6508 ); \
6509 lval = (__typeof__(lval)) (long)_res; \
6510 } while (0)
6511
6512 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6513 arg6,arg7,arg8,arg9,arg10, \
6514 arg11) \
6515 do { \
6516 volatile OrigFn _orig = (orig); \
6517 volatile unsigned long long _argvec[12]; \
6518 volatile unsigned long long _res; \
6519 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6520 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6521 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6522 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6523 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6524 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6525 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6526 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6527 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6528 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6529 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
6530 _argvec[11] = MIPS64_LONG2REG_CAST(arg11); \
6531 __asm__ volatile( \
6532 "dsubu $29, $29, 24\n\t" \
6533 "ld $4, 72(%1)\n\t" \
6534 "sd $4, 0($29)\n\t" \
6535 "ld $4, 80(%1)\n\t" \
6536 "sd $4, 8($29)\n\t" \
6537 "ld $4, 88(%1)\n\t" \
6538 "sd $4, 16($29)\n\t" \
6539 "ld $4, 8(%1)\n\t" \
6540 "ld $5, 16(%1)\n\t" \
6541 "ld $6, 24(%1)\n\t" \
6542 "ld $7, 32(%1)\n\t" \
6543 "ld $8, 40(%1)\n\t" \
6544 "ld $9, 48(%1)\n\t" \
6545 "ld $10, 56(%1)\n\t" \
6546 "ld $11, 64(%1)\n\t" \
6547 "ld $25, 0(%1)\n\t" \
6548 VALGRIND_CALL_NOREDIR_T9 \
6549 "daddu $29, $29, 24\n\t" \
6550 "move %0, $2\n" \
6551 : "=r" (_res) \
6552 : "r" (&_argvec[0]) \
6553 : "memory", __CALLER_SAVED_REGS \
6554 ); \
6555 lval = (__typeof__(lval)) (long)_res; \
6556 } while (0)
6557
6558 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6559 arg6,arg7,arg8,arg9,arg10, \
6560 arg11,arg12) \
6561 do { \
6562 volatile OrigFn _orig = (orig); \
6563 volatile unsigned long long _argvec[13]; \
6564 volatile unsigned long long _res; \
6565 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6566 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6567 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6568 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6569 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6570 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6571 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6572 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6573 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6574 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6575 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
6576 _argvec[11] = MIPS64_LONG2REG_CAST(arg11); \
6577 _argvec[12] = MIPS64_LONG2REG_CAST(arg12); \
6578 __asm__ volatile( \
6579 "dsubu $29, $29, 32\n\t" \
6580 "ld $4, 72(%1)\n\t" \
6581 "sd $4, 0($29)\n\t" \
6582 "ld $4, 80(%1)\n\t" \
6583 "sd $4, 8($29)\n\t" \
6584 "ld $4, 88(%1)\n\t" \
6585 "sd $4, 16($29)\n\t" \
6586 "ld $4, 96(%1)\n\t" \
6587 "sd $4, 24($29)\n\t" \
6588 "ld $4, 8(%1)\n\t" \
6589 "ld $5, 16(%1)\n\t" \
6590 "ld $6, 24(%1)\n\t" \
6591 "ld $7, 32(%1)\n\t" \
6592 "ld $8, 40(%1)\n\t" \
6593 "ld $9, 48(%1)\n\t" \
6594 "ld $10, 56(%1)\n\t" \
6595 "ld $11, 64(%1)\n\t" \
6596 "ld $25, 0(%1)\n\t" \
6597 VALGRIND_CALL_NOREDIR_T9 \
6598 "daddu $29, $29, 32\n\t" \
6599 "move %0, $2\n" \
6600 : "=r" (_res) \
6601 : "r" (&_argvec[0]) \
6602 : "memory", __CALLER_SAVED_REGS \
6603 ); \
6604 lval = (__typeof__(lval)) (long)_res; \
6605 } while (0)
6606
6607 #endif   /* PLAT_mips64_linux */
6608
6609
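/* The CALL_FN_W_* macros above (and their counterparts for the other
   platforms) are the low-level back end of Valgrind's function
   wrapping: they call the original function while suppressing any
   further redirection.  A minimal usage sketch, assuming the wrapping
   macros defined earlier in this header (I_WRAP_SONAME_FNNAME_ZU,
   OrigFn, VALGRIND_GET_ORIG_FN); the wrapped function "foo" and its
   location are illustrative only:

      #include <stdio.h>
      #include <valgrind/valgrind.h>

      int I_WRAP_SONAME_FNNAME_ZU(NONE, foo)(int x, int y)
      {
         int    result;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);         // handle on the real foo
         printf("wrapper: args %d %d\n", x, y);
         CALL_FN_W_WW(result, fn, x, y);   // call it, no re-redirection
         printf("wrapper: result %d\n", result);
         return result;
      }

   Under Valgrind, calls to foo() are redirected to this wrapper; run
   natively, no redirection happens and the wrapper is never used. */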
6610
6611
6612
6613
6614
6615
6616
6617
6618
6619
6620
6621
6622
6623
6624 #define VG_USERREQ_TOOL_BASE(a,b) \
6625 ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
6626 #define VG_IS_TOOL_USERREQ(a, b, v) \
6627 (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
6628
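/* Tool-specific headers (memcheck.h, helgrind.h, ...) build their own
   request codes on top of the two macros above.  A minimal sketch
   using a made-up tool prefix ('X','Y'):

      enum {
         XY_USERREQ__DO_THING = VG_USERREQ_TOOL_BASE('X','Y'),
         XY_USERREQ__DO_OTHER = VG_USERREQ_TOOL_BASE('X','Y') + 1
      };

   VG_IS_TOOL_USERREQ('X','Y', req) then tests whether a request code
   carries that prefix. */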
6629
6630
6631
6632
6633
6634 typedef
6635 enum { VG_USERREQ__RUNNING_ON_VALGRIND = 0x1001,
6636 VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,
6637
6638
6639
6640
6641
6642
6643 VG_USERREQ__CLIENT_CALL0 = 0x1101,
6644 VG_USERREQ__CLIENT_CALL1 = 0x1102,
6645 VG_USERREQ__CLIENT_CALL2 = 0x1103,
6646 VG_USERREQ__CLIENT_CALL3 = 0x1104,
6647
6648
6649
6650
6651 VG_USERREQ__COUNT_ERRORS = 0x1201,
6652
6653
6654
6655 VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,
6656
6657
6658
6659 VG_USERREQ__CLO_CHANGE = 0x1203,
6660
6661
6662
6663 VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
6664 VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
6665 VG_USERREQ__FREELIKE_BLOCK = 0x1302,
6666
6667 VG_USERREQ__CREATE_MEMPOOL = 0x1303,
6668 VG_USERREQ__DESTROY_MEMPOOL = 0x1304,
6669 VG_USERREQ__MEMPOOL_ALLOC = 0x1305,
6670 VG_USERREQ__MEMPOOL_FREE = 0x1306,
6671 VG_USERREQ__MEMPOOL_TRIM = 0x1307,
6672 VG_USERREQ__MOVE_MEMPOOL = 0x1308,
6673 VG_USERREQ__MEMPOOL_CHANGE = 0x1309,
6674 VG_USERREQ__MEMPOOL_EXISTS = 0x130a,
6675
6676
6677
6678
6679
6680
6681
6682
6683 VG_USERREQ__PRINTF = 0x1401,
6684 VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
6685
6686 VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
6687 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,
6688
6689
6690 VG_USERREQ__STACK_REGISTER = 0x1501,
6691 VG_USERREQ__STACK_DEREGISTER = 0x1502,
6692 VG_USERREQ__STACK_CHANGE = 0x1503,
6693
6694
6695 VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,
6696
6697
6698 VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,
6699
6700
6701
6702
6703
6704
6705 VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,
6706
6707
6708
6709
6710 VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901,
6711
6712
6713 VG_USERREQ__INNER_THREADS = 0x1902
6714 } Vg_ClientRequest;
6715
6716 #if !defined(__GNUC__)
6717 # define __extension__
6718 #endif
6719
6720
6721
6722
6723
6724
6725 #define RUNNING_ON_VALGRIND \
6726 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 , \
6727 VG_USERREQ__RUNNING_ON_VALGRIND, \
6728 0, 0, 0, 0, 0) \
6729
6730
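/* RUNNING_ON_VALGRIND evaluates to 0 when the program runs natively
   and to a non-zero value under Valgrind, so it is safe to test
   unconditionally.  Illustrative sketch (the tuning decision is of
   course application-specific):

      #include <valgrind/valgrind.h>

      static unsigned pick_iterations(void)
      {
         // Run a lighter workload under Valgrind, where everything is
         // much slower than native execution.
         return RUNNING_ON_VALGRIND ? 1000u : 1000000u;
      }
 */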
6731
6732
6733
6734
6735 #define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len) \
6736 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \
6737 _qzz_addr, _qzz_len, 0, 0, 0)
6738
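/* VALGRIND_DISCARD_TRANSLATIONS tells Valgrind that code in the given
   address range has changed, so any cached translations of it must be
   thrown away.  Sketch of the typical JIT pattern (emit_code and the
   buffer are hypothetical):

      unsigned char* buf = ...;       // executable buffer
      size_t len = emit_code(buf);    // overwrites previously-run code
      VALGRIND_DISCARD_TRANSLATIONS(buf, len);
      // ... jump into buf ...
 */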
6739 #define VALGRIND_INNER_THREADS(_qzz_addr) \
6740 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__INNER_THREADS, \
6741 _qzz_addr, 0, 0, 0, 0)
6742
6743
6744
6745
6746
6747
6748
6749 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6750
6751
6752 static int VALGRIND_PRINTF(const char *format, ...)
6753 __attribute__((format(__printf__, 1, 2), __unused__));
6754 #endif
6755 static int
6756 #if defined(_MSC_VER)
6757 __inline
6758 #endif
6759 VALGRIND_PRINTF(const char *format, ...)
6760 {
6761 #if defined(NVALGRIND)
6762 (void)format;
6763 return 0;
6764 #else
6765 #if defined(_MSC_VER) || defined(__MINGW64__)
6766 uintptr_t _qzz_res;
6767 #else
6768 unsigned long _qzz_res;
6769 #endif
6770 va_list vargs;
6771 va_start(vargs, format);
6772 #if defined(_MSC_VER) || defined(__MINGW64__)
6773 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6774 VG_USERREQ__PRINTF_VALIST_BY_REF,
6775 (uintptr_t)format,
6776 (uintptr_t)&vargs,
6777 0, 0, 0);
6778 #else
6779 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6780 VG_USERREQ__PRINTF_VALIST_BY_REF,
6781 (unsigned long)format,
6782 (unsigned long)&vargs,
6783 0, 0, 0);
6784 #endif
6785 va_end(vargs);
6786 return (int)_qzz_res;
6787 #endif
6788 }
6789
6790 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6791 static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6792 __attribute__((format(__printf__, 1, 2), __unused__));
6793 #endif
6794 static int
6795 #if defined(_MSC_VER)
6796 __inline
6797 #endif
6798 VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6799 {
6800 #if defined(NVALGRIND)
6801 (void)format;
6802 return 0;
6803 #else
6804 #if defined(_MSC_VER) || defined(__MINGW64__)
6805 uintptr_t _qzz_res;
6806 #else
6807 unsigned long _qzz_res;
6808 #endif
6809 va_list vargs;
6810 va_start(vargs, format);
6811 #if defined(_MSC_VER) || defined(__MINGW64__)
6812 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6813 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6814 (uintptr_t)format,
6815 (uintptr_t)&vargs,
6816 0, 0, 0);
6817 #else
6818 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6819 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6820 (unsigned long)format,
6821 (unsigned long)&vargs,
6822 0, 0, 0);
6823 #endif
6824 va_end(vargs);
6825 return (int)_qzz_res;
6826 #endif
6827 }
6828
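/* VALGRIND_PRINTF and VALGRIND_PRINTF_BACKTRACE write to Valgrind's
   own output (the same place tool messages go) rather than to the
   program's stdout/stderr; the backtrace variant also prints a stack
   trace of the calling thread.  Both collapse to a return value of 0
   when built with NVALGRIND or when run natively.  Sketch:

      VALGRIND_PRINTF("cache rebuilt, %d entries\n", n_entries);
      VALGRIND_PRINTF_BACKTRACE("unexpected state %d\n", state);
 */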
6829
6830
6831
6832
6833
6834
6835
6836
6837
6838
6839
6840
6841
6842
6843
6844
6845
6846
6847
6848
6849
6850
6851
6852
6853 #define VALGRIND_NON_SIMD_CALL0(_qyy_fn) \
6854 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 , \
6855 VG_USERREQ__CLIENT_CALL0, \
6856 _qyy_fn, \
6857 0, 0, 0, 0)
6858
6859 #define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1) \
6860 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 , \
6861 VG_USERREQ__CLIENT_CALL1, \
6862 _qyy_fn, \
6863 _qyy_arg1, 0, 0, 0)
6864
6865 #define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2) \
6866 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 , \
6867 VG_USERREQ__CLIENT_CALL2, \
6868 _qyy_fn, \
6869 _qyy_arg1, _qyy_arg2, 0, 0)
6870
6871 #define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
6872 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 , \
6873 VG_USERREQ__CLIENT_CALL3, \
6874 _qyy_fn, \
6875 _qyy_arg1, _qyy_arg2, \
6876 _qyy_arg3, 0)
6877
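/* The VALGRIND_NON_SIMD_CALL* macros ask Valgrind to run the given
   function on the real CPU rather than on the simulated one.  As
   documented upstream, the current ThreadId is inserted as an extra
   first argument, so a call with one explicit argument needs a
   two-parameter function.  Hedged sketch (my_probe is made up):

      static long my_probe(long tid, long x)   // tid added by Valgrind
      {
         return x + 1;
      }
      // ...
      long r = VALGRIND_NON_SIMD_CALL1(my_probe, 41);

   Run natively, these requests just yield the default value 0 and the
   function is not called. */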
6878
6879
6880
6881
6882 #define VALGRIND_COUNT_ERRORS \
6883 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( \
6884 0 , \
6885 VG_USERREQ__COUNT_ERRORS, \
6886 0, 0, 0, 0, 0)
6887
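/* VALGRIND_COUNT_ERRORS returns the number of errors the tool has
   reported so far, and 0 when running natively.  A test harness can
   use it to fail a test that provoked new errors.  Sketch
   (run_test_case is hypothetical):

      unsigned before = VALGRIND_COUNT_ERRORS;
      run_test_case();
      if (VALGRIND_COUNT_ERRORS > before)
         abort();   // the test introduced new Valgrind errors
 */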
6888
6889
6890
6891
6892
6893
6894
6895
6896
6897
6898
6899
6900
6901
6902
6903
6904
6905
6906
6907
6908
6909
6910
6911
6912
6913
6914
6915
6916
6917
6918
6919
6920
6921
6922
6923
6924
6925
6926
6927
6928
6929
6930
6931
6932
6933
6934
6935
6936
6937
6938
6939
6940
6941
6942
6943
6944
6945
6946
6947
6948
6949
6950
6951
6952
6953
6954
6955
6956
6957
6958
6959
6960
6961
6962
6963
6964
6965
6966
6967
6968
6969
6970
6971
6972
6973
6974
6975
6976
6977
6978
6979
6980
6981
6982
6983
6984
6985
6986
6987
6988
6989 #define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed) \
6990 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
6991 addr, sizeB, rzB, is_zeroed, 0)
6992
6993
6994
6995
6996 #define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \
6997 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK, \
6998 addr, oldSizeB, newSizeB, rzB, 0)
6999
7000
7001
7002
7003 #define VALGRIND_FREELIKE_BLOCK(addr, rzB) \
7004 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK, \
7005 addr, rzB, 0, 0, 0)
7006
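/* VALGRIND_MALLOCLIKE_BLOCK, VALGRIND_RESIZEINPLACE_BLOCK and
   VALGRIND_FREELIKE_BLOCK describe a custom allocator to tools such
   as Memcheck: each announced block is treated like a heap block,
   with rzB bytes of red zone on either side, and is_zeroed says
   whether the returned memory starts out defined.  Minimal sketch of
   a pool allocator's hooks (pool_carve, pool_release and RZ are
   hypothetical):

      void* my_alloc(size_t n)
      {
         char* p = pool_carve(n + 2*RZ);   // raw memory from the pool
         VALGRIND_MALLOCLIKE_BLOCK(p + RZ, n, RZ, 0);
         return p + RZ;
      }

      void my_free(void* user)
      {
         VALGRIND_FREELIKE_BLOCK(user, RZ);
         pool_release((char*)user - RZ);
      }
 */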
7007
7008 #define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed) \
7009 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \
7010 pool, rzB, is_zeroed, 0, 0)
7011
7012
7013
7014
7015
7016
7017
7018
7019
7020
7021
7022
7023
7024
7025
7026
7027
7028
7029
7030
7031
7032
7033
7034
7035
7036
7037
7038 #define VALGRIND_MEMPOOL_AUTO_FREE 1
7039 #define VALGRIND_MEMPOOL_METAPOOL 2
7040 #define VALGRIND_CREATE_MEMPOOL_EXT(pool, rzB, is_zeroed, flags) \
7041 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \
7042 pool, rzB, is_zeroed, flags, 0)
7043
7044
7045 #define VALGRIND_DESTROY_MEMPOOL(pool) \
7046 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL, \
7047 pool, 0, 0, 0, 0)
7048
7049
7050 #define VALGRIND_MEMPOOL_ALLOC(pool, addr, size) \
7051 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC, \
7052 pool, addr, size, 0, 0)
7053
7054
7055 #define VALGRIND_MEMPOOL_FREE(pool, addr) \
7056 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE, \
7057 pool, addr, 0, 0, 0)
7058
7059
7060 #define VALGRIND_MEMPOOL_TRIM(pool, addr, size) \
7061 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM, \
7062 pool, addr, size, 0, 0)
7063
7064
7065 #define VALGRIND_MOVE_MEMPOOL(poolA, poolB) \
7066 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL, \
7067 poolA, poolB, 0, 0, 0)
7068
7069
7070 #define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size) \
7071 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE, \
7072 pool, addrA, addrB, size, 0)
7073
7074
7075 #define VALGRIND_MEMPOOL_EXISTS(pool) \
7076 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
7077 VG_USERREQ__MEMPOOL_EXISTS, \
7078 pool, 0, 0, 0, 0)
7079
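/* The mempool requests describe a pool allocator at a higher level:
   the pool is identified by an anchor address, individual chunks are
   announced with MEMPOOL_ALLOC and retired with MEMPOOL_FREE, and
   DESTROY_MEMPOOL tears the whole pool down.  Hedged sketch (the pool
   type and the make_pool/pool_carve helpers are hypothetical):

      struct pool* p = make_pool(POOL_SIZE);
      VALGRIND_CREATE_MEMPOOL(p, 0, 0);    // no red zones, not zeroed

      void* obj = pool_carve(p, 128);
      VALGRIND_MEMPOOL_ALLOC(p, obj, 128);
      // ... use obj ...
      VALGRIND_MEMPOOL_FREE(p, obj);

      VALGRIND_DESTROY_MEMPOOL(p);
 */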
7080
7081
7082
7083 #define VALGRIND_STACK_REGISTER(start, end) \
7084 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
7085 VG_USERREQ__STACK_REGISTER, \
7086 start, end, 0, 0, 0)
7087
7088
7089
7090 #define VALGRIND_STACK_DEREGISTER(id) \
7091 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
7092 id, 0, 0, 0, 0)
7093
7094
7095
7096
7097 #define VALGRIND_STACK_CHANGE(id, start, end) \
7098 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE, \
7099 id, start, end, 0, 0)
7100
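/* Programs that switch between their own stacks (coroutines, fibers,
   green threads) can register each stack so that stack switches are
   not misread as wild stack-pointer changes.  Sketch (make_fiber_stack
   is hypothetical; the two arguments are the lowest and highest
   addresses of the region):

      char* lo = make_fiber_stack(SZ);
      unsigned id = VALGRIND_STACK_REGISTER(lo, lo + SZ);
      // ... run on that stack ...
      VALGRIND_STACK_DEREGISTER(id);   // before the memory is reused
 */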
7101
7102 #define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta) \
7103 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
7104 fd, ptr, total_size, delta, 0)
7105
7106
7107
7108
7109
7110 #define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64) \
7111 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
7112 VG_USERREQ__MAP_IP_TO_SRCLOC, \
7113 addr, buf64, 0, 0, 0)
7114
7115
7116
7117
7118
7119
7120
7121
7122
7123 #define VALGRIND_DISABLE_ERROR_REPORTING \
7124 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
7125 1, 0, 0, 0, 0)
7126
7127
7128
7129 #define VALGRIND_ENABLE_ERROR_REPORTING \
7130 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
7131 -1, 0, 0, 0, 0)
7132
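/* The two requests above adjust a per-thread disablement count (note
   the +1/-1 arguments), so they nest: each DISABLE must be balanced by
   a matching ENABLE.  Sketch of shielding a deliberately dodgy access
   (touch_uninitialised is hypothetical):

      VALGRIND_DISABLE_ERROR_REPORTING;
      touch_uninitialised(buf);        // errors here are suppressed
      VALGRIND_ENABLE_ERROR_REPORTING;
 */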
7133
7134
7135
7136
7137
7138 #define VALGRIND_MONITOR_COMMAND(command) \
7139 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
7140 command, 0, 0, 0, 0)
7141
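/* VALGRIND_MONITOR_COMMAND executes a gdbserver "monitor" command
   from within the client program, as if it had been typed at the
   (vgdb) prompt.  Hedged sketch; the command shown is the kind
   documented for Memcheck and may not apply to every tool:

      VALGRIND_MONITOR_COMMAND("leak_check summary");
 */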
7142
7143
7144
7145
7146 #define VALGRIND_CLO_CHANGE(option) \
7147 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CLO_CHANGE, \
7148 option, 0, 0, 0, 0)
7149
7150
7151 #undef PLAT_x86_darwin
7152 #undef PLAT_amd64_darwin
7153 #undef PLAT_x86_win32
7154 #undef PLAT_amd64_win64
7155 #undef PLAT_x86_linux
7156 #undef PLAT_amd64_linux
7157 #undef PLAT_ppc32_linux
7158 #undef PLAT_ppc64be_linux
7159 #undef PLAT_ppc64le_linux
7160 #undef PLAT_arm_linux
7161 #undef PLAT_s390x_linux
7162 #undef PLAT_mips32_linux
7163 #undef PLAT_mips64_linux
7164 #undef PLAT_nanomips_linux
7165 #undef PLAT_x86_solaris
7166 #undef PLAT_amd64_solaris
7167
7168 #endif   /* __VALGRIND_H */