File indexing completed on 2025-01-18 10:13:33
0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023
0024
0025
0026
0027
0028
0029
0030
0031
0032
0033
0034
0035
0036
0037
0038
0039
0040
0041
0042
0043
0044
0045
0046
0047
0048
0049
0050
0051
0052
0053
0054
0055
0056
0057
0058
0059
0060
0061
0062
0063
0064
0065
0066
0067
0068
0069
0070
0071
0072
0073 #ifndef __VALGRIND_H
0074 #define __VALGRIND_H
0075
0076
0077
0078
0079
0080
0081
0082
0083
0084
0085
0086
0087
0088
0089
0090
0091 #define __VALGRIND_MAJOR__ 3
0092 #define __VALGRIND_MINOR__ 20
0093
0094
0095 #include <stdarg.h>
0096
0097
0098
0099
0100
0101
0102
0103
0104
0105
0106
0107
0108
0109
0110
0111 #undef PLAT_x86_darwin
0112 #undef PLAT_amd64_darwin
0113 #undef PLAT_x86_freebsd
0114 #undef PLAT_amd64_freebsd
0115 #undef PLAT_x86_win32
0116 #undef PLAT_amd64_win64
0117 #undef PLAT_x86_linux
0118 #undef PLAT_amd64_linux
0119 #undef PLAT_ppc32_linux
0120 #undef PLAT_ppc64be_linux
0121 #undef PLAT_ppc64le_linux
0122 #undef PLAT_arm_linux
0123 #undef PLAT_arm64_linux
0124 #undef PLAT_s390x_linux
0125 #undef PLAT_mips32_linux
0126 #undef PLAT_mips64_linux
0127 #undef PLAT_nanomips_linux
0128 #undef PLAT_x86_solaris
0129 #undef PLAT_amd64_solaris
0130
0131
0132 #if defined(__APPLE__) && defined(__i386__)
0133 # define PLAT_x86_darwin 1
0134 #elif defined(__APPLE__) && defined(__x86_64__)
0135 # define PLAT_amd64_darwin 1
0136 #elif defined(__FreeBSD__) && defined(__i386__)
0137 # define PLAT_x86_freebsd 1
0138 #elif defined(__FreeBSD__) && defined(__amd64__)
0139 # define PLAT_amd64_freebsd 1
0140 #elif (defined(__MINGW32__) && defined(__i386__)) \
0141 || defined(__CYGWIN32__) \
0142 || (defined(_WIN32) && defined(_M_IX86))
0143 # define PLAT_x86_win32 1
0144 #elif (defined(__MINGW32__) && defined(__x86_64__)) \
0145 || (defined(_WIN32) && defined(_M_X64))
0146
0147 # define PLAT_amd64_win64 1
0148 #elif defined(__linux__) && defined(__i386__)
0149 # define PLAT_x86_linux 1
0150 #elif defined(__linux__) && defined(__x86_64__) && !defined(__ILP32__)
0151 # define PLAT_amd64_linux 1
0152 #elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
0153 # define PLAT_ppc32_linux 1
0154 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
0155
0156 # define PLAT_ppc64be_linux 1
0157 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
0158
0159 # define PLAT_ppc64le_linux 1
0160 #elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
0161 # define PLAT_arm_linux 1
0162 #elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
0163 # define PLAT_arm64_linux 1
0164 #elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
0165 # define PLAT_s390x_linux 1
0166 #elif defined(__linux__) && defined(__mips__) && (__mips==64)
0167 # define PLAT_mips64_linux 1
0168 #elif defined(__linux__) && defined(__mips__) && (__mips==32)
0169 # define PLAT_mips32_linux 1
0170 #elif defined(__linux__) && defined(__nanomips__)
0171 # define PLAT_nanomips_linux 1
0172 #elif defined(__sun) && defined(__i386__)
0173 # define PLAT_x86_solaris 1
0174 #elif defined(__sun) && defined(__x86_64__)
0175 # define PLAT_amd64_solaris 1
0176 #else
0177
0178
0179 # if !defined(NVALGRIND)
0180 # define NVALGRIND 1
0181 # endif
0182 #endif
0183
0184
0185
0186
0187
0188
0189
0190
0191
0192
0193
0194
0195
0196
0197
0198
0199
0200
0201
0202
0203 #define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default, \
0204 _zzq_request, _zzq_arg1, _zzq_arg2, \
0205 _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0206 do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default), \
0207 (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
0208 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
0209
0210 #define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1, \
0211 _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0212 do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
0213 (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
0214 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
0215
0216 #if defined(NVALGRIND)
0217
0218
0219
0220
0221 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0222 _zzq_default, _zzq_request, \
0223 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0224 (_zzq_default)
0225
0226 #else
0227
0228
0229
0230
0231
0232
0233
0234
0235
0236
0237
0238
0239
0240
0241
0242
0243
0244
0245
0246
0247
0248
0249
0250
0251
0252
0253
0254
0255
0256
0257
0258
0259
0260
0261
0262
0263
0264
0265 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
0266 || (defined(PLAT_x86_win32) && defined(__GNUC__)) \
0267 || defined(PLAT_x86_solaris) || defined(PLAT_x86_freebsd)
0268
0269 typedef
0270 struct {
0271 unsigned int nraddr;
0272 }
0273 OrigFn;
0274
0275 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0276 "roll $3, %%edi ; roll $13, %%edi\n\t" \
0277 "roll $29, %%edi ; roll $19, %%edi\n\t"
0278
0279 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0280 _zzq_default, _zzq_request, \
0281 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0282 __extension__ \
0283 ({volatile unsigned int _zzq_args[6]; \
0284 volatile unsigned int _zzq_result; \
0285 _zzq_args[0] = (unsigned int)(_zzq_request); \
0286 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
0287 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
0288 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
0289 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
0290 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
0291 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0292 \
0293 "xchgl %%ebx,%%ebx" \
0294 : "=d" (_zzq_result) \
0295 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
0296 : "cc", "memory" \
0297 ); \
0298 _zzq_result; \
0299 })
0300
0301 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0302 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0303 volatile unsigned int __addr; \
0304 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0305 \
0306 "xchgl %%ecx,%%ecx" \
0307 : "=a" (__addr) \
0308 : \
0309 : "cc", "memory" \
0310 ); \
0311 _zzq_orig->nraddr = __addr; \
0312 }
0313
0314 #define VALGRIND_CALL_NOREDIR_EAX \
0315 __SPECIAL_INSTRUCTION_PREAMBLE \
0316 \
0317 "xchgl %%edx,%%edx\n\t"
0318
0319 #define VALGRIND_VEX_INJECT_IR() \
0320 do { \
0321 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0322 "xchgl %%edi,%%edi\n\t" \
0323 : : : "cc", "memory" \
0324 ); \
0325 } while (0)
0326
0327 #endif
0328
0329
0330
0331
0332 #if defined(PLAT_x86_win32) && !defined(__GNUC__)
0333
0334 typedef
0335 struct {
0336 unsigned int nraddr;
0337 }
0338 OrigFn;
0339
0340 #if defined(_MSC_VER)
0341
0342 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0343 __asm rol edi, 3 __asm rol edi, 13 \
0344 __asm rol edi, 29 __asm rol edi, 19
0345
0346 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0347 _zzq_default, _zzq_request, \
0348 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0349 valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
0350 (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
0351 (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
0352 (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))
0353
0354 static __inline uintptr_t
0355 valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
0356 uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
0357 uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
0358 uintptr_t _zzq_arg5)
0359 {
0360 volatile uintptr_t _zzq_args[6];
0361 volatile unsigned int _zzq_result;
0362 _zzq_args[0] = (uintptr_t)(_zzq_request);
0363 _zzq_args[1] = (uintptr_t)(_zzq_arg1);
0364 _zzq_args[2] = (uintptr_t)(_zzq_arg2);
0365 _zzq_args[3] = (uintptr_t)(_zzq_arg3);
0366 _zzq_args[4] = (uintptr_t)(_zzq_arg4);
0367 _zzq_args[5] = (uintptr_t)(_zzq_arg5);
0368 __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
0369 __SPECIAL_INSTRUCTION_PREAMBLE
0370
0371 __asm xchg ebx,ebx
0372 __asm mov _zzq_result, edx
0373 }
0374 return _zzq_result;
0375 }
0376
0377 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0378 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0379 volatile unsigned int __addr; \
0380 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
0381 \
0382 __asm xchg ecx,ecx \
0383 __asm mov __addr, eax \
0384 } \
0385 _zzq_orig->nraddr = __addr; \
0386 }
0387
0388 #define VALGRIND_CALL_NOREDIR_EAX ERROR
0389
0390 #define VALGRIND_VEX_INJECT_IR() \
0391 do { \
0392 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
0393 __asm xchg edi,edi \
0394 } \
0395 } while (0)
0396
0397 #else
0398 #error Unsupported compiler.
0399 #endif
0400
0401 #endif
0402
0403
0404
0405 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
0406 || defined(PLAT_amd64_solaris) \
0407 || defined(PLAT_amd64_freebsd) \
0408 || (defined(PLAT_amd64_win64) && defined(__GNUC__))
0409
0410 typedef
0411 struct {
0412 unsigned long int nraddr;
0413 }
0414 OrigFn;
0415
0416 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0417 "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
0418 "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
0419
0420 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0421 _zzq_default, _zzq_request, \
0422 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0423 __extension__ \
0424 ({ volatile unsigned long int _zzq_args[6]; \
0425 volatile unsigned long int _zzq_result; \
0426 _zzq_args[0] = (unsigned long int)(_zzq_request); \
0427 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
0428 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
0429 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
0430 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
0431 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
0432 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0433 \
0434 "xchgq %%rbx,%%rbx" \
0435 : "=d" (_zzq_result) \
0436 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
0437 : "cc", "memory" \
0438 ); \
0439 _zzq_result; \
0440 })
0441
0442 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0443 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0444 volatile unsigned long int __addr; \
0445 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0446 \
0447 "xchgq %%rcx,%%rcx" \
0448 : "=a" (__addr) \
0449 : \
0450 : "cc", "memory" \
0451 ); \
0452 _zzq_orig->nraddr = __addr; \
0453 }
0454
0455 #define VALGRIND_CALL_NOREDIR_RAX \
0456 __SPECIAL_INSTRUCTION_PREAMBLE \
0457 \
0458 "xchgq %%rdx,%%rdx\n\t"
0459
0460 #define VALGRIND_VEX_INJECT_IR() \
0461 do { \
0462 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0463 "xchgq %%rdi,%%rdi\n\t" \
0464 : : : "cc", "memory" \
0465 ); \
0466 } while (0)
0467
0468 #endif
0469
0470
0471
0472 #if defined(PLAT_amd64_win64) && !defined(__GNUC__)
0473
0474 #error Unsupported compiler.
0475
0476 #endif
0477
0478
0479
0480 #if defined(PLAT_ppc32_linux)
0481
0482 typedef
0483 struct {
0484 unsigned int nraddr;
0485 }
0486 OrigFn;
0487
0488 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0489 "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
0490 "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"
0491
0492 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0493 _zzq_default, _zzq_request, \
0494 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0495 \
0496 __extension__ \
0497 ({ unsigned int _zzq_args[6]; \
0498 unsigned int _zzq_result; \
0499 unsigned int* _zzq_ptr; \
0500 _zzq_args[0] = (unsigned int)(_zzq_request); \
0501 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
0502 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
0503 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
0504 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
0505 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
0506 _zzq_ptr = _zzq_args; \
0507 __asm__ volatile("mr 3,%1\n\t" \
0508 "mr 4,%2\n\t" \
0509 __SPECIAL_INSTRUCTION_PREAMBLE \
0510 \
0511 "or 1,1,1\n\t" \
0512 "mr %0,3" \
0513 : "=b" (_zzq_result) \
0514 : "b" (_zzq_default), "b" (_zzq_ptr) \
0515 : "cc", "memory", "r3", "r4"); \
0516 _zzq_result; \
0517 })
0518
0519 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0520 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0521 unsigned int __addr; \
0522 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0523 \
0524 "or 2,2,2\n\t" \
0525 "mr %0,3" \
0526 : "=b" (__addr) \
0527 : \
0528 : "cc", "memory", "r3" \
0529 ); \
0530 _zzq_orig->nraddr = __addr; \
0531 }
0532
0533 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
0534 __SPECIAL_INSTRUCTION_PREAMBLE \
0535 \
0536 "or 3,3,3\n\t"
0537
0538 #define VALGRIND_VEX_INJECT_IR() \
0539 do { \
0540 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0541 "or 5,5,5\n\t" \
0542 ); \
0543 } while (0)
0544
0545 #endif
0546
0547
0548
0549 #if defined(PLAT_ppc64be_linux)
0550
0551 typedef
0552 struct {
0553 unsigned long int nraddr;
0554 unsigned long int r2;
0555 }
0556 OrigFn;
0557
0558 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0559 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
0560 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
0561
0562 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0563 _zzq_default, _zzq_request, \
0564 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0565 \
0566 __extension__ \
0567 ({ unsigned long int _zzq_args[6]; \
0568 unsigned long int _zzq_result; \
0569 unsigned long int* _zzq_ptr; \
0570 _zzq_args[0] = (unsigned long int)(_zzq_request); \
0571 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
0572 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
0573 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
0574 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
0575 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
0576 _zzq_ptr = _zzq_args; \
0577 __asm__ volatile("mr 3,%1\n\t" \
0578 "mr 4,%2\n\t" \
0579 __SPECIAL_INSTRUCTION_PREAMBLE \
0580 \
0581 "or 1,1,1\n\t" \
0582 "mr %0,3" \
0583 : "=b" (_zzq_result) \
0584 : "b" (_zzq_default), "b" (_zzq_ptr) \
0585 : "cc", "memory", "r3", "r4"); \
0586 _zzq_result; \
0587 })
0588
0589 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0590 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0591 unsigned long int __addr; \
0592 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0593 \
0594 "or 2,2,2\n\t" \
0595 "mr %0,3" \
0596 : "=b" (__addr) \
0597 : \
0598 : "cc", "memory", "r3" \
0599 ); \
0600 _zzq_orig->nraddr = __addr; \
0601 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0602 \
0603 "or 4,4,4\n\t" \
0604 "mr %0,3" \
0605 : "=b" (__addr) \
0606 : \
0607 : "cc", "memory", "r3" \
0608 ); \
0609 _zzq_orig->r2 = __addr; \
0610 }
0611
0612 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
0613 __SPECIAL_INSTRUCTION_PREAMBLE \
0614 \
0615 "or 3,3,3\n\t"
0616
0617 #define VALGRIND_VEX_INJECT_IR() \
0618 do { \
0619 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0620 "or 5,5,5\n\t" \
0621 ); \
0622 } while (0)
0623
0624 #endif
0625
0626 #if defined(PLAT_ppc64le_linux)
0627
0628 typedef
0629 struct {
0630 unsigned long int nraddr;
0631 unsigned long int r2;
0632 }
0633 OrigFn;
0634
0635 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0636 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
0637 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
0638
0639 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0640 _zzq_default, _zzq_request, \
0641 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0642 \
0643 __extension__ \
0644 ({ unsigned long int _zzq_args[6]; \
0645 unsigned long int _zzq_result; \
0646 unsigned long int* _zzq_ptr; \
0647 _zzq_args[0] = (unsigned long int)(_zzq_request); \
0648 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
0649 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
0650 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
0651 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
0652 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
0653 _zzq_ptr = _zzq_args; \
0654 __asm__ volatile("mr 3,%1\n\t" \
0655 "mr 4,%2\n\t" \
0656 __SPECIAL_INSTRUCTION_PREAMBLE \
0657 \
0658 "or 1,1,1\n\t" \
0659 "mr %0,3" \
0660 : "=b" (_zzq_result) \
0661 : "b" (_zzq_default), "b" (_zzq_ptr) \
0662 : "cc", "memory", "r3", "r4"); \
0663 _zzq_result; \
0664 })
0665
0666 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0667 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0668 unsigned long int __addr; \
0669 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0670 \
0671 "or 2,2,2\n\t" \
0672 "mr %0,3" \
0673 : "=b" (__addr) \
0674 : \
0675 : "cc", "memory", "r3" \
0676 ); \
0677 _zzq_orig->nraddr = __addr; \
0678 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0679 \
0680 "or 4,4,4\n\t" \
0681 "mr %0,3" \
0682 : "=b" (__addr) \
0683 : \
0684 : "cc", "memory", "r3" \
0685 ); \
0686 _zzq_orig->r2 = __addr; \
0687 }
0688
0689 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
0690 __SPECIAL_INSTRUCTION_PREAMBLE \
0691 \
0692 "or 3,3,3\n\t"
0693
0694 #define VALGRIND_VEX_INJECT_IR() \
0695 do { \
0696 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0697 "or 5,5,5\n\t" \
0698 ); \
0699 } while (0)
0700
0701 #endif
0702
0703
0704
0705 #if defined(PLAT_arm_linux)
0706
0707 typedef
0708 struct {
0709 unsigned int nraddr;
0710 }
0711 OrigFn;
0712
0713 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0714 "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
0715 "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
0716
0717 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0718 _zzq_default, _zzq_request, \
0719 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0720 \
0721 __extension__ \
0722 ({volatile unsigned int _zzq_args[6]; \
0723 volatile unsigned int _zzq_result; \
0724 _zzq_args[0] = (unsigned int)(_zzq_request); \
0725 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
0726 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
0727 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
0728 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
0729 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
0730 __asm__ volatile("mov r3, %1\n\t" \
0731 "mov r4, %2\n\t" \
0732 __SPECIAL_INSTRUCTION_PREAMBLE \
0733 \
0734 "orr r10, r10, r10\n\t" \
0735 "mov %0, r3" \
0736 : "=r" (_zzq_result) \
0737 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
0738 : "cc","memory", "r3", "r4"); \
0739 _zzq_result; \
0740 })
0741
0742 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0743 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0744 unsigned int __addr; \
0745 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0746 \
0747 "orr r11, r11, r11\n\t" \
0748 "mov %0, r3" \
0749 : "=r" (__addr) \
0750 : \
0751 : "cc", "memory", "r3" \
0752 ); \
0753 _zzq_orig->nraddr = __addr; \
0754 }
0755
0756 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
0757 __SPECIAL_INSTRUCTION_PREAMBLE \
0758 \
0759 "orr r12, r12, r12\n\t"
0760
0761 #define VALGRIND_VEX_INJECT_IR() \
0762 do { \
0763 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0764 "orr r9, r9, r9\n\t" \
0765 : : : "cc", "memory" \
0766 ); \
0767 } while (0)
0768
0769 #endif
0770
0771
0772
0773 #if defined(PLAT_arm64_linux)
0774
0775 typedef
0776 struct {
0777 unsigned long int nraddr;
0778 }
0779 OrigFn;
0780
0781 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0782 "ror x12, x12, #3 ; ror x12, x12, #13 \n\t" \
0783 "ror x12, x12, #51 ; ror x12, x12, #61 \n\t"
0784
0785 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0786 _zzq_default, _zzq_request, \
0787 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0788 \
0789 __extension__ \
0790 ({volatile unsigned long int _zzq_args[6]; \
0791 volatile unsigned long int _zzq_result; \
0792 _zzq_args[0] = (unsigned long int)(_zzq_request); \
0793 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
0794 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
0795 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
0796 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
0797 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
0798 __asm__ volatile("mov x3, %1\n\t" \
0799 "mov x4, %2\n\t" \
0800 __SPECIAL_INSTRUCTION_PREAMBLE \
0801 \
0802 "orr x10, x10, x10\n\t" \
0803 "mov %0, x3" \
0804 : "=r" (_zzq_result) \
0805 : "r" ((unsigned long int)(_zzq_default)), \
0806 "r" (&_zzq_args[0]) \
0807 : "cc","memory", "x3", "x4"); \
0808 _zzq_result; \
0809 })
0810
0811 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0812 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0813 unsigned long int __addr; \
0814 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0815 \
0816 "orr x11, x11, x11\n\t" \
0817 "mov %0, x3" \
0818 : "=r" (__addr) \
0819 : \
0820 : "cc", "memory", "x3" \
0821 ); \
0822 _zzq_orig->nraddr = __addr; \
0823 }
0824
0825 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
0826 __SPECIAL_INSTRUCTION_PREAMBLE \
0827 \
0828 "orr x12, x12, x12\n\t"
0829
0830 #define VALGRIND_VEX_INJECT_IR() \
0831 do { \
0832 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0833 "orr x9, x9, x9\n\t" \
0834 : : : "cc", "memory" \
0835 ); \
0836 } while (0)
0837
0838 #endif
0839
0840
0841
0842 #if defined(PLAT_s390x_linux)
0843
0844 typedef
0845 struct {
0846 unsigned long int nraddr;
0847 }
0848 OrigFn;
0849
0850
0851
0852
0853
0854 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0855 "lr 15,15\n\t" \
0856 "lr 1,1\n\t" \
0857 "lr 2,2\n\t" \
0858 "lr 3,3\n\t"
0859
0860 #define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
0861 #define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
0862 #define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
0863 #define __VEX_INJECT_IR_CODE "lr 5,5\n\t"
0864
0865 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0866 _zzq_default, _zzq_request, \
0867 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0868 __extension__ \
0869 ({volatile unsigned long int _zzq_args[6]; \
0870 volatile unsigned long int _zzq_result; \
0871 _zzq_args[0] = (unsigned long int)(_zzq_request); \
0872 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
0873 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
0874 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
0875 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
0876 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
0877 __asm__ volatile( \
0878 "lgr 2,%1\n\t" \
0879 \
0880 "lgr 3,%2\n\t" \
0881 __SPECIAL_INSTRUCTION_PREAMBLE \
0882 __CLIENT_REQUEST_CODE \
0883 \
0884 "lgr %0, 3\n\t" \
0885 : "=d" (_zzq_result) \
0886 : "a" (&_zzq_args[0]), \
0887 "0" ((unsigned long int)_zzq_default) \
0888 : "cc", "2", "3", "memory" \
0889 ); \
0890 _zzq_result; \
0891 })
0892
0893 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0894 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0895 volatile unsigned long int __addr; \
0896 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0897 __GET_NR_CONTEXT_CODE \
0898 "lgr %0, 3\n\t" \
0899 : "=a" (__addr) \
0900 : \
0901 : "cc", "3", "memory" \
0902 ); \
0903 _zzq_orig->nraddr = __addr; \
0904 }
0905
0906 #define VALGRIND_CALL_NOREDIR_R1 \
0907 __SPECIAL_INSTRUCTION_PREAMBLE \
0908 __CALL_NO_REDIR_CODE
0909
0910 #define VALGRIND_VEX_INJECT_IR() \
0911 do { \
0912 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0913 __VEX_INJECT_IR_CODE); \
0914 } while (0)
0915
0916 #endif
0917
0918
0919
0920 #if defined(PLAT_mips32_linux)
0921
0922 typedef
0923 struct {
0924 unsigned int nraddr;
0925 }
0926 OrigFn;
0927
0928
0929
0930
0931
0932 #define __SPECIAL_INSTRUCTION_PREAMBLE \
0933 "srl $0, $0, 13\n\t" \
0934 "srl $0, $0, 29\n\t" \
0935 "srl $0, $0, 3\n\t" \
0936 "srl $0, $0, 19\n\t"
0937
0938 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
0939 _zzq_default, _zzq_request, \
0940 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
0941 __extension__ \
0942 ({ volatile unsigned int _zzq_args[6]; \
0943 volatile unsigned int _zzq_result; \
0944 _zzq_args[0] = (unsigned int)(_zzq_request); \
0945 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
0946 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
0947 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
0948 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
0949 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
0950 __asm__ volatile("move $11, %1\n\t" \
0951 "move $12, %2\n\t" \
0952 __SPECIAL_INSTRUCTION_PREAMBLE \
0953 \
0954 "or $13, $13, $13\n\t" \
0955 "move %0, $11\n\t" \
0956 : "=r" (_zzq_result) \
0957 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
0958 : "$11", "$12", "memory"); \
0959 _zzq_result; \
0960 })
0961
0962 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
0963 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
0964 volatile unsigned int __addr; \
0965 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0966 \
0967 "or $14, $14, $14\n\t" \
0968 "move %0, $11" \
0969 : "=r" (__addr) \
0970 : \
0971 : "$11" \
0972 ); \
0973 _zzq_orig->nraddr = __addr; \
0974 }
0975
0976 #define VALGRIND_CALL_NOREDIR_T9 \
0977 __SPECIAL_INSTRUCTION_PREAMBLE \
0978 \
0979 "or $15, $15, $15\n\t"
0980
0981 #define VALGRIND_VEX_INJECT_IR() \
0982 do { \
0983 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
0984 "or $11, $11, $11\n\t" \
0985 ); \
0986 } while (0)
0987
0988
0989 #endif
0990
0991
0992
0993 #if defined(PLAT_mips64_linux)
0994
0995 typedef
0996 struct {
0997 unsigned long nraddr;
0998 }
0999 OrigFn;
1000
1001
1002
1003
1004
1005 #define __SPECIAL_INSTRUCTION_PREAMBLE \
1006 "dsll $0,$0, 3 ; dsll $0,$0,13\n\t" \
1007 "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
1008
1009 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
1010 _zzq_default, _zzq_request, \
1011 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
1012 __extension__ \
1013 ({ volatile unsigned long int _zzq_args[6]; \
1014 volatile unsigned long int _zzq_result; \
1015 _zzq_args[0] = (unsigned long int)(_zzq_request); \
1016 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
1017 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
1018 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
1019 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
1020 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
1021 __asm__ volatile("move $11, %1\n\t" \
1022 "move $12, %2\n\t" \
1023 __SPECIAL_INSTRUCTION_PREAMBLE \
1024 \
1025 "or $13, $13, $13\n\t" \
1026 "move %0, $11\n\t" \
1027 : "=r" (_zzq_result) \
1028 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
1029 : "$11", "$12", "memory"); \
1030 _zzq_result; \
1031 })
1032
1033 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
1034 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
1035 volatile unsigned long int __addr; \
1036 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1037 \
1038 "or $14, $14, $14\n\t" \
1039 "move %0, $11" \
1040 : "=r" (__addr) \
1041 : \
1042 : "$11"); \
1043 _zzq_orig->nraddr = __addr; \
1044 }
1045
1046 #define VALGRIND_CALL_NOREDIR_T9 \
1047 __SPECIAL_INSTRUCTION_PREAMBLE \
1048 \
1049 "or $15, $15, $15\n\t"
1050
1051 #define VALGRIND_VEX_INJECT_IR() \
1052 do { \
1053 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1054 "or $11, $11, $11\n\t" \
1055 ); \
1056 } while (0)
1057
1058 #endif
1059
1060 #if defined(PLAT_nanomips_linux)
1061
1062 typedef
1063 struct {
1064 unsigned int nraddr;
1065 }
1066 OrigFn;
1067
1068
1069
1070
1071
1072
1073
1074 #define __SPECIAL_INSTRUCTION_PREAMBLE "srl[32] $zero, $zero, 13 \n\t" \
1075 "srl[32] $zero, $zero, 29 \n\t" \
1076 "srl[32] $zero, $zero, 3 \n\t" \
1077 "srl[32] $zero, $zero, 19 \n\t"
1078
1079 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
1080 _zzq_default, _zzq_request, \
1081 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
1082 __extension__ \
1083 ({ volatile unsigned int _zzq_args[6]; \
1084 volatile unsigned int _zzq_result; \
1085 _zzq_args[0] = (unsigned int)(_zzq_request); \
1086 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
1087 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
1088 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
1089 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
1090 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
1091 __asm__ volatile("move $a7, %1\n\t" \
1092 "move $t0, %2\n\t" \
1093 __SPECIAL_INSTRUCTION_PREAMBLE \
1094 \
1095 "or[32] $t0, $t0, $t0\n\t" \
1096 "move %0, $a7\n\t" \
1097 : "=r" (_zzq_result) \
1098 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
1099 : "$a7", "$t0", "memory"); \
1100 _zzq_result; \
1101 })
1102
1103 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
1104 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
1105 volatile unsigned long int __addr; \
1106 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1107 \
1108 "or[32] $t1, $t1, $t1\n\t" \
1109 "move %0, $a7" \
1110 : "=r" (__addr) \
1111 : \
1112 : "$a7"); \
1113 _zzq_orig->nraddr = __addr; \
1114 }
1115
1116 #define VALGRIND_CALL_NOREDIR_T9 \
1117 __SPECIAL_INSTRUCTION_PREAMBLE \
1118 \
1119 "or[32] $t2, $t2, $t2\n\t"
1120
1121 #define VALGRIND_VEX_INJECT_IR() \
1122 do { \
1123 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1124 "or[32] $t3, $t3, $t3\n\t" \
1125 ); \
1126 } while (0)
1127
1128 #endif
1129
1130
1131 #endif
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165 #define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd
1166
1167 #define I_WRAP_SONAME_FNNAME_ZU(soname,fnname) \
1168 VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)
1169
1170 #define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname) \
1171 VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
1172
1173
1174
1175
1176
1177 #define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
1178
1179
1180
1181
1182
1183
1184
1185 #define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname) \
1186 VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)
1187
1188 #define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname) \
1189 VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1190
1191
1192
1193
1194 #define CALL_FN_v_v(fnptr) \
1195 do { volatile unsigned long _junk; \
1196 CALL_FN_W_v(_junk,fnptr); } while (0)
1197
1198 #define CALL_FN_v_W(fnptr, arg1) \
1199 do { volatile unsigned long _junk; \
1200 CALL_FN_W_W(_junk,fnptr,arg1); } while (0)
1201
1202 #define CALL_FN_v_WW(fnptr, arg1,arg2) \
1203 do { volatile unsigned long _junk; \
1204 CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)
1205
1206 #define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3) \
1207 do { volatile unsigned long _junk; \
1208 CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)
1209
1210 #define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4) \
1211 do { volatile unsigned long _junk; \
1212 CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)
1213
1214 #define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5) \
1215 do { volatile unsigned long _junk; \
1216 CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)
1217
1218 #define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6) \
1219 do { volatile unsigned long _junk; \
1220 CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)
1221
1222 #define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7) \
1223 do { volatile unsigned long _junk; \
1224 CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1225
1226
1227
1228 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
1229 || defined(PLAT_x86_solaris) || defined(PLAT_x86_freebsd)
1230
1231
1232
1233 #define __CALLER_SAVED_REGS "ecx", "edx"
1234
1235
1236
1237
1238
1239
1240 #define VALGRIND_ALIGN_STACK \
1241 "movl %%esp,%%edi\n\t" \
1242 "andl $0xfffffff0,%%esp\n\t"
1243 #define VALGRIND_RESTORE_STACK \
1244 "movl %%edi,%%esp\n\t"
1245
1246
1247
1248
1249 #define CALL_FN_W_v(lval, orig) \
1250 do { \
1251 volatile OrigFn _orig = (orig); \
1252 volatile unsigned long _argvec[1]; \
1253 volatile unsigned long _res; \
1254 _argvec[0] = (unsigned long)_orig.nraddr; \
1255 __asm__ volatile( \
1256 VALGRIND_ALIGN_STACK \
1257 "movl (%%eax), %%eax\n\t" \
1258 VALGRIND_CALL_NOREDIR_EAX \
1259 VALGRIND_RESTORE_STACK \
1260 : "=a" (_res) \
1261 : "a" (&_argvec[0]) \
1262 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1263 ); \
1264 lval = (__typeof__(lval)) _res; \
1265 } while (0)
1266
1267 #define CALL_FN_W_W(lval, orig, arg1) \
1268 do { \
1269 volatile OrigFn _orig = (orig); \
1270 volatile unsigned long _argvec[2]; \
1271 volatile unsigned long _res; \
1272 _argvec[0] = (unsigned long)_orig.nraddr; \
1273 _argvec[1] = (unsigned long)(arg1); \
1274 __asm__ volatile( \
1275 VALGRIND_ALIGN_STACK \
1276 "subl $12, %%esp\n\t" \
1277 "pushl 4(%%eax)\n\t" \
1278 "movl (%%eax), %%eax\n\t" \
1279 VALGRIND_CALL_NOREDIR_EAX \
1280 VALGRIND_RESTORE_STACK \
1281 : "=a" (_res) \
1282 : "a" (&_argvec[0]) \
1283 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1284 ); \
1285 lval = (__typeof__(lval)) _res; \
1286 } while (0)
1287
1288 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1289 do { \
1290 volatile OrigFn _orig = (orig); \
1291 volatile unsigned long _argvec[3]; \
1292 volatile unsigned long _res; \
1293 _argvec[0] = (unsigned long)_orig.nraddr; \
1294 _argvec[1] = (unsigned long)(arg1); \
1295 _argvec[2] = (unsigned long)(arg2); \
1296 __asm__ volatile( \
1297 VALGRIND_ALIGN_STACK \
1298 "subl $8, %%esp\n\t" \
1299 "pushl 8(%%eax)\n\t" \
1300 "pushl 4(%%eax)\n\t" \
1301 "movl (%%eax), %%eax\n\t" \
1302 VALGRIND_CALL_NOREDIR_EAX \
1303 VALGRIND_RESTORE_STACK \
1304 : "=a" (_res) \
1305 : "a" (&_argvec[0]) \
1306 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1307 ); \
1308 lval = (__typeof__(lval)) _res; \
1309 } while (0)
1310
1311 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1312 do { \
1313 volatile OrigFn _orig = (orig); \
1314 volatile unsigned long _argvec[4]; \
1315 volatile unsigned long _res; \
1316 _argvec[0] = (unsigned long)_orig.nraddr; \
1317 _argvec[1] = (unsigned long)(arg1); \
1318 _argvec[2] = (unsigned long)(arg2); \
1319 _argvec[3] = (unsigned long)(arg3); \
1320 __asm__ volatile( \
1321 VALGRIND_ALIGN_STACK \
1322 "subl $4, %%esp\n\t" \
1323 "pushl 12(%%eax)\n\t" \
1324 "pushl 8(%%eax)\n\t" \
1325 "pushl 4(%%eax)\n\t" \
1326 "movl (%%eax), %%eax\n\t" \
1327 VALGRIND_CALL_NOREDIR_EAX \
1328 VALGRIND_RESTORE_STACK \
1329 : "=a" (_res) \
1330 : "a" (&_argvec[0]) \
1331 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1332 ); \
1333 lval = (__typeof__(lval)) _res; \
1334 } while (0)
1335
1336 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1337 do { \
1338 volatile OrigFn _orig = (orig); \
1339 volatile unsigned long _argvec[5]; \
1340 volatile unsigned long _res; \
1341 _argvec[0] = (unsigned long)_orig.nraddr; \
1342 _argvec[1] = (unsigned long)(arg1); \
1343 _argvec[2] = (unsigned long)(arg2); \
1344 _argvec[3] = (unsigned long)(arg3); \
1345 _argvec[4] = (unsigned long)(arg4); \
1346 __asm__ volatile( \
1347 VALGRIND_ALIGN_STACK \
1348 "pushl 16(%%eax)\n\t" \
1349 "pushl 12(%%eax)\n\t" \
1350 "pushl 8(%%eax)\n\t" \
1351 "pushl 4(%%eax)\n\t" \
1352 "movl (%%eax), %%eax\n\t" \
1353 VALGRIND_CALL_NOREDIR_EAX \
1354 VALGRIND_RESTORE_STACK \
1355 : "=a" (_res) \
1356 : "a" (&_argvec[0]) \
1357 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1358 ); \
1359 lval = (__typeof__(lval)) _res; \
1360 } while (0)
1361
1362 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1363 do { \
1364 volatile OrigFn _orig = (orig); \
1365 volatile unsigned long _argvec[6]; \
1366 volatile unsigned long _res; \
1367 _argvec[0] = (unsigned long)_orig.nraddr; \
1368 _argvec[1] = (unsigned long)(arg1); \
1369 _argvec[2] = (unsigned long)(arg2); \
1370 _argvec[3] = (unsigned long)(arg3); \
1371 _argvec[4] = (unsigned long)(arg4); \
1372 _argvec[5] = (unsigned long)(arg5); \
1373 __asm__ volatile( \
1374 VALGRIND_ALIGN_STACK \
1375 "subl $12, %%esp\n\t" \
1376 "pushl 20(%%eax)\n\t" \
1377 "pushl 16(%%eax)\n\t" \
1378 "pushl 12(%%eax)\n\t" \
1379 "pushl 8(%%eax)\n\t" \
1380 "pushl 4(%%eax)\n\t" \
1381 "movl (%%eax), %%eax\n\t" \
1382 VALGRIND_CALL_NOREDIR_EAX \
1383 VALGRIND_RESTORE_STACK \
1384 : "=a" (_res) \
1385 : "a" (&_argvec[0]) \
1386 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1387 ); \
1388 lval = (__typeof__(lval)) _res; \
1389 } while (0)
1390
1391 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1392 do { \
1393 volatile OrigFn _orig = (orig); \
1394 volatile unsigned long _argvec[7]; \
1395 volatile unsigned long _res; \
1396 _argvec[0] = (unsigned long)_orig.nraddr; \
1397 _argvec[1] = (unsigned long)(arg1); \
1398 _argvec[2] = (unsigned long)(arg2); \
1399 _argvec[3] = (unsigned long)(arg3); \
1400 _argvec[4] = (unsigned long)(arg4); \
1401 _argvec[5] = (unsigned long)(arg5); \
1402 _argvec[6] = (unsigned long)(arg6); \
1403 __asm__ volatile( \
1404 VALGRIND_ALIGN_STACK \
1405 "subl $8, %%esp\n\t" \
1406 "pushl 24(%%eax)\n\t" \
1407 "pushl 20(%%eax)\n\t" \
1408 "pushl 16(%%eax)\n\t" \
1409 "pushl 12(%%eax)\n\t" \
1410 "pushl 8(%%eax)\n\t" \
1411 "pushl 4(%%eax)\n\t" \
1412 "movl (%%eax), %%eax\n\t" \
1413 VALGRIND_CALL_NOREDIR_EAX \
1414 VALGRIND_RESTORE_STACK \
1415 : "=a" (_res) \
1416 : "a" (&_argvec[0]) \
1417 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1418 ); \
1419 lval = (__typeof__(lval)) _res; \
1420 } while (0)
1421
1422 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1423 arg7) \
1424 do { \
1425 volatile OrigFn _orig = (orig); \
1426 volatile unsigned long _argvec[8]; \
1427 volatile unsigned long _res; \
1428 _argvec[0] = (unsigned long)_orig.nraddr; \
1429 _argvec[1] = (unsigned long)(arg1); \
1430 _argvec[2] = (unsigned long)(arg2); \
1431 _argvec[3] = (unsigned long)(arg3); \
1432 _argvec[4] = (unsigned long)(arg4); \
1433 _argvec[5] = (unsigned long)(arg5); \
1434 _argvec[6] = (unsigned long)(arg6); \
1435 _argvec[7] = (unsigned long)(arg7); \
1436 __asm__ volatile( \
1437 VALGRIND_ALIGN_STACK \
1438 "subl $4, %%esp\n\t" \
1439 "pushl 28(%%eax)\n\t" \
1440 "pushl 24(%%eax)\n\t" \
1441 "pushl 20(%%eax)\n\t" \
1442 "pushl 16(%%eax)\n\t" \
1443 "pushl 12(%%eax)\n\t" \
1444 "pushl 8(%%eax)\n\t" \
1445 "pushl 4(%%eax)\n\t" \
1446 "movl (%%eax), %%eax\n\t" \
1447 VALGRIND_CALL_NOREDIR_EAX \
1448 VALGRIND_RESTORE_STACK \
1449 : "=a" (_res) \
1450 : "a" (&_argvec[0]) \
1451 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1452 ); \
1453 lval = (__typeof__(lval)) _res; \
1454 } while (0)
1455
1456 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1457 arg7,arg8) \
1458 do { \
1459 volatile OrigFn _orig = (orig); \
1460 volatile unsigned long _argvec[9]; \
1461 volatile unsigned long _res; \
1462 _argvec[0] = (unsigned long)_orig.nraddr; \
1463 _argvec[1] = (unsigned long)(arg1); \
1464 _argvec[2] = (unsigned long)(arg2); \
1465 _argvec[3] = (unsigned long)(arg3); \
1466 _argvec[4] = (unsigned long)(arg4); \
1467 _argvec[5] = (unsigned long)(arg5); \
1468 _argvec[6] = (unsigned long)(arg6); \
1469 _argvec[7] = (unsigned long)(arg7); \
1470 _argvec[8] = (unsigned long)(arg8); \
1471 __asm__ volatile( \
1472 VALGRIND_ALIGN_STACK \
1473 "pushl 32(%%eax)\n\t" \
1474 "pushl 28(%%eax)\n\t" \
1475 "pushl 24(%%eax)\n\t" \
1476 "pushl 20(%%eax)\n\t" \
1477 "pushl 16(%%eax)\n\t" \
1478 "pushl 12(%%eax)\n\t" \
1479 "pushl 8(%%eax)\n\t" \
1480 "pushl 4(%%eax)\n\t" \
1481 "movl (%%eax), %%eax\n\t" \
1482 VALGRIND_CALL_NOREDIR_EAX \
1483 VALGRIND_RESTORE_STACK \
1484 : "=a" (_res) \
1485 : "a" (&_argvec[0]) \
1486 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1487 ); \
1488 lval = (__typeof__(lval)) _res; \
1489 } while (0)
1490
1491 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1492 arg7,arg8,arg9) \
1493 do { \
1494 volatile OrigFn _orig = (orig); \
1495 volatile unsigned long _argvec[10]; \
1496 volatile unsigned long _res; \
1497 _argvec[0] = (unsigned long)_orig.nraddr; \
1498 _argvec[1] = (unsigned long)(arg1); \
1499 _argvec[2] = (unsigned long)(arg2); \
1500 _argvec[3] = (unsigned long)(arg3); \
1501 _argvec[4] = (unsigned long)(arg4); \
1502 _argvec[5] = (unsigned long)(arg5); \
1503 _argvec[6] = (unsigned long)(arg6); \
1504 _argvec[7] = (unsigned long)(arg7); \
1505 _argvec[8] = (unsigned long)(arg8); \
1506 _argvec[9] = (unsigned long)(arg9); \
1507 __asm__ volatile( \
1508 VALGRIND_ALIGN_STACK \
1509 "subl $12, %%esp\n\t" \
1510 "pushl 36(%%eax)\n\t" \
1511 "pushl 32(%%eax)\n\t" \
1512 "pushl 28(%%eax)\n\t" \
1513 "pushl 24(%%eax)\n\t" \
1514 "pushl 20(%%eax)\n\t" \
1515 "pushl 16(%%eax)\n\t" \
1516 "pushl 12(%%eax)\n\t" \
1517 "pushl 8(%%eax)\n\t" \
1518 "pushl 4(%%eax)\n\t" \
1519 "movl (%%eax), %%eax\n\t" \
1520 VALGRIND_CALL_NOREDIR_EAX \
1521 VALGRIND_RESTORE_STACK \
1522 : "=a" (_res) \
1523 : "a" (&_argvec[0]) \
1524 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1525 ); \
1526 lval = (__typeof__(lval)) _res; \
1527 } while (0)
1528
1529 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1530 arg7,arg8,arg9,arg10) \
1531 do { \
1532 volatile OrigFn _orig = (orig); \
1533 volatile unsigned long _argvec[11]; \
1534 volatile unsigned long _res; \
1535 _argvec[0] = (unsigned long)_orig.nraddr; \
1536 _argvec[1] = (unsigned long)(arg1); \
1537 _argvec[2] = (unsigned long)(arg2); \
1538 _argvec[3] = (unsigned long)(arg3); \
1539 _argvec[4] = (unsigned long)(arg4); \
1540 _argvec[5] = (unsigned long)(arg5); \
1541 _argvec[6] = (unsigned long)(arg6); \
1542 _argvec[7] = (unsigned long)(arg7); \
1543 _argvec[8] = (unsigned long)(arg8); \
1544 _argvec[9] = (unsigned long)(arg9); \
1545 _argvec[10] = (unsigned long)(arg10); \
1546 __asm__ volatile( \
1547 VALGRIND_ALIGN_STACK \
1548 "subl $8, %%esp\n\t" \
1549 "pushl 40(%%eax)\n\t" \
1550 "pushl 36(%%eax)\n\t" \
1551 "pushl 32(%%eax)\n\t" \
1552 "pushl 28(%%eax)\n\t" \
1553 "pushl 24(%%eax)\n\t" \
1554 "pushl 20(%%eax)\n\t" \
1555 "pushl 16(%%eax)\n\t" \
1556 "pushl 12(%%eax)\n\t" \
1557 "pushl 8(%%eax)\n\t" \
1558 "pushl 4(%%eax)\n\t" \
1559 "movl (%%eax), %%eax\n\t" \
1560 VALGRIND_CALL_NOREDIR_EAX \
1561 VALGRIND_RESTORE_STACK \
1562 : "=a" (_res) \
1563 : "a" (&_argvec[0]) \
1564 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1565 ); \
1566 lval = (__typeof__(lval)) _res; \
1567 } while (0)
1568
1569 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1570 arg6,arg7,arg8,arg9,arg10, \
1571 arg11) \
1572 do { \
1573 volatile OrigFn _orig = (orig); \
1574 volatile unsigned long _argvec[12]; \
1575 volatile unsigned long _res; \
1576 _argvec[0] = (unsigned long)_orig.nraddr; \
1577 _argvec[1] = (unsigned long)(arg1); \
1578 _argvec[2] = (unsigned long)(arg2); \
1579 _argvec[3] = (unsigned long)(arg3); \
1580 _argvec[4] = (unsigned long)(arg4); \
1581 _argvec[5] = (unsigned long)(arg5); \
1582 _argvec[6] = (unsigned long)(arg6); \
1583 _argvec[7] = (unsigned long)(arg7); \
1584 _argvec[8] = (unsigned long)(arg8); \
1585 _argvec[9] = (unsigned long)(arg9); \
1586 _argvec[10] = (unsigned long)(arg10); \
1587 _argvec[11] = (unsigned long)(arg11); \
1588 __asm__ volatile( \
1589 VALGRIND_ALIGN_STACK \
1590 "subl $4, %%esp\n\t" \
1591 "pushl 44(%%eax)\n\t" \
1592 "pushl 40(%%eax)\n\t" \
1593 "pushl 36(%%eax)\n\t" \
1594 "pushl 32(%%eax)\n\t" \
1595 "pushl 28(%%eax)\n\t" \
1596 "pushl 24(%%eax)\n\t" \
1597 "pushl 20(%%eax)\n\t" \
1598 "pushl 16(%%eax)\n\t" \
1599 "pushl 12(%%eax)\n\t" \
1600 "pushl 8(%%eax)\n\t" \
1601 "pushl 4(%%eax)\n\t" \
1602 "movl (%%eax), %%eax\n\t" \
1603 VALGRIND_CALL_NOREDIR_EAX \
1604 VALGRIND_RESTORE_STACK \
1605 : "=a" (_res) \
1606 : "a" (&_argvec[0]) \
1607 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1608 ); \
1609 lval = (__typeof__(lval)) _res; \
1610 } while (0)
1611
1612 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1613 arg6,arg7,arg8,arg9,arg10, \
1614 arg11,arg12) \
1615 do { \
1616 volatile OrigFn _orig = (orig); \
1617 volatile unsigned long _argvec[13]; \
1618 volatile unsigned long _res; \
1619 _argvec[0] = (unsigned long)_orig.nraddr; \
1620 _argvec[1] = (unsigned long)(arg1); \
1621 _argvec[2] = (unsigned long)(arg2); \
1622 _argvec[3] = (unsigned long)(arg3); \
1623 _argvec[4] = (unsigned long)(arg4); \
1624 _argvec[5] = (unsigned long)(arg5); \
1625 _argvec[6] = (unsigned long)(arg6); \
1626 _argvec[7] = (unsigned long)(arg7); \
1627 _argvec[8] = (unsigned long)(arg8); \
1628 _argvec[9] = (unsigned long)(arg9); \
1629 _argvec[10] = (unsigned long)(arg10); \
1630 _argvec[11] = (unsigned long)(arg11); \
1631 _argvec[12] = (unsigned long)(arg12); \
1632 __asm__ volatile( \
1633 VALGRIND_ALIGN_STACK \
1634 "pushl 48(%%eax)\n\t" \
1635 "pushl 44(%%eax)\n\t" \
1636 "pushl 40(%%eax)\n\t" \
1637 "pushl 36(%%eax)\n\t" \
1638 "pushl 32(%%eax)\n\t" \
1639 "pushl 28(%%eax)\n\t" \
1640 "pushl 24(%%eax)\n\t" \
1641 "pushl 20(%%eax)\n\t" \
1642 "pushl 16(%%eax)\n\t" \
1643 "pushl 12(%%eax)\n\t" \
1644 "pushl 8(%%eax)\n\t" \
1645 "pushl 4(%%eax)\n\t" \
1646 "movl (%%eax), %%eax\n\t" \
1647 VALGRIND_CALL_NOREDIR_EAX \
1648 VALGRIND_RESTORE_STACK \
1649 : "=a" (_res) \
1650 : "a" (&_argvec[0]) \
1651 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1652 ); \
1653 lval = (__typeof__(lval)) _res; \
1654 } while (0)
1655
1656 #endif
1657
1658
1659
1660 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
1661 || defined(PLAT_amd64_solaris) || defined(PLAT_amd64_freebsd)
1662
1663
1664
1665
1666 #define __CALLER_SAVED_REGS "rcx", "rdx", "rsi", \
1667 "rdi", "r8", "r9", "r10", "r11"
1668
1669
1670
1671
1672
1673
1674
1675
1676
1677
1678
1679
1680
1681
1682
1683
1684
1685
1686
1687
1688
1689
1690
1691
1692
1693
1694
1695
1696
1697
1698
1699
1700
1701
1702
1703
1704
1705
1706
1707
1708
1709
1710
1711
1712
1713
1714
1715
1716
1717
1718
1719
1720
1721
1722
1723 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
1724 # define __FRAME_POINTER \
1725 ,"r"(__builtin_dwarf_cfa())
1726 # define VALGRIND_CFI_PROLOGUE \
1727 "movq %%rbp, %%r15\n\t" \
1728 "movq %2, %%rbp\n\t" \
1729 ".cfi_remember_state\n\t" \
1730 ".cfi_def_cfa rbp, 0\n\t"
1731 # define VALGRIND_CFI_EPILOGUE \
1732 "movq %%r15, %%rbp\n\t" \
1733 ".cfi_restore_state\n\t"
1734 #else
1735 # define __FRAME_POINTER
1736 # define VALGRIND_CFI_PROLOGUE
1737 # define VALGRIND_CFI_EPILOGUE
1738 #endif
1739
1740
1741
1742
1743
1744
1745 #define VALGRIND_ALIGN_STACK \
1746 "movq %%rsp,%%r14\n\t" \
1747 "andq $0xfffffffffffffff0,%%rsp\n\t"
1748 #define VALGRIND_RESTORE_STACK \
1749 "movq %%r14,%%rsp\n\t"
1750
1751
1752
1753
1754
1755
1756
1757
1758
1759
1760
1761
1762
1763
1764
1765
1766
1767
1768
1769
1770
1771
1772
1773
1774
1775 #define CALL_FN_W_v(lval, orig) \
1776 do { \
1777 volatile OrigFn _orig = (orig); \
1778 volatile unsigned long _argvec[1]; \
1779 volatile unsigned long _res; \
1780 _argvec[0] = (unsigned long)_orig.nraddr; \
1781 __asm__ volatile( \
1782 VALGRIND_CFI_PROLOGUE \
1783 VALGRIND_ALIGN_STACK \
1784 "subq $128,%%rsp\n\t" \
1785 "movq (%%rax), %%rax\n\t" \
1786 VALGRIND_CALL_NOREDIR_RAX \
1787 VALGRIND_RESTORE_STACK \
1788 VALGRIND_CFI_EPILOGUE \
1789 : "=a" (_res) \
1790 : "a" (&_argvec[0]) __FRAME_POINTER \
1791 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1792 ); \
1793 lval = (__typeof__(lval)) _res; \
1794 } while (0)
1795
1796 #define CALL_FN_W_W(lval, orig, arg1) \
1797 do { \
1798 volatile OrigFn _orig = (orig); \
1799 volatile unsigned long _argvec[2]; \
1800 volatile unsigned long _res; \
1801 _argvec[0] = (unsigned long)_orig.nraddr; \
1802 _argvec[1] = (unsigned long)(arg1); \
1803 __asm__ volatile( \
1804 VALGRIND_CFI_PROLOGUE \
1805 VALGRIND_ALIGN_STACK \
1806 "subq $128,%%rsp\n\t" \
1807 "movq 8(%%rax), %%rdi\n\t" \
1808 "movq (%%rax), %%rax\n\t" \
1809 VALGRIND_CALL_NOREDIR_RAX \
1810 VALGRIND_RESTORE_STACK \
1811 VALGRIND_CFI_EPILOGUE \
1812 : "=a" (_res) \
1813 : "a" (&_argvec[0]) __FRAME_POINTER \
1814 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1815 ); \
1816 lval = (__typeof__(lval)) _res; \
1817 } while (0)
1818
1819 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1820 do { \
1821 volatile OrigFn _orig = (orig); \
1822 volatile unsigned long _argvec[3]; \
1823 volatile unsigned long _res; \
1824 _argvec[0] = (unsigned long)_orig.nraddr; \
1825 _argvec[1] = (unsigned long)(arg1); \
1826 _argvec[2] = (unsigned long)(arg2); \
1827 __asm__ volatile( \
1828 VALGRIND_CFI_PROLOGUE \
1829 VALGRIND_ALIGN_STACK \
1830 "subq $128,%%rsp\n\t" \
1831 "movq 16(%%rax), %%rsi\n\t" \
1832 "movq 8(%%rax), %%rdi\n\t" \
1833 "movq (%%rax), %%rax\n\t" \
1834 VALGRIND_CALL_NOREDIR_RAX \
1835 VALGRIND_RESTORE_STACK \
1836 VALGRIND_CFI_EPILOGUE \
1837 : "=a" (_res) \
1838 : "a" (&_argvec[0]) __FRAME_POINTER \
1839 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1840 ); \
1841 lval = (__typeof__(lval)) _res; \
1842 } while (0)
1843
1844 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1845 do { \
1846 volatile OrigFn _orig = (orig); \
1847 volatile unsigned long _argvec[4]; \
1848 volatile unsigned long _res; \
1849 _argvec[0] = (unsigned long)_orig.nraddr; \
1850 _argvec[1] = (unsigned long)(arg1); \
1851 _argvec[2] = (unsigned long)(arg2); \
1852 _argvec[3] = (unsigned long)(arg3); \
1853 __asm__ volatile( \
1854 VALGRIND_CFI_PROLOGUE \
1855 VALGRIND_ALIGN_STACK \
1856 "subq $128,%%rsp\n\t" \
1857 "movq 24(%%rax), %%rdx\n\t" \
1858 "movq 16(%%rax), %%rsi\n\t" \
1859 "movq 8(%%rax), %%rdi\n\t" \
1860 "movq (%%rax), %%rax\n\t" \
1861 VALGRIND_CALL_NOREDIR_RAX \
1862 VALGRIND_RESTORE_STACK \
1863 VALGRIND_CFI_EPILOGUE \
1864 : "=a" (_res) \
1865 : "a" (&_argvec[0]) __FRAME_POINTER \
1866 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1867 ); \
1868 lval = (__typeof__(lval)) _res; \
1869 } while (0)
1870
1871 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1872 do { \
1873 volatile OrigFn _orig = (orig); \
1874 volatile unsigned long _argvec[5]; \
1875 volatile unsigned long _res; \
1876 _argvec[0] = (unsigned long)_orig.nraddr; \
1877 _argvec[1] = (unsigned long)(arg1); \
1878 _argvec[2] = (unsigned long)(arg2); \
1879 _argvec[3] = (unsigned long)(arg3); \
1880 _argvec[4] = (unsigned long)(arg4); \
1881 __asm__ volatile( \
1882 VALGRIND_CFI_PROLOGUE \
1883 VALGRIND_ALIGN_STACK \
1884 "subq $128,%%rsp\n\t" \
1885 "movq 32(%%rax), %%rcx\n\t" \
1886 "movq 24(%%rax), %%rdx\n\t" \
1887 "movq 16(%%rax), %%rsi\n\t" \
1888 "movq 8(%%rax), %%rdi\n\t" \
1889 "movq (%%rax), %%rax\n\t" \
1890 VALGRIND_CALL_NOREDIR_RAX \
1891 VALGRIND_RESTORE_STACK \
1892 VALGRIND_CFI_EPILOGUE \
1893 : "=a" (_res) \
1894 : "a" (&_argvec[0]) __FRAME_POINTER \
1895 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1896 ); \
1897 lval = (__typeof__(lval)) _res; \
1898 } while (0)
1899
1900 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1901 do { \
1902 volatile OrigFn _orig = (orig); \
1903 volatile unsigned long _argvec[6]; \
1904 volatile unsigned long _res; \
1905 _argvec[0] = (unsigned long)_orig.nraddr; \
1906 _argvec[1] = (unsigned long)(arg1); \
1907 _argvec[2] = (unsigned long)(arg2); \
1908 _argvec[3] = (unsigned long)(arg3); \
1909 _argvec[4] = (unsigned long)(arg4); \
1910 _argvec[5] = (unsigned long)(arg5); \
1911 __asm__ volatile( \
1912 VALGRIND_CFI_PROLOGUE \
1913 VALGRIND_ALIGN_STACK \
1914 "subq $128,%%rsp\n\t" \
1915 "movq 40(%%rax), %%r8\n\t" \
1916 "movq 32(%%rax), %%rcx\n\t" \
1917 "movq 24(%%rax), %%rdx\n\t" \
1918 "movq 16(%%rax), %%rsi\n\t" \
1919 "movq 8(%%rax), %%rdi\n\t" \
1920 "movq (%%rax), %%rax\n\t" \
1921 VALGRIND_CALL_NOREDIR_RAX \
1922 VALGRIND_RESTORE_STACK \
1923 VALGRIND_CFI_EPILOGUE \
1924 : "=a" (_res) \
1925 : "a" (&_argvec[0]) __FRAME_POINTER \
1926 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1927 ); \
1928 lval = (__typeof__(lval)) _res; \
1929 } while (0)
1930
1931 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1932 do { \
1933 volatile OrigFn _orig = (orig); \
1934 volatile unsigned long _argvec[7]; \
1935 volatile unsigned long _res; \
1936 _argvec[0] = (unsigned long)_orig.nraddr; \
1937 _argvec[1] = (unsigned long)(arg1); \
1938 _argvec[2] = (unsigned long)(arg2); \
1939 _argvec[3] = (unsigned long)(arg3); \
1940 _argvec[4] = (unsigned long)(arg4); \
1941 _argvec[5] = (unsigned long)(arg5); \
1942 _argvec[6] = (unsigned long)(arg6); \
1943 __asm__ volatile( \
1944 VALGRIND_CFI_PROLOGUE \
1945 VALGRIND_ALIGN_STACK \
1946 "subq $128,%%rsp\n\t" \
1947 "movq 48(%%rax), %%r9\n\t" \
1948 "movq 40(%%rax), %%r8\n\t" \
1949 "movq 32(%%rax), %%rcx\n\t" \
1950 "movq 24(%%rax), %%rdx\n\t" \
1951 "movq 16(%%rax), %%rsi\n\t" \
1952 "movq 8(%%rax), %%rdi\n\t" \
1953 "movq (%%rax), %%rax\n\t" \
1954 VALGRIND_CALL_NOREDIR_RAX \
1955 VALGRIND_RESTORE_STACK \
1956 VALGRIND_CFI_EPILOGUE \
1957 : "=a" (_res) \
1958 : "a" (&_argvec[0]) __FRAME_POINTER \
1959 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1960 ); \
1961 lval = (__typeof__(lval)) _res; \
1962 } while (0)
1963
1964 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1965 arg7) \
1966 do { \
1967 volatile OrigFn _orig = (orig); \
1968 volatile unsigned long _argvec[8]; \
1969 volatile unsigned long _res; \
1970 _argvec[0] = (unsigned long)_orig.nraddr; \
1971 _argvec[1] = (unsigned long)(arg1); \
1972 _argvec[2] = (unsigned long)(arg2); \
1973 _argvec[3] = (unsigned long)(arg3); \
1974 _argvec[4] = (unsigned long)(arg4); \
1975 _argvec[5] = (unsigned long)(arg5); \
1976 _argvec[6] = (unsigned long)(arg6); \
1977 _argvec[7] = (unsigned long)(arg7); \
1978 __asm__ volatile( \
1979 VALGRIND_CFI_PROLOGUE \
1980 VALGRIND_ALIGN_STACK \
1981 "subq $136,%%rsp\n\t" \
1982 "pushq 56(%%rax)\n\t" \
1983 "movq 48(%%rax), %%r9\n\t" \
1984 "movq 40(%%rax), %%r8\n\t" \
1985 "movq 32(%%rax), %%rcx\n\t" \
1986 "movq 24(%%rax), %%rdx\n\t" \
1987 "movq 16(%%rax), %%rsi\n\t" \
1988 "movq 8(%%rax), %%rdi\n\t" \
1989 "movq (%%rax), %%rax\n\t" \
1990 VALGRIND_CALL_NOREDIR_RAX \
1991 VALGRIND_RESTORE_STACK \
1992 VALGRIND_CFI_EPILOGUE \
1993 : "=a" (_res) \
1994 : "a" (&_argvec[0]) __FRAME_POINTER \
1995 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1996 ); \
1997 lval = (__typeof__(lval)) _res; \
1998 } while (0)
1999
2000 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2001 arg7,arg8) \
2002 do { \
2003 volatile OrigFn _orig = (orig); \
2004 volatile unsigned long _argvec[9]; \
2005 volatile unsigned long _res; \
2006 _argvec[0] = (unsigned long)_orig.nraddr; \
2007 _argvec[1] = (unsigned long)(arg1); \
2008 _argvec[2] = (unsigned long)(arg2); \
2009 _argvec[3] = (unsigned long)(arg3); \
2010 _argvec[4] = (unsigned long)(arg4); \
2011 _argvec[5] = (unsigned long)(arg5); \
2012 _argvec[6] = (unsigned long)(arg6); \
2013 _argvec[7] = (unsigned long)(arg7); \
2014 _argvec[8] = (unsigned long)(arg8); \
2015 __asm__ volatile( \
2016 VALGRIND_CFI_PROLOGUE \
2017 VALGRIND_ALIGN_STACK \
2018 "subq $128,%%rsp\n\t" \
2019 "pushq 64(%%rax)\n\t" \
2020 "pushq 56(%%rax)\n\t" \
2021 "movq 48(%%rax), %%r9\n\t" \
2022 "movq 40(%%rax), %%r8\n\t" \
2023 "movq 32(%%rax), %%rcx\n\t" \
2024 "movq 24(%%rax), %%rdx\n\t" \
2025 "movq 16(%%rax), %%rsi\n\t" \
2026 "movq 8(%%rax), %%rdi\n\t" \
2027 "movq (%%rax), %%rax\n\t" \
2028 VALGRIND_CALL_NOREDIR_RAX \
2029 VALGRIND_RESTORE_STACK \
2030 VALGRIND_CFI_EPILOGUE \
2031 : "=a" (_res) \
2032 : "a" (&_argvec[0]) __FRAME_POINTER \
2033 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2034 ); \
2035 lval = (__typeof__(lval)) _res; \
2036 } while (0)
2037
2038 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2039 arg7,arg8,arg9) \
2040 do { \
2041 volatile OrigFn _orig = (orig); \
2042 volatile unsigned long _argvec[10]; \
2043 volatile unsigned long _res; \
2044 _argvec[0] = (unsigned long)_orig.nraddr; \
2045 _argvec[1] = (unsigned long)(arg1); \
2046 _argvec[2] = (unsigned long)(arg2); \
2047 _argvec[3] = (unsigned long)(arg3); \
2048 _argvec[4] = (unsigned long)(arg4); \
2049 _argvec[5] = (unsigned long)(arg5); \
2050 _argvec[6] = (unsigned long)(arg6); \
2051 _argvec[7] = (unsigned long)(arg7); \
2052 _argvec[8] = (unsigned long)(arg8); \
2053 _argvec[9] = (unsigned long)(arg9); \
2054 __asm__ volatile( \
2055 VALGRIND_CFI_PROLOGUE \
2056 VALGRIND_ALIGN_STACK \
2057 "subq $136,%%rsp\n\t" \
2058 "pushq 72(%%rax)\n\t" \
2059 "pushq 64(%%rax)\n\t" \
2060 "pushq 56(%%rax)\n\t" \
2061 "movq 48(%%rax), %%r9\n\t" \
2062 "movq 40(%%rax), %%r8\n\t" \
2063 "movq 32(%%rax), %%rcx\n\t" \
2064 "movq 24(%%rax), %%rdx\n\t" \
2065 "movq 16(%%rax), %%rsi\n\t" \
2066 "movq 8(%%rax), %%rdi\n\t" \
2067 "movq (%%rax), %%rax\n\t" \
2068 VALGRIND_CALL_NOREDIR_RAX \
2069 VALGRIND_RESTORE_STACK \
2070 VALGRIND_CFI_EPILOGUE \
2071 : "=a" (_res) \
2072 : "a" (&_argvec[0]) __FRAME_POINTER \
2073 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2074 ); \
2075 lval = (__typeof__(lval)) _res; \
2076 } while (0)
2077
2078 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2079 arg7,arg8,arg9,arg10) \
2080 do { \
2081 volatile OrigFn _orig = (orig); \
2082 volatile unsigned long _argvec[11]; \
2083 volatile unsigned long _res; \
2084 _argvec[0] = (unsigned long)_orig.nraddr; \
2085 _argvec[1] = (unsigned long)(arg1); \
2086 _argvec[2] = (unsigned long)(arg2); \
2087 _argvec[3] = (unsigned long)(arg3); \
2088 _argvec[4] = (unsigned long)(arg4); \
2089 _argvec[5] = (unsigned long)(arg5); \
2090 _argvec[6] = (unsigned long)(arg6); \
2091 _argvec[7] = (unsigned long)(arg7); \
2092 _argvec[8] = (unsigned long)(arg8); \
2093 _argvec[9] = (unsigned long)(arg9); \
2094 _argvec[10] = (unsigned long)(arg10); \
2095 __asm__ volatile( \
2096 VALGRIND_CFI_PROLOGUE \
2097 VALGRIND_ALIGN_STACK \
2098 "subq $128,%%rsp\n\t" \
2099 "pushq 80(%%rax)\n\t" \
2100 "pushq 72(%%rax)\n\t" \
2101 "pushq 64(%%rax)\n\t" \
2102 "pushq 56(%%rax)\n\t" \
2103 "movq 48(%%rax), %%r9\n\t" \
2104 "movq 40(%%rax), %%r8\n\t" \
2105 "movq 32(%%rax), %%rcx\n\t" \
2106 "movq 24(%%rax), %%rdx\n\t" \
2107 "movq 16(%%rax), %%rsi\n\t" \
2108 "movq 8(%%rax), %%rdi\n\t" \
2109 "movq (%%rax), %%rax\n\t" \
2110 VALGRIND_CALL_NOREDIR_RAX \
2111 VALGRIND_RESTORE_STACK \
2112 VALGRIND_CFI_EPILOGUE \
2113 : "=a" (_res) \
2114 : "a" (&_argvec[0]) __FRAME_POINTER \
2115 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2116 ); \
2117 lval = (__typeof__(lval)) _res; \
2118 } while (0)
2119
2120 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2121 arg7,arg8,arg9,arg10,arg11) \
2122 do { \
2123 volatile OrigFn _orig = (orig); \
2124 volatile unsigned long _argvec[12]; \
2125 volatile unsigned long _res; \
2126 _argvec[0] = (unsigned long)_orig.nraddr; \
2127 _argvec[1] = (unsigned long)(arg1); \
2128 _argvec[2] = (unsigned long)(arg2); \
2129 _argvec[3] = (unsigned long)(arg3); \
2130 _argvec[4] = (unsigned long)(arg4); \
2131 _argvec[5] = (unsigned long)(arg5); \
2132 _argvec[6] = (unsigned long)(arg6); \
2133 _argvec[7] = (unsigned long)(arg7); \
2134 _argvec[8] = (unsigned long)(arg8); \
2135 _argvec[9] = (unsigned long)(arg9); \
2136 _argvec[10] = (unsigned long)(arg10); \
2137 _argvec[11] = (unsigned long)(arg11); \
2138 __asm__ volatile( \
2139 VALGRIND_CFI_PROLOGUE \
2140 VALGRIND_ALIGN_STACK \
2141 "subq $136,%%rsp\n\t" \
2142 "pushq 88(%%rax)\n\t" \
2143 "pushq 80(%%rax)\n\t" \
2144 "pushq 72(%%rax)\n\t" \
2145 "pushq 64(%%rax)\n\t" \
2146 "pushq 56(%%rax)\n\t" \
2147 "movq 48(%%rax), %%r9\n\t" \
2148 "movq 40(%%rax), %%r8\n\t" \
2149 "movq 32(%%rax), %%rcx\n\t" \
2150 "movq 24(%%rax), %%rdx\n\t" \
2151 "movq 16(%%rax), %%rsi\n\t" \
2152 "movq 8(%%rax), %%rdi\n\t" \
2153 "movq (%%rax), %%rax\n\t" \
2154 VALGRIND_CALL_NOREDIR_RAX \
2155 VALGRIND_RESTORE_STACK \
2156 VALGRIND_CFI_EPILOGUE \
2157 : "=a" (_res) \
2158 : "a" (&_argvec[0]) __FRAME_POINTER \
2159 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2160 ); \
2161 lval = (__typeof__(lval)) _res; \
2162 } while (0)
2163
2164 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2165 arg7,arg8,arg9,arg10,arg11,arg12) \
2166 do { \
2167 volatile OrigFn _orig = (orig); \
2168 volatile unsigned long _argvec[13]; \
2169 volatile unsigned long _res; \
2170 _argvec[0] = (unsigned long)_orig.nraddr; \
2171 _argvec[1] = (unsigned long)(arg1); \
2172 _argvec[2] = (unsigned long)(arg2); \
2173 _argvec[3] = (unsigned long)(arg3); \
2174 _argvec[4] = (unsigned long)(arg4); \
2175 _argvec[5] = (unsigned long)(arg5); \
2176 _argvec[6] = (unsigned long)(arg6); \
2177 _argvec[7] = (unsigned long)(arg7); \
2178 _argvec[8] = (unsigned long)(arg8); \
2179 _argvec[9] = (unsigned long)(arg9); \
2180 _argvec[10] = (unsigned long)(arg10); \
2181 _argvec[11] = (unsigned long)(arg11); \
2182 _argvec[12] = (unsigned long)(arg12); \
2183 __asm__ volatile( \
2184 VALGRIND_CFI_PROLOGUE \
2185 VALGRIND_ALIGN_STACK \
2186 "subq $128,%%rsp\n\t" \
2187 "pushq 96(%%rax)\n\t" \
2188 "pushq 88(%%rax)\n\t" \
2189 "pushq 80(%%rax)\n\t" \
2190 "pushq 72(%%rax)\n\t" \
2191 "pushq 64(%%rax)\n\t" \
2192 "pushq 56(%%rax)\n\t" \
2193 "movq 48(%%rax), %%r9\n\t" \
2194 "movq 40(%%rax), %%r8\n\t" \
2195 "movq 32(%%rax), %%rcx\n\t" \
2196 "movq 24(%%rax), %%rdx\n\t" \
2197 "movq 16(%%rax), %%rsi\n\t" \
2198 "movq 8(%%rax), %%rdi\n\t" \
2199 "movq (%%rax), %%rax\n\t" \
2200 VALGRIND_CALL_NOREDIR_RAX \
2201 VALGRIND_RESTORE_STACK \
2202 VALGRIND_CFI_EPILOGUE \
2203 : "=a" (_res) \
2204 : "a" (&_argvec[0]) __FRAME_POINTER \
2205 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2206 ); \
2207 lval = (__typeof__(lval)) _res; \
2208 } while (0)
2209
2210 #endif
2211
2212
2213
2214 #if defined(PLAT_ppc32_linux)
2215
2216
2217
2218
2219
2220
2221
2222
2223
2224
2225
2226
2227
2228
2229
2230
2231
2232
2233
2234
2235
2236
2237
2238
2239
2240 #define __CALLER_SAVED_REGS \
2241 "lr", "ctr", "xer", \
2242 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2243 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2244 "r11", "r12", "r13"
2245
2246
2247
2248
2249
2250
2251 #define VALGRIND_ALIGN_STACK \
2252 "mr 28,1\n\t" \
2253 "rlwinm 1,1,0,0,27\n\t"
2254 #define VALGRIND_RESTORE_STACK \
2255 "mr 1,28\n\t"
2256
2257
2258
2259
2260 #define CALL_FN_W_v(lval, orig) \
2261 do { \
2262 volatile OrigFn _orig = (orig); \
2263 volatile unsigned long _argvec[1]; \
2264 volatile unsigned long _res; \
2265 _argvec[0] = (unsigned long)_orig.nraddr; \
2266 __asm__ volatile( \
2267 VALGRIND_ALIGN_STACK \
2268 "mr 11,%1\n\t" \
2269 "lwz 11,0(11)\n\t" \
2270 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2271 VALGRIND_RESTORE_STACK \
2272 "mr %0,3" \
2273 : "=r" (_res) \
2274 : "r" (&_argvec[0]) \
2275 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2276 ); \
2277 lval = (__typeof__(lval)) _res; \
2278 } while (0)
2279
2280 #define CALL_FN_W_W(lval, orig, arg1) \
2281 do { \
2282 volatile OrigFn _orig = (orig); \
2283 volatile unsigned long _argvec[2]; \
2284 volatile unsigned long _res; \
2285 _argvec[0] = (unsigned long)_orig.nraddr; \
2286 _argvec[1] = (unsigned long)arg1; \
2287 __asm__ volatile( \
2288 VALGRIND_ALIGN_STACK \
2289 "mr 11,%1\n\t" \
2290 "lwz 3,4(11)\n\t" \
2291 "lwz 11,0(11)\n\t" \
2292 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2293 VALGRIND_RESTORE_STACK \
2294 "mr %0,3" \
2295 : "=r" (_res) \
2296 : "r" (&_argvec[0]) \
2297 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2298 ); \
2299 lval = (__typeof__(lval)) _res; \
2300 } while (0)
2301
2302 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2303 do { \
2304 volatile OrigFn _orig = (orig); \
2305 volatile unsigned long _argvec[3]; \
2306 volatile unsigned long _res; \
2307 _argvec[0] = (unsigned long)_orig.nraddr; \
2308 _argvec[1] = (unsigned long)arg1; \
2309 _argvec[2] = (unsigned long)arg2; \
2310 __asm__ volatile( \
2311 VALGRIND_ALIGN_STACK \
2312 "mr 11,%1\n\t" \
2313 "lwz 3,4(11)\n\t" \
2314 "lwz 4,8(11)\n\t" \
2315 "lwz 11,0(11)\n\t" \
2316 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2317 VALGRIND_RESTORE_STACK \
2318 "mr %0,3" \
2319 : "=r" (_res) \
2320 : "r" (&_argvec[0]) \
2321 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2322 ); \
2323 lval = (__typeof__(lval)) _res; \
2324 } while (0)
2325
2326 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2327 do { \
2328 volatile OrigFn _orig = (orig); \
2329 volatile unsigned long _argvec[4]; \
2330 volatile unsigned long _res; \
2331 _argvec[0] = (unsigned long)_orig.nraddr; \
2332 _argvec[1] = (unsigned long)arg1; \
2333 _argvec[2] = (unsigned long)arg2; \
2334 _argvec[3] = (unsigned long)arg3; \
2335 __asm__ volatile( \
2336 VALGRIND_ALIGN_STACK \
2337 "mr 11,%1\n\t" \
2338 "lwz 3,4(11)\n\t" \
2339 "lwz 4,8(11)\n\t" \
2340 "lwz 5,12(11)\n\t" \
2341 "lwz 11,0(11)\n\t" \
2342 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2343 VALGRIND_RESTORE_STACK \
2344 "mr %0,3" \
2345 : "=r" (_res) \
2346 : "r" (&_argvec[0]) \
2347 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2348 ); \
2349 lval = (__typeof__(lval)) _res; \
2350 } while (0)
2351
2352 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2353 do { \
2354 volatile OrigFn _orig = (orig); \
2355 volatile unsigned long _argvec[5]; \
2356 volatile unsigned long _res; \
2357 _argvec[0] = (unsigned long)_orig.nraddr; \
2358 _argvec[1] = (unsigned long)arg1; \
2359 _argvec[2] = (unsigned long)arg2; \
2360 _argvec[3] = (unsigned long)arg3; \
2361 _argvec[4] = (unsigned long)arg4; \
2362 __asm__ volatile( \
2363 VALGRIND_ALIGN_STACK \
2364 "mr 11,%1\n\t" \
2365 "lwz 3,4(11)\n\t" \
2366 "lwz 4,8(11)\n\t" \
2367 "lwz 5,12(11)\n\t" \
2368 "lwz 6,16(11)\n\t" \
2369 "lwz 11,0(11)\n\t" \
2370 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2371 VALGRIND_RESTORE_STACK \
2372 "mr %0,3" \
2373 : "=r" (_res) \
2374 : "r" (&_argvec[0]) \
2375 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2376 ); \
2377 lval = (__typeof__(lval)) _res; \
2378 } while (0)
2379
2380 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2381 do { \
2382 volatile OrigFn _orig = (orig); \
2383 volatile unsigned long _argvec[6]; \
2384 volatile unsigned long _res; \
2385 _argvec[0] = (unsigned long)_orig.nraddr; \
2386 _argvec[1] = (unsigned long)arg1; \
2387 _argvec[2] = (unsigned long)arg2; \
2388 _argvec[3] = (unsigned long)arg3; \
2389 _argvec[4] = (unsigned long)arg4; \
2390 _argvec[5] = (unsigned long)arg5; \
2391 __asm__ volatile( \
2392 VALGRIND_ALIGN_STACK \
2393 "mr 11,%1\n\t" \
2394 "lwz 3,4(11)\n\t" \
2395 "lwz 4,8(11)\n\t" \
2396 "lwz 5,12(11)\n\t" \
2397 "lwz 6,16(11)\n\t" \
2398 "lwz 7,20(11)\n\t" \
2399 "lwz 11,0(11)\n\t" \
2400 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2401 VALGRIND_RESTORE_STACK \
2402 "mr %0,3" \
2403 : "=r" (_res) \
2404 : "r" (&_argvec[0]) \
2405 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2406 ); \
2407 lval = (__typeof__(lval)) _res; \
2408 } while (0)
2409
2410 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2411 do { \
2412 volatile OrigFn _orig = (orig); \
2413 volatile unsigned long _argvec[7]; \
2414 volatile unsigned long _res; \
2415 _argvec[0] = (unsigned long)_orig.nraddr; \
2416 _argvec[1] = (unsigned long)arg1; \
2417 _argvec[2] = (unsigned long)arg2; \
2418 _argvec[3] = (unsigned long)arg3; \
2419 _argvec[4] = (unsigned long)arg4; \
2420 _argvec[5] = (unsigned long)arg5; \
2421 _argvec[6] = (unsigned long)arg6; \
2422 __asm__ volatile( \
2423 VALGRIND_ALIGN_STACK \
2424 "mr 11,%1\n\t" \
2425 "lwz 3,4(11)\n\t" \
2426 "lwz 4,8(11)\n\t" \
2427 "lwz 5,12(11)\n\t" \
2428 "lwz 6,16(11)\n\t" \
2429 "lwz 7,20(11)\n\t" \
2430 "lwz 8,24(11)\n\t" \
2431 "lwz 11,0(11)\n\t" \
2432 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2433 VALGRIND_RESTORE_STACK \
2434 "mr %0,3" \
2435 : "=r" (_res) \
2436 : "r" (&_argvec[0]) \
2437 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2438 ); \
2439 lval = (__typeof__(lval)) _res; \
2440 } while (0)
2441
2442 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2443 arg7) \
2444 do { \
2445 volatile OrigFn _orig = (orig); \
2446 volatile unsigned long _argvec[8]; \
2447 volatile unsigned long _res; \
2448 _argvec[0] = (unsigned long)_orig.nraddr; \
2449 _argvec[1] = (unsigned long)arg1; \
2450 _argvec[2] = (unsigned long)arg2; \
2451 _argvec[3] = (unsigned long)arg3; \
2452 _argvec[4] = (unsigned long)arg4; \
2453 _argvec[5] = (unsigned long)arg5; \
2454 _argvec[6] = (unsigned long)arg6; \
2455 _argvec[7] = (unsigned long)arg7; \
2456 __asm__ volatile( \
2457 VALGRIND_ALIGN_STACK \
2458 "mr 11,%1\n\t" \
2459 "lwz 3,4(11)\n\t" \
2460 "lwz 4,8(11)\n\t" \
2461 "lwz 5,12(11)\n\t" \
2462 "lwz 6,16(11)\n\t" \
2463 "lwz 7,20(11)\n\t" \
2464 "lwz 8,24(11)\n\t" \
2465 "lwz 9,28(11)\n\t" \
2466 "lwz 11,0(11)\n\t" \
2467 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2468 VALGRIND_RESTORE_STACK \
2469 "mr %0,3" \
2470 : "=r" (_res) \
2471 : "r" (&_argvec[0]) \
2472 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2473 ); \
2474 lval = (__typeof__(lval)) _res; \
2475 } while (0)
2476
2477 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2478 arg7,arg8) \
2479 do { \
2480 volatile OrigFn _orig = (orig); \
2481 volatile unsigned long _argvec[9]; \
2482 volatile unsigned long _res; \
2483 _argvec[0] = (unsigned long)_orig.nraddr; \
2484 _argvec[1] = (unsigned long)arg1; \
2485 _argvec[2] = (unsigned long)arg2; \
2486 _argvec[3] = (unsigned long)arg3; \
2487 _argvec[4] = (unsigned long)arg4; \
2488 _argvec[5] = (unsigned long)arg5; \
2489 _argvec[6] = (unsigned long)arg6; \
2490 _argvec[7] = (unsigned long)arg7; \
2491 _argvec[8] = (unsigned long)arg8; \
2492 __asm__ volatile( \
2493 VALGRIND_ALIGN_STACK \
2494 "mr 11,%1\n\t" \
2495 "lwz 3,4(11)\n\t" \
2496 "lwz 4,8(11)\n\t" \
2497 "lwz 5,12(11)\n\t" \
2498 "lwz 6,16(11)\n\t" \
2499 "lwz 7,20(11)\n\t" \
2500 "lwz 8,24(11)\n\t" \
2501 "lwz 9,28(11)\n\t" \
2502 "lwz 10,32(11)\n\t" \
2503 "lwz 11,0(11)\n\t" \
2504 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2505 VALGRIND_RESTORE_STACK \
2506 "mr %0,3" \
2507 : "=r" (_res) \
2508 : "r" (&_argvec[0]) \
2509 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2510 ); \
2511 lval = (__typeof__(lval)) _res; \
2512 } while (0)
2513
2514 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2515 arg7,arg8,arg9) \
2516 do { \
2517 volatile OrigFn _orig = (orig); \
2518 volatile unsigned long _argvec[10]; \
2519 volatile unsigned long _res; \
2520 _argvec[0] = (unsigned long)_orig.nraddr; \
2521 _argvec[1] = (unsigned long)arg1; \
2522 _argvec[2] = (unsigned long)arg2; \
2523 _argvec[3] = (unsigned long)arg3; \
2524 _argvec[4] = (unsigned long)arg4; \
2525 _argvec[5] = (unsigned long)arg5; \
2526 _argvec[6] = (unsigned long)arg6; \
2527 _argvec[7] = (unsigned long)arg7; \
2528 _argvec[8] = (unsigned long)arg8; \
2529 _argvec[9] = (unsigned long)arg9; \
2530 __asm__ volatile( \
2531 VALGRIND_ALIGN_STACK \
2532 "mr 11,%1\n\t" \
2533 "addi 1,1,-16\n\t" \
2534 \
2535 "lwz 3,36(11)\n\t" \
2536 "stw 3,8(1)\n\t" \
2537 \
2538 "lwz 3,4(11)\n\t" \
2539 "lwz 4,8(11)\n\t" \
2540 "lwz 5,12(11)\n\t" \
2541 "lwz 6,16(11)\n\t" \
2542 "lwz 7,20(11)\n\t" \
2543 "lwz 8,24(11)\n\t" \
2544 "lwz 9,28(11)\n\t" \
2545 "lwz 10,32(11)\n\t" \
2546 "lwz 11,0(11)\n\t" \
2547 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2548 VALGRIND_RESTORE_STACK \
2549 "mr %0,3" \
2550 : "=r" (_res) \
2551 : "r" (&_argvec[0]) \
2552 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2553 ); \
2554 lval = (__typeof__(lval)) _res; \
2555 } while (0)
2556
2557 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2558 arg7,arg8,arg9,arg10) \
2559 do { \
2560 volatile OrigFn _orig = (orig); \
2561 volatile unsigned long _argvec[11]; \
2562 volatile unsigned long _res; \
2563 _argvec[0] = (unsigned long)_orig.nraddr; \
2564 _argvec[1] = (unsigned long)arg1; \
2565 _argvec[2] = (unsigned long)arg2; \
2566 _argvec[3] = (unsigned long)arg3; \
2567 _argvec[4] = (unsigned long)arg4; \
2568 _argvec[5] = (unsigned long)arg5; \
2569 _argvec[6] = (unsigned long)arg6; \
2570 _argvec[7] = (unsigned long)arg7; \
2571 _argvec[8] = (unsigned long)arg8; \
2572 _argvec[9] = (unsigned long)arg9; \
2573 _argvec[10] = (unsigned long)arg10; \
2574 __asm__ volatile( \
2575 VALGRIND_ALIGN_STACK \
2576 "mr 11,%1\n\t" \
2577 "addi 1,1,-16\n\t" \
2578 \
2579 "lwz 3,40(11)\n\t" \
2580 "stw 3,12(1)\n\t" \
2581 \
2582 "lwz 3,36(11)\n\t" \
2583 "stw 3,8(1)\n\t" \
2584 \
2585 "lwz 3,4(11)\n\t" \
2586 "lwz 4,8(11)\n\t" \
2587 "lwz 5,12(11)\n\t" \
2588 "lwz 6,16(11)\n\t" \
2589 "lwz 7,20(11)\n\t" \
2590 "lwz 8,24(11)\n\t" \
2591 "lwz 9,28(11)\n\t" \
2592 "lwz 10,32(11)\n\t" \
2593 "lwz 11,0(11)\n\t" \
2594 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2595 VALGRIND_RESTORE_STACK \
2596 "mr %0,3" \
2597 : "=r" (_res) \
2598 : "r" (&_argvec[0]) \
2599 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2600 ); \
2601 lval = (__typeof__(lval)) _res; \
2602 } while (0)
2603
2604 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2605 arg7,arg8,arg9,arg10,arg11) \
2606 do { \
2607 volatile OrigFn _orig = (orig); \
2608 volatile unsigned long _argvec[12]; \
2609 volatile unsigned long _res; \
2610 _argvec[0] = (unsigned long)_orig.nraddr; \
2611 _argvec[1] = (unsigned long)arg1; \
2612 _argvec[2] = (unsigned long)arg2; \
2613 _argvec[3] = (unsigned long)arg3; \
2614 _argvec[4] = (unsigned long)arg4; \
2615 _argvec[5] = (unsigned long)arg5; \
2616 _argvec[6] = (unsigned long)arg6; \
2617 _argvec[7] = (unsigned long)arg7; \
2618 _argvec[8] = (unsigned long)arg8; \
2619 _argvec[9] = (unsigned long)arg9; \
2620 _argvec[10] = (unsigned long)arg10; \
2621 _argvec[11] = (unsigned long)arg11; \
2622 __asm__ volatile( \
2623 VALGRIND_ALIGN_STACK \
2624 "mr 11,%1\n\t" \
2625 "addi 1,1,-32\n\t" \
2626 \
2627 "lwz 3,44(11)\n\t" \
2628 "stw 3,16(1)\n\t" \
2629 \
2630 "lwz 3,40(11)\n\t" \
2631 "stw 3,12(1)\n\t" \
2632 \
2633 "lwz 3,36(11)\n\t" \
2634 "stw 3,8(1)\n\t" \
2635 \
2636 "lwz 3,4(11)\n\t" \
2637 "lwz 4,8(11)\n\t" \
2638 "lwz 5,12(11)\n\t" \
2639 "lwz 6,16(11)\n\t" \
2640 "lwz 7,20(11)\n\t" \
2641 "lwz 8,24(11)\n\t" \
2642 "lwz 9,28(11)\n\t" \
2643 "lwz 10,32(11)\n\t" \
2644 "lwz 11,0(11)\n\t" \
2645 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2646 VALGRIND_RESTORE_STACK \
2647 "mr %0,3" \
2648 : "=r" (_res) \
2649 : "r" (&_argvec[0]) \
2650 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2651 ); \
2652 lval = (__typeof__(lval)) _res; \
2653 } while (0)
2654
2655 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2656 arg7,arg8,arg9,arg10,arg11,arg12) \
2657 do { \
2658 volatile OrigFn _orig = (orig); \
2659 volatile unsigned long _argvec[13]; \
2660 volatile unsigned long _res; \
2661 _argvec[0] = (unsigned long)_orig.nraddr; \
2662 _argvec[1] = (unsigned long)arg1; \
2663 _argvec[2] = (unsigned long)arg2; \
2664 _argvec[3] = (unsigned long)arg3; \
2665 _argvec[4] = (unsigned long)arg4; \
2666 _argvec[5] = (unsigned long)arg5; \
2667 _argvec[6] = (unsigned long)arg6; \
2668 _argvec[7] = (unsigned long)arg7; \
2669 _argvec[8] = (unsigned long)arg8; \
2670 _argvec[9] = (unsigned long)arg9; \
2671 _argvec[10] = (unsigned long)arg10; \
2672 _argvec[11] = (unsigned long)arg11; \
2673 _argvec[12] = (unsigned long)arg12; \
2674 __asm__ volatile( \
2675 VALGRIND_ALIGN_STACK \
2676 "mr 11,%1\n\t" \
2677 "addi 1,1,-32\n\t" \
2678 \
2679 "lwz 3,48(11)\n\t" \
2680 "stw 3,20(1)\n\t" \
2681 \
2682 "lwz 3,44(11)\n\t" \
2683 "stw 3,16(1)\n\t" \
2684 \
2685 "lwz 3,40(11)\n\t" \
2686 "stw 3,12(1)\n\t" \
2687 \
2688 "lwz 3,36(11)\n\t" \
2689 "stw 3,8(1)\n\t" \
2690 \
2691 "lwz 3,4(11)\n\t" \
2692 "lwz 4,8(11)\n\t" \
2693 "lwz 5,12(11)\n\t" \
2694 "lwz 6,16(11)\n\t" \
2695 "lwz 7,20(11)\n\t" \
2696 "lwz 8,24(11)\n\t" \
2697 "lwz 9,28(11)\n\t" \
2698 "lwz 10,32(11)\n\t" \
2699 "lwz 11,0(11)\n\t" \
2700 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2701 VALGRIND_RESTORE_STACK \
2702 "mr %0,3" \
2703 : "=r" (_res) \
2704 : "r" (&_argvec[0]) \
2705 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2706 ); \
2707 lval = (__typeof__(lval)) _res; \
2708 } while (0)
2709
2710 #endif
2711
2712
2713
2714 #if defined(PLAT_ppc64be_linux)
2715
2716
2717
2718
2719 #define __CALLER_SAVED_REGS \
2720 "lr", "ctr", "xer", \
2721 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2722 "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2723 "r11", "r12", "r13"
2724
2725
2726
2727
2728
2729
2730 #define VALGRIND_ALIGN_STACK \
2731 "mr 28,1\n\t" \
2732 "rldicr 1,1,0,59\n\t"
2733 #define VALGRIND_RESTORE_STACK \
2734 "mr 1,28\n\t"
2735
2736
2737
2738
2739 #define CALL_FN_W_v(lval, orig) \
2740 do { \
2741 volatile OrigFn _orig = (orig); \
2742 volatile unsigned long _argvec[3+0]; \
2743 volatile unsigned long _res; \
2744 \
2745 _argvec[1] = (unsigned long)_orig.r2; \
2746 _argvec[2] = (unsigned long)_orig.nraddr; \
2747 __asm__ volatile( \
2748 VALGRIND_ALIGN_STACK \
2749 "mr 11,%1\n\t" \
2750 "std 2,-16(11)\n\t" \
2751 "ld 2,-8(11)\n\t" \
2752 "ld 11, 0(11)\n\t" \
2753 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2754 "mr 11,%1\n\t" \
2755 "mr %0,3\n\t" \
2756 "ld 2,-16(11)\n\t" \
2757 VALGRIND_RESTORE_STACK \
2758 : "=r" (_res) \
2759 : "r" (&_argvec[2]) \
2760 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2761 ); \
2762 lval = (__typeof__(lval)) _res; \
2763 } while (0)
2764
2765 #define CALL_FN_W_W(lval, orig, arg1) \
2766 do { \
2767 volatile OrigFn _orig = (orig); \
2768 volatile unsigned long _argvec[3+1]; \
2769 volatile unsigned long _res; \
2770 \
2771 _argvec[1] = (unsigned long)_orig.r2; \
2772 _argvec[2] = (unsigned long)_orig.nraddr; \
2773 _argvec[2+1] = (unsigned long)arg1; \
2774 __asm__ volatile( \
2775 VALGRIND_ALIGN_STACK \
2776 "mr 11,%1\n\t" \
2777 "std 2,-16(11)\n\t" \
2778 "ld 2,-8(11)\n\t" \
2779 "ld 3, 8(11)\n\t" \
2780 "ld 11, 0(11)\n\t" \
2781 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2782 "mr 11,%1\n\t" \
2783 "mr %0,3\n\t" \
2784 "ld 2,-16(11)\n\t" \
2785 VALGRIND_RESTORE_STACK \
2786 : "=r" (_res) \
2787 : "r" (&_argvec[2]) \
2788 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2789 ); \
2790 lval = (__typeof__(lval)) _res; \
2791 } while (0)
2792
2793 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2794 do { \
2795 volatile OrigFn _orig = (orig); \
2796 volatile unsigned long _argvec[3+2]; \
2797 volatile unsigned long _res; \
2798 \
2799 _argvec[1] = (unsigned long)_orig.r2; \
2800 _argvec[2] = (unsigned long)_orig.nraddr; \
2801 _argvec[2+1] = (unsigned long)arg1; \
2802 _argvec[2+2] = (unsigned long)arg2; \
2803 __asm__ volatile( \
2804 VALGRIND_ALIGN_STACK \
2805 "mr 11,%1\n\t" \
2806 "std 2,-16(11)\n\t" \
2807 "ld 2,-8(11)\n\t" \
2808 "ld 3, 8(11)\n\t" \
2809 "ld 4, 16(11)\n\t" \
2810 "ld 11, 0(11)\n\t" \
2811 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2812 "mr 11,%1\n\t" \
2813 "mr %0,3\n\t" \
2814 "ld 2,-16(11)\n\t" \
2815 VALGRIND_RESTORE_STACK \
2816 : "=r" (_res) \
2817 : "r" (&_argvec[2]) \
2818 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2819 ); \
2820 lval = (__typeof__(lval)) _res; \
2821 } while (0)
2822
2823 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2824 do { \
2825 volatile OrigFn _orig = (orig); \
2826 volatile unsigned long _argvec[3+3]; \
2827 volatile unsigned long _res; \
2828 \
2829 _argvec[1] = (unsigned long)_orig.r2; \
2830 _argvec[2] = (unsigned long)_orig.nraddr; \
2831 _argvec[2+1] = (unsigned long)arg1; \
2832 _argvec[2+2] = (unsigned long)arg2; \
2833 _argvec[2+3] = (unsigned long)arg3; \
2834 __asm__ volatile( \
2835 VALGRIND_ALIGN_STACK \
2836 "mr 11,%1\n\t" \
2837 "std 2,-16(11)\n\t" \
2838 "ld 2,-8(11)\n\t" \
2839 "ld 3, 8(11)\n\t" \
2840 "ld 4, 16(11)\n\t" \
2841 "ld 5, 24(11)\n\t" \
2842 "ld 11, 0(11)\n\t" \
2843 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2844 "mr 11,%1\n\t" \
2845 "mr %0,3\n\t" \
2846 "ld 2,-16(11)\n\t" \
2847 VALGRIND_RESTORE_STACK \
2848 : "=r" (_res) \
2849 : "r" (&_argvec[2]) \
2850 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2851 ); \
2852 lval = (__typeof__(lval)) _res; \
2853 } while (0)
2854
2855 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2856 do { \
2857 volatile OrigFn _orig = (orig); \
2858 volatile unsigned long _argvec[3+4]; \
2859 volatile unsigned long _res; \
2860 \
2861 _argvec[1] = (unsigned long)_orig.r2; \
2862 _argvec[2] = (unsigned long)_orig.nraddr; \
2863 _argvec[2+1] = (unsigned long)arg1; \
2864 _argvec[2+2] = (unsigned long)arg2; \
2865 _argvec[2+3] = (unsigned long)arg3; \
2866 _argvec[2+4] = (unsigned long)arg4; \
2867 __asm__ volatile( \
2868 VALGRIND_ALIGN_STACK \
2869 "mr 11,%1\n\t" \
2870 "std 2,-16(11)\n\t" \
2871 "ld 2,-8(11)\n\t" \
2872 "ld 3, 8(11)\n\t" \
2873 "ld 4, 16(11)\n\t" \
2874 "ld 5, 24(11)\n\t" \
2875 "ld 6, 32(11)\n\t" \
2876 "ld 11, 0(11)\n\t" \
2877 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2878 "mr 11,%1\n\t" \
2879 "mr %0,3\n\t" \
2880 "ld 2,-16(11)\n\t" \
2881 VALGRIND_RESTORE_STACK \
2882 : "=r" (_res) \
2883 : "r" (&_argvec[2]) \
2884 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2885 ); \
2886 lval = (__typeof__(lval)) _res; \
2887 } while (0)
2888
2889 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2890 do { \
2891 volatile OrigFn _orig = (orig); \
2892 volatile unsigned long _argvec[3+5]; \
2893 volatile unsigned long _res; \
2894 \
2895 _argvec[1] = (unsigned long)_orig.r2; \
2896 _argvec[2] = (unsigned long)_orig.nraddr; \
2897 _argvec[2+1] = (unsigned long)arg1; \
2898 _argvec[2+2] = (unsigned long)arg2; \
2899 _argvec[2+3] = (unsigned long)arg3; \
2900 _argvec[2+4] = (unsigned long)arg4; \
2901 _argvec[2+5] = (unsigned long)arg5; \
2902 __asm__ volatile( \
2903 VALGRIND_ALIGN_STACK \
2904 "mr 11,%1\n\t" \
2905 "std 2,-16(11)\n\t" \
2906 "ld 2,-8(11)\n\t" \
2907 "ld 3, 8(11)\n\t" \
2908 "ld 4, 16(11)\n\t" \
2909 "ld 5, 24(11)\n\t" \
2910 "ld 6, 32(11)\n\t" \
2911 "ld 7, 40(11)\n\t" \
2912 "ld 11, 0(11)\n\t" \
2913 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2914 "mr 11,%1\n\t" \
2915 "mr %0,3\n\t" \
2916 "ld 2,-16(11)\n\t" \
2917 VALGRIND_RESTORE_STACK \
2918 : "=r" (_res) \
2919 : "r" (&_argvec[2]) \
2920 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2921 ); \
2922 lval = (__typeof__(lval)) _res; \
2923 } while (0)
2924
2925 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2926 do { \
2927 volatile OrigFn _orig = (orig); \
2928 volatile unsigned long _argvec[3+6]; \
2929 volatile unsigned long _res; \
2930 \
2931 _argvec[1] = (unsigned long)_orig.r2; \
2932 _argvec[2] = (unsigned long)_orig.nraddr; \
2933 _argvec[2+1] = (unsigned long)arg1; \
2934 _argvec[2+2] = (unsigned long)arg2; \
2935 _argvec[2+3] = (unsigned long)arg3; \
2936 _argvec[2+4] = (unsigned long)arg4; \
2937 _argvec[2+5] = (unsigned long)arg5; \
2938 _argvec[2+6] = (unsigned long)arg6; \
2939 __asm__ volatile( \
2940 VALGRIND_ALIGN_STACK \
2941 "mr 11,%1\n\t" \
2942 "std 2,-16(11)\n\t" \
2943 "ld 2,-8(11)\n\t" \
2944 "ld 3, 8(11)\n\t" \
2945 "ld 4, 16(11)\n\t" \
2946 "ld 5, 24(11)\n\t" \
2947 "ld 6, 32(11)\n\t" \
2948 "ld 7, 40(11)\n\t" \
2949 "ld 8, 48(11)\n\t" \
2950 "ld 11, 0(11)\n\t" \
2951 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2952 "mr 11,%1\n\t" \
2953 "mr %0,3\n\t" \
2954 "ld 2,-16(11)\n\t" \
2955 VALGRIND_RESTORE_STACK \
2956 : "=r" (_res) \
2957 : "r" (&_argvec[2]) \
2958 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2959 ); \
2960 lval = (__typeof__(lval)) _res; \
2961 } while (0)
2962
2963 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2964 arg7) \
2965 do { \
2966 volatile OrigFn _orig = (orig); \
2967 volatile unsigned long _argvec[3+7]; \
2968 volatile unsigned long _res; \
2969 \
2970 _argvec[1] = (unsigned long)_orig.r2; \
2971 _argvec[2] = (unsigned long)_orig.nraddr; \
2972 _argvec[2+1] = (unsigned long)arg1; \
2973 _argvec[2+2] = (unsigned long)arg2; \
2974 _argvec[2+3] = (unsigned long)arg3; \
2975 _argvec[2+4] = (unsigned long)arg4; \
2976 _argvec[2+5] = (unsigned long)arg5; \
2977 _argvec[2+6] = (unsigned long)arg6; \
2978 _argvec[2+7] = (unsigned long)arg7; \
2979 __asm__ volatile( \
2980 VALGRIND_ALIGN_STACK \
2981 "mr 11,%1\n\t" \
2982 "std 2,-16(11)\n\t" \
2983 "ld 2,-8(11)\n\t" \
2984 "ld 3, 8(11)\n\t" \
2985 "ld 4, 16(11)\n\t" \
2986 "ld 5, 24(11)\n\t" \
2987 "ld 6, 32(11)\n\t" \
2988 "ld 7, 40(11)\n\t" \
2989 "ld 8, 48(11)\n\t" \
2990 "ld 9, 56(11)\n\t" \
2991 "ld 11, 0(11)\n\t" \
2992 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2993 "mr 11,%1\n\t" \
2994 "mr %0,3\n\t" \
2995 "ld 2,-16(11)\n\t" \
2996 VALGRIND_RESTORE_STACK \
2997 : "=r" (_res) \
2998 : "r" (&_argvec[2]) \
2999 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3000 ); \
3001 lval = (__typeof__(lval)) _res; \
3002 } while (0)
3003
3004 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3005 arg7,arg8) \
3006 do { \
3007 volatile OrigFn _orig = (orig); \
3008 volatile unsigned long _argvec[3+8]; \
3009 volatile unsigned long _res; \
3010 \
3011 _argvec[1] = (unsigned long)_orig.r2; \
3012 _argvec[2] = (unsigned long)_orig.nraddr; \
3013 _argvec[2+1] = (unsigned long)arg1; \
3014 _argvec[2+2] = (unsigned long)arg2; \
3015 _argvec[2+3] = (unsigned long)arg3; \
3016 _argvec[2+4] = (unsigned long)arg4; \
3017 _argvec[2+5] = (unsigned long)arg5; \
3018 _argvec[2+6] = (unsigned long)arg6; \
3019 _argvec[2+7] = (unsigned long)arg7; \
3020 _argvec[2+8] = (unsigned long)arg8; \
3021 __asm__ volatile( \
3022 VALGRIND_ALIGN_STACK \
3023 "mr 11,%1\n\t" \
3024 "std 2,-16(11)\n\t" \
3025 "ld 2,-8(11)\n\t" \
3026 "ld 3, 8(11)\n\t" \
3027 "ld 4, 16(11)\n\t" \
3028 "ld 5, 24(11)\n\t" \
3029 "ld 6, 32(11)\n\t" \
3030 "ld 7, 40(11)\n\t" \
3031 "ld 8, 48(11)\n\t" \
3032 "ld 9, 56(11)\n\t" \
3033 "ld 10, 64(11)\n\t" \
3034 "ld 11, 0(11)\n\t" \
3035 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3036 "mr 11,%1\n\t" \
3037 "mr %0,3\n\t" \
3038 "ld 2,-16(11)\n\t" \
3039 VALGRIND_RESTORE_STACK \
3040 : "=r" (_res) \
3041 : "r" (&_argvec[2]) \
3042 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3043 ); \
3044 lval = (__typeof__(lval)) _res; \
3045 } while (0)
3046
3047 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3048 arg7,arg8,arg9) \
3049 do { \
3050 volatile OrigFn _orig = (orig); \
3051 volatile unsigned long _argvec[3+9]; \
3052 volatile unsigned long _res; \
3053 \
3054 _argvec[1] = (unsigned long)_orig.r2; \
3055 _argvec[2] = (unsigned long)_orig.nraddr; \
3056 _argvec[2+1] = (unsigned long)arg1; \
3057 _argvec[2+2] = (unsigned long)arg2; \
3058 _argvec[2+3] = (unsigned long)arg3; \
3059 _argvec[2+4] = (unsigned long)arg4; \
3060 _argvec[2+5] = (unsigned long)arg5; \
3061 _argvec[2+6] = (unsigned long)arg6; \
3062 _argvec[2+7] = (unsigned long)arg7; \
3063 _argvec[2+8] = (unsigned long)arg8; \
3064 _argvec[2+9] = (unsigned long)arg9; \
3065 __asm__ volatile( \
3066 VALGRIND_ALIGN_STACK \
3067 "mr 11,%1\n\t" \
3068 "std 2,-16(11)\n\t" \
3069 "ld 2,-8(11)\n\t" \
3070 "addi 1,1,-128\n\t" \
3071 \
3072 "ld 3,72(11)\n\t" \
3073 "std 3,112(1)\n\t" \
3074 \
3075 "ld 3, 8(11)\n\t" \
3076 "ld 4, 16(11)\n\t" \
3077 "ld 5, 24(11)\n\t" \
3078 "ld 6, 32(11)\n\t" \
3079 "ld 7, 40(11)\n\t" \
3080 "ld 8, 48(11)\n\t" \
3081 "ld 9, 56(11)\n\t" \
3082 "ld 10, 64(11)\n\t" \
3083 "ld 11, 0(11)\n\t" \
3084 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3085 "mr 11,%1\n\t" \
3086 "mr %0,3\n\t" \
3087 "ld 2,-16(11)\n\t" \
3088 VALGRIND_RESTORE_STACK \
3089 : "=r" (_res) \
3090 : "r" (&_argvec[2]) \
3091 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3092 ); \
3093 lval = (__typeof__(lval)) _res; \
3094 } while (0)
3095
3096 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3097 arg7,arg8,arg9,arg10) \
3098 do { \
3099 volatile OrigFn _orig = (orig); \
3100 volatile unsigned long _argvec[3+10]; \
3101 volatile unsigned long _res; \
3102 \
3103 _argvec[1] = (unsigned long)_orig.r2; \
3104 _argvec[2] = (unsigned long)_orig.nraddr; \
3105 _argvec[2+1] = (unsigned long)arg1; \
3106 _argvec[2+2] = (unsigned long)arg2; \
3107 _argvec[2+3] = (unsigned long)arg3; \
3108 _argvec[2+4] = (unsigned long)arg4; \
3109 _argvec[2+5] = (unsigned long)arg5; \
3110 _argvec[2+6] = (unsigned long)arg6; \
3111 _argvec[2+7] = (unsigned long)arg7; \
3112 _argvec[2+8] = (unsigned long)arg8; \
3113 _argvec[2+9] = (unsigned long)arg9; \
3114 _argvec[2+10] = (unsigned long)arg10; \
3115 __asm__ volatile( \
3116 VALGRIND_ALIGN_STACK \
3117 "mr 11,%1\n\t" \
3118 "std 2,-16(11)\n\t" \
3119 "ld 2,-8(11)\n\t" \
3120 "addi 1,1,-128\n\t" \
3121 \
3122 "ld 3,80(11)\n\t" \
3123 "std 3,120(1)\n\t" \
3124 \
3125 "ld 3,72(11)\n\t" \
3126 "std 3,112(1)\n\t" \
3127 \
3128 "ld 3, 8(11)\n\t" \
3129 "ld 4, 16(11)\n\t" \
3130 "ld 5, 24(11)\n\t" \
3131 "ld 6, 32(11)\n\t" \
3132 "ld 7, 40(11)\n\t" \
3133 "ld 8, 48(11)\n\t" \
3134 "ld 9, 56(11)\n\t" \
3135 "ld 10, 64(11)\n\t" \
3136 "ld 11, 0(11)\n\t" \
3137 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3138 "mr 11,%1\n\t" \
3139 "mr %0,3\n\t" \
3140 "ld 2,-16(11)\n\t" \
3141 VALGRIND_RESTORE_STACK \
3142 : "=r" (_res) \
3143 : "r" (&_argvec[2]) \
3144 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3145 ); \
3146 lval = (__typeof__(lval)) _res; \
3147 } while (0)
3148
3149 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3150 arg7,arg8,arg9,arg10,arg11) \
3151 do { \
3152 volatile OrigFn _orig = (orig); \
3153 volatile unsigned long _argvec[3+11]; \
3154 volatile unsigned long _res; \
3155 \
3156 _argvec[1] = (unsigned long)_orig.r2; \
3157 _argvec[2] = (unsigned long)_orig.nraddr; \
3158 _argvec[2+1] = (unsigned long)arg1; \
3159 _argvec[2+2] = (unsigned long)arg2; \
3160 _argvec[2+3] = (unsigned long)arg3; \
3161 _argvec[2+4] = (unsigned long)arg4; \
3162 _argvec[2+5] = (unsigned long)arg5; \
3163 _argvec[2+6] = (unsigned long)arg6; \
3164 _argvec[2+7] = (unsigned long)arg7; \
3165 _argvec[2+8] = (unsigned long)arg8; \
3166 _argvec[2+9] = (unsigned long)arg9; \
3167 _argvec[2+10] = (unsigned long)arg10; \
3168 _argvec[2+11] = (unsigned long)arg11; \
3169 __asm__ volatile( \
3170 VALGRIND_ALIGN_STACK \
3171 "mr 11,%1\n\t" \
3172 "std 2,-16(11)\n\t" \
3173 "ld 2,-8(11)\n\t" \
3174 "addi 1,1,-144\n\t" \
3175 \
3176 "ld 3,88(11)\n\t" \
3177 "std 3,128(1)\n\t" \
3178 \
3179 "ld 3,80(11)\n\t" \
3180 "std 3,120(1)\n\t" \
3181 \
3182 "ld 3,72(11)\n\t" \
3183 "std 3,112(1)\n\t" \
3184 \
3185 "ld 3, 8(11)\n\t" \
3186 "ld 4, 16(11)\n\t" \
3187 "ld 5, 24(11)\n\t" \
3188 "ld 6, 32(11)\n\t" \
3189 "ld 7, 40(11)\n\t" \
3190 "ld 8, 48(11)\n\t" \
3191 "ld 9, 56(11)\n\t" \
3192 "ld 10, 64(11)\n\t" \
3193 "ld 11, 0(11)\n\t" \
3194 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3195 "mr 11,%1\n\t" \
3196 "mr %0,3\n\t" \
3197 "ld 2,-16(11)\n\t" \
3198 VALGRIND_RESTORE_STACK \
3199 : "=r" (_res) \
3200 : "r" (&_argvec[2]) \
3201 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3202 ); \
3203 lval = (__typeof__(lval)) _res; \
3204 } while (0)
3205
3206 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3207 arg7,arg8,arg9,arg10,arg11,arg12) \
3208 do { \
3209 volatile OrigFn _orig = (orig); \
3210 volatile unsigned long _argvec[3+12]; \
3211 volatile unsigned long _res; \
3212 \
3213 _argvec[1] = (unsigned long)_orig.r2; \
3214 _argvec[2] = (unsigned long)_orig.nraddr; \
3215 _argvec[2+1] = (unsigned long)arg1; \
3216 _argvec[2+2] = (unsigned long)arg2; \
3217 _argvec[2+3] = (unsigned long)arg3; \
3218 _argvec[2+4] = (unsigned long)arg4; \
3219 _argvec[2+5] = (unsigned long)arg5; \
3220 _argvec[2+6] = (unsigned long)arg6; \
3221 _argvec[2+7] = (unsigned long)arg7; \
3222 _argvec[2+8] = (unsigned long)arg8; \
3223 _argvec[2+9] = (unsigned long)arg9; \
3224 _argvec[2+10] = (unsigned long)arg10; \
3225 _argvec[2+11] = (unsigned long)arg11; \
3226 _argvec[2+12] = (unsigned long)arg12; \
3227 __asm__ volatile( \
3228 VALGRIND_ALIGN_STACK \
3229 "mr 11,%1\n\t" \
3230 "std 2,-16(11)\n\t" \
3231 "ld 2,-8(11)\n\t" \
3232 "addi 1,1,-144\n\t" \
3233 \
3234 "ld 3,96(11)\n\t" \
3235 "std 3,136(1)\n\t" \
3236 \
3237 "ld 3,88(11)\n\t" \
3238 "std 3,128(1)\n\t" \
3239 \
3240 "ld 3,80(11)\n\t" \
3241 "std 3,120(1)\n\t" \
3242 \
3243 "ld 3,72(11)\n\t" \
3244 "std 3,112(1)\n\t" \
3245 \
3246 "ld 3, 8(11)\n\t" \
3247 "ld 4, 16(11)\n\t" \
3248 "ld 5, 24(11)\n\t" \
3249 "ld 6, 32(11)\n\t" \
3250 "ld 7, 40(11)\n\t" \
3251 "ld 8, 48(11)\n\t" \
3252 "ld 9, 56(11)\n\t" \
3253 "ld 10, 64(11)\n\t" \
3254 "ld 11, 0(11)\n\t" \
3255 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3256 "mr 11,%1\n\t" \
3257 "mr %0,3\n\t" \
3258 "ld 2,-16(11)\n\t" \
3259 VALGRIND_RESTORE_STACK \
3260 : "=r" (_res) \
3261 : "r" (&_argvec[2]) \
3262 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3263 ); \
3264 lval = (__typeof__(lval)) _res; \
3265 } while (0)
3266
3267 #endif
3268
3269
3270 #if defined(PLAT_ppc64le_linux)
3271
3272
3273
3274
3275 #define __CALLER_SAVED_REGS \
3276 "lr", "ctr", "xer", \
3277 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
3278 "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
3279 "r11", "r12", "r13"
3280
3281
3282
3283
3284
3285
3286 #define VALGRIND_ALIGN_STACK \
3287 "mr 28,1\n\t" \
3288 "rldicr 1,1,0,59\n\t"
3289 #define VALGRIND_RESTORE_STACK \
3290 "mr 1,28\n\t"
3291
3292
3293
3294
3295 #define CALL_FN_W_v(lval, orig) \
3296 do { \
3297 volatile OrigFn _orig = (orig); \
3298 volatile unsigned long _argvec[3+0]; \
3299 volatile unsigned long _res; \
3300 \
3301 _argvec[1] = (unsigned long)_orig.r2; \
3302 _argvec[2] = (unsigned long)_orig.nraddr; \
3303 __asm__ volatile( \
3304 VALGRIND_ALIGN_STACK \
3305 "mr 12,%1\n\t" \
3306 "std 2,-16(12)\n\t" \
3307 "ld 2,-8(12)\n\t" \
3308 "ld 12, 0(12)\n\t" \
3309 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3310 "mr 12,%1\n\t" \
3311 "mr %0,3\n\t" \
3312 "ld 2,-16(12)\n\t" \
3313 VALGRIND_RESTORE_STACK \
3314 : "=r" (_res) \
3315 : "r" (&_argvec[2]) \
3316 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3317 ); \
3318 lval = (__typeof__(lval)) _res; \
3319 } while (0)
3320
3321 #define CALL_FN_W_W(lval, orig, arg1) \
3322 do { \
3323 volatile OrigFn _orig = (orig); \
3324 volatile unsigned long _argvec[3+1]; \
3325 volatile unsigned long _res; \
3326 \
3327 _argvec[1] = (unsigned long)_orig.r2; \
3328 _argvec[2] = (unsigned long)_orig.nraddr; \
3329 _argvec[2+1] = (unsigned long)arg1; \
3330 __asm__ volatile( \
3331 VALGRIND_ALIGN_STACK \
3332 "mr 12,%1\n\t" \
3333 "std 2,-16(12)\n\t" \
3334 "ld 2,-8(12)\n\t" \
3335 "ld 3, 8(12)\n\t" \
3336 "ld 12, 0(12)\n\t" \
3337 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3338 "mr 12,%1\n\t" \
3339 "mr %0,3\n\t" \
3340 "ld 2,-16(12)\n\t" \
3341 VALGRIND_RESTORE_STACK \
3342 : "=r" (_res) \
3343 : "r" (&_argvec[2]) \
3344 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3345 ); \
3346 lval = (__typeof__(lval)) _res; \
3347 } while (0)
3348
3349 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3350 do { \
3351 volatile OrigFn _orig = (orig); \
3352 volatile unsigned long _argvec[3+2]; \
3353 volatile unsigned long _res; \
3354 \
3355 _argvec[1] = (unsigned long)_orig.r2; \
3356 _argvec[2] = (unsigned long)_orig.nraddr; \
3357 _argvec[2+1] = (unsigned long)arg1; \
3358 _argvec[2+2] = (unsigned long)arg2; \
3359 __asm__ volatile( \
3360 VALGRIND_ALIGN_STACK \
3361 "mr 12,%1\n\t" \
3362 "std 2,-16(12)\n\t" \
3363 "ld 2,-8(12)\n\t" \
3364 "ld 3, 8(12)\n\t" \
3365 "ld 4, 16(12)\n\t" \
3366 "ld 12, 0(12)\n\t" \
3367 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3368 "mr 12,%1\n\t" \
3369 "mr %0,3\n\t" \
3370 "ld 2,-16(12)\n\t" \
3371 VALGRIND_RESTORE_STACK \
3372 : "=r" (_res) \
3373 : "r" (&_argvec[2]) \
3374 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3375 ); \
3376 lval = (__typeof__(lval)) _res; \
3377 } while (0)
3378
3379 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3380 do { \
3381 volatile OrigFn _orig = (orig); \
3382 volatile unsigned long _argvec[3+3]; \
3383 volatile unsigned long _res; \
3384 \
3385 _argvec[1] = (unsigned long)_orig.r2; \
3386 _argvec[2] = (unsigned long)_orig.nraddr; \
3387 _argvec[2+1] = (unsigned long)arg1; \
3388 _argvec[2+2] = (unsigned long)arg2; \
3389 _argvec[2+3] = (unsigned long)arg3; \
3390 __asm__ volatile( \
3391 VALGRIND_ALIGN_STACK \
3392 "mr 12,%1\n\t" \
3393 "std 2,-16(12)\n\t" \
3394 "ld 2,-8(12)\n\t" \
3395 "ld 3, 8(12)\n\t" \
3396 "ld 4, 16(12)\n\t" \
3397 "ld 5, 24(12)\n\t" \
3398 "ld 12, 0(12)\n\t" \
3399 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3400 "mr 12,%1\n\t" \
3401 "mr %0,3\n\t" \
3402 "ld 2,-16(12)\n\t" \
3403 VALGRIND_RESTORE_STACK \
3404 : "=r" (_res) \
3405 : "r" (&_argvec[2]) \
3406 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3407 ); \
3408 lval = (__typeof__(lval)) _res; \
3409 } while (0)
3410
3411 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3412 do { \
3413 volatile OrigFn _orig = (orig); \
3414 volatile unsigned long _argvec[3+4]; \
3415 volatile unsigned long _res; \
3416 \
3417 _argvec[1] = (unsigned long)_orig.r2; \
3418 _argvec[2] = (unsigned long)_orig.nraddr; \
3419 _argvec[2+1] = (unsigned long)arg1; \
3420 _argvec[2+2] = (unsigned long)arg2; \
3421 _argvec[2+3] = (unsigned long)arg3; \
3422 _argvec[2+4] = (unsigned long)arg4; \
3423 __asm__ volatile( \
3424 VALGRIND_ALIGN_STACK \
3425 "mr 12,%1\n\t" \
3426 "std 2,-16(12)\n\t" \
3427 "ld 2,-8(12)\n\t" \
3428 "ld 3, 8(12)\n\t" \
3429 "ld 4, 16(12)\n\t" \
3430 "ld 5, 24(12)\n\t" \
3431 "ld 6, 32(12)\n\t" \
3432 "ld 12, 0(12)\n\t" \
3433 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3434 "mr 12,%1\n\t" \
3435 "mr %0,3\n\t" \
3436 "ld 2,-16(12)\n\t" \
3437 VALGRIND_RESTORE_STACK \
3438 : "=r" (_res) \
3439 : "r" (&_argvec[2]) \
3440 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3441 ); \
3442 lval = (__typeof__(lval)) _res; \
3443 } while (0)
3444
3445 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3446 do { \
3447 volatile OrigFn _orig = (orig); \
3448 volatile unsigned long _argvec[3+5]; \
3449 volatile unsigned long _res; \
3450 \
3451 _argvec[1] = (unsigned long)_orig.r2; \
3452 _argvec[2] = (unsigned long)_orig.nraddr; \
3453 _argvec[2+1] = (unsigned long)arg1; \
3454 _argvec[2+2] = (unsigned long)arg2; \
3455 _argvec[2+3] = (unsigned long)arg3; \
3456 _argvec[2+4] = (unsigned long)arg4; \
3457 _argvec[2+5] = (unsigned long)arg5; \
3458 __asm__ volatile( \
3459 VALGRIND_ALIGN_STACK \
3460 "mr 12,%1\n\t" \
3461 "std 2,-16(12)\n\t" \
3462 "ld 2,-8(12)\n\t" \
3463 "ld 3, 8(12)\n\t" \
3464 "ld 4, 16(12)\n\t" \
3465 "ld 5, 24(12)\n\t" \
3466 "ld 6, 32(12)\n\t" \
3467 "ld 7, 40(12)\n\t" \
3468 "ld 12, 0(12)\n\t" \
3469 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3470 "mr 12,%1\n\t" \
3471 "mr %0,3\n\t" \
3472 "ld 2,-16(12)\n\t" \
3473 VALGRIND_RESTORE_STACK \
3474 : "=r" (_res) \
3475 : "r" (&_argvec[2]) \
3476 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3477 ); \
3478 lval = (__typeof__(lval)) _res; \
3479 } while (0)
3480
3481 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3482 do { \
3483 volatile OrigFn _orig = (orig); \
3484 volatile unsigned long _argvec[3+6]; \
3485 volatile unsigned long _res; \
3486 \
3487 _argvec[1] = (unsigned long)_orig.r2; \
3488 _argvec[2] = (unsigned long)_orig.nraddr; \
3489 _argvec[2+1] = (unsigned long)arg1; \
3490 _argvec[2+2] = (unsigned long)arg2; \
3491 _argvec[2+3] = (unsigned long)arg3; \
3492 _argvec[2+4] = (unsigned long)arg4; \
3493 _argvec[2+5] = (unsigned long)arg5; \
3494 _argvec[2+6] = (unsigned long)arg6; \
3495 __asm__ volatile( \
3496 VALGRIND_ALIGN_STACK \
3497 "mr 12,%1\n\t" \
3498 "std 2,-16(12)\n\t" \
3499 "ld 2,-8(12)\n\t" \
3500 "ld 3, 8(12)\n\t" \
3501 "ld 4, 16(12)\n\t" \
3502 "ld 5, 24(12)\n\t" \
3503 "ld 6, 32(12)\n\t" \
3504 "ld 7, 40(12)\n\t" \
3505 "ld 8, 48(12)\n\t" \
3506 "ld 12, 0(12)\n\t" \
3507 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3508 "mr 12,%1\n\t" \
3509 "mr %0,3\n\t" \
3510 "ld 2,-16(12)\n\t" \
3511 VALGRIND_RESTORE_STACK \
3512 : "=r" (_res) \
3513 : "r" (&_argvec[2]) \
3514 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3515 ); \
3516 lval = (__typeof__(lval)) _res; \
3517 } while (0)
3518
3519 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3520 arg7) \
3521 do { \
3522 volatile OrigFn _orig = (orig); \
3523 volatile unsigned long _argvec[3+7]; \
3524 volatile unsigned long _res; \
3525 \
3526 _argvec[1] = (unsigned long)_orig.r2; \
3527 _argvec[2] = (unsigned long)_orig.nraddr; \
3528 _argvec[2+1] = (unsigned long)arg1; \
3529 _argvec[2+2] = (unsigned long)arg2; \
3530 _argvec[2+3] = (unsigned long)arg3; \
3531 _argvec[2+4] = (unsigned long)arg4; \
3532 _argvec[2+5] = (unsigned long)arg5; \
3533 _argvec[2+6] = (unsigned long)arg6; \
3534 _argvec[2+7] = (unsigned long)arg7; \
3535 __asm__ volatile( \
3536 VALGRIND_ALIGN_STACK \
3537 "mr 12,%1\n\t" \
3538 "std 2,-16(12)\n\t" \
3539 "ld 2,-8(12)\n\t" \
3540 "ld 3, 8(12)\n\t" \
3541 "ld 4, 16(12)\n\t" \
3542 "ld 5, 24(12)\n\t" \
3543 "ld 6, 32(12)\n\t" \
3544 "ld 7, 40(12)\n\t" \
3545 "ld 8, 48(12)\n\t" \
3546 "ld 9, 56(12)\n\t" \
3547 "ld 12, 0(12)\n\t" \
3548 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3549 "mr 12,%1\n\t" \
3550 "mr %0,3\n\t" \
3551 "ld 2,-16(12)\n\t" \
3552 VALGRIND_RESTORE_STACK \
3553 : "=r" (_res) \
3554 : "r" (&_argvec[2]) \
3555 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3556 ); \
3557 lval = (__typeof__(lval)) _res; \
3558 } while (0)
3559
3560 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3561 arg7,arg8) \
3562 do { \
3563 volatile OrigFn _orig = (orig); \
3564 volatile unsigned long _argvec[3+8]; \
3565 volatile unsigned long _res; \
3566 \
3567 _argvec[1] = (unsigned long)_orig.r2; \
3568 _argvec[2] = (unsigned long)_orig.nraddr; \
3569 _argvec[2+1] = (unsigned long)arg1; \
3570 _argvec[2+2] = (unsigned long)arg2; \
3571 _argvec[2+3] = (unsigned long)arg3; \
3572 _argvec[2+4] = (unsigned long)arg4; \
3573 _argvec[2+5] = (unsigned long)arg5; \
3574 _argvec[2+6] = (unsigned long)arg6; \
3575 _argvec[2+7] = (unsigned long)arg7; \
3576 _argvec[2+8] = (unsigned long)arg8; \
3577 __asm__ volatile( \
3578 VALGRIND_ALIGN_STACK \
3579 "mr 12,%1\n\t" \
3580 "std 2,-16(12)\n\t" \
3581 "ld 2,-8(12)\n\t" \
3582 "ld 3, 8(12)\n\t" \
3583 "ld 4, 16(12)\n\t" \
3584 "ld 5, 24(12)\n\t" \
3585 "ld 6, 32(12)\n\t" \
3586 "ld 7, 40(12)\n\t" \
3587 "ld 8, 48(12)\n\t" \
3588 "ld 9, 56(12)\n\t" \
3589 "ld 10, 64(12)\n\t" \
3590 "ld 12, 0(12)\n\t" \
3591 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3592 "mr 12,%1\n\t" \
3593 "mr %0,3\n\t" \
3594 "ld 2,-16(12)\n\t" \
3595 VALGRIND_RESTORE_STACK \
3596 : "=r" (_res) \
3597 : "r" (&_argvec[2]) \
3598 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3599 ); \
3600 lval = (__typeof__(lval)) _res; \
3601 } while (0)
3602
3603 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3604 arg7,arg8,arg9) \
3605 do { \
3606 volatile OrigFn _orig = (orig); \
3607 volatile unsigned long _argvec[3+9]; \
3608 volatile unsigned long _res; \
3609 \
3610 _argvec[1] = (unsigned long)_orig.r2; \
3611 _argvec[2] = (unsigned long)_orig.nraddr; \
3612 _argvec[2+1] = (unsigned long)arg1; \
3613 _argvec[2+2] = (unsigned long)arg2; \
3614 _argvec[2+3] = (unsigned long)arg3; \
3615 _argvec[2+4] = (unsigned long)arg4; \
3616 _argvec[2+5] = (unsigned long)arg5; \
3617 _argvec[2+6] = (unsigned long)arg6; \
3618 _argvec[2+7] = (unsigned long)arg7; \
3619 _argvec[2+8] = (unsigned long)arg8; \
3620 _argvec[2+9] = (unsigned long)arg9; \
3621 __asm__ volatile( \
3622 VALGRIND_ALIGN_STACK \
3623 "mr 12,%1\n\t" \
3624 "std 2,-16(12)\n\t" \
3625 "ld 2,-8(12)\n\t" \
3626 "addi 1,1,-128\n\t" \
3627 \
3628 "ld 3,72(12)\n\t" \
3629 "std 3,96(1)\n\t" \
3630 \
3631 "ld 3, 8(12)\n\t" \
3632 "ld 4, 16(12)\n\t" \
3633 "ld 5, 24(12)\n\t" \
3634 "ld 6, 32(12)\n\t" \
3635 "ld 7, 40(12)\n\t" \
3636 "ld 8, 48(12)\n\t" \
3637 "ld 9, 56(12)\n\t" \
3638 "ld 10, 64(12)\n\t" \
3639 "ld 12, 0(12)\n\t" \
3640 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3641 "mr 12,%1\n\t" \
3642 "mr %0,3\n\t" \
3643 "ld 2,-16(12)\n\t" \
3644 VALGRIND_RESTORE_STACK \
3645 : "=r" (_res) \
3646 : "r" (&_argvec[2]) \
3647 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3648 ); \
3649 lval = (__typeof__(lval)) _res; \
3650 } while (0)
3651
3652 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3653 arg7,arg8,arg9,arg10) \
3654 do { \
3655 volatile OrigFn _orig = (orig); \
3656 volatile unsigned long _argvec[3+10]; \
3657 volatile unsigned long _res; \
3658 \
3659 _argvec[1] = (unsigned long)_orig.r2; \
3660 _argvec[2] = (unsigned long)_orig.nraddr; \
3661 _argvec[2+1] = (unsigned long)arg1; \
3662 _argvec[2+2] = (unsigned long)arg2; \
3663 _argvec[2+3] = (unsigned long)arg3; \
3664 _argvec[2+4] = (unsigned long)arg4; \
3665 _argvec[2+5] = (unsigned long)arg5; \
3666 _argvec[2+6] = (unsigned long)arg6; \
3667 _argvec[2+7] = (unsigned long)arg7; \
3668 _argvec[2+8] = (unsigned long)arg8; \
3669 _argvec[2+9] = (unsigned long)arg9; \
3670 _argvec[2+10] = (unsigned long)arg10; \
3671 __asm__ volatile( \
3672 VALGRIND_ALIGN_STACK \
3673 "mr 12,%1\n\t" \
3674 "std 2,-16(12)\n\t" \
3675 "ld 2,-8(12)\n\t" \
3676 "addi 1,1,-128\n\t" \
3677 \
3678 "ld 3,80(12)\n\t" \
3679 "std 3,104(1)\n\t" \
3680 \
3681 "ld 3,72(12)\n\t" \
3682 "std 3,96(1)\n\t" \
3683 \
3684 "ld 3, 8(12)\n\t" \
3685 "ld 4, 16(12)\n\t" \
3686 "ld 5, 24(12)\n\t" \
3687 "ld 6, 32(12)\n\t" \
3688 "ld 7, 40(12)\n\t" \
3689 "ld 8, 48(12)\n\t" \
3690 "ld 9, 56(12)\n\t" \
3691 "ld 10, 64(12)\n\t" \
3692 "ld 12, 0(12)\n\t" \
3693 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3694 "mr 12,%1\n\t" \
3695 "mr %0,3\n\t" \
3696 "ld 2,-16(12)\n\t" \
3697 VALGRIND_RESTORE_STACK \
3698 : "=r" (_res) \
3699 : "r" (&_argvec[2]) \
3700 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3701 ); \
3702 lval = (__typeof__(lval)) _res; \
3703 } while (0)
3704
3705 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3706 arg7,arg8,arg9,arg10,arg11) \
3707 do { \
3708 volatile OrigFn _orig = (orig); \
3709 volatile unsigned long _argvec[3+11]; \
3710 volatile unsigned long _res; \
3711 \
3712 _argvec[1] = (unsigned long)_orig.r2; \
3713 _argvec[2] = (unsigned long)_orig.nraddr; \
3714 _argvec[2+1] = (unsigned long)arg1; \
3715 _argvec[2+2] = (unsigned long)arg2; \
3716 _argvec[2+3] = (unsigned long)arg3; \
3717 _argvec[2+4] = (unsigned long)arg4; \
3718 _argvec[2+5] = (unsigned long)arg5; \
3719 _argvec[2+6] = (unsigned long)arg6; \
3720 _argvec[2+7] = (unsigned long)arg7; \
3721 _argvec[2+8] = (unsigned long)arg8; \
3722 _argvec[2+9] = (unsigned long)arg9; \
3723 _argvec[2+10] = (unsigned long)arg10; \
3724 _argvec[2+11] = (unsigned long)arg11; \
3725 __asm__ volatile( \
3726 VALGRIND_ALIGN_STACK \
3727 "mr 12,%1\n\t" \
3728 "std 2,-16(12)\n\t" \
3729 "ld 2,-8(12)\n\t" \
3730 "addi 1,1,-144\n\t" \
3731 \
3732 "ld 3,88(12)\n\t" \
3733 "std 3,112(1)\n\t" \
3734 \
3735 "ld 3,80(12)\n\t" \
3736 "std 3,104(1)\n\t" \
3737 \
3738 "ld 3,72(12)\n\t" \
3739 "std 3,96(1)\n\t" \
3740 \
3741 "ld 3, 8(12)\n\t" \
3742 "ld 4, 16(12)\n\t" \
3743 "ld 5, 24(12)\n\t" \
3744 "ld 6, 32(12)\n\t" \
3745 "ld 7, 40(12)\n\t" \
3746 "ld 8, 48(12)\n\t" \
3747 "ld 9, 56(12)\n\t" \
3748 "ld 10, 64(12)\n\t" \
3749 "ld 12, 0(12)\n\t" \
3750 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3751 "mr 12,%1\n\t" \
3752 "mr %0,3\n\t" \
3753 "ld 2,-16(12)\n\t" \
3754 VALGRIND_RESTORE_STACK \
3755 : "=r" (_res) \
3756 : "r" (&_argvec[2]) \
3757 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3758 ); \
3759 lval = (__typeof__(lval)) _res; \
3760 } while (0)
3761
3762 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3763 arg7,arg8,arg9,arg10,arg11,arg12) \
3764 do { \
3765 volatile OrigFn _orig = (orig); \
3766 volatile unsigned long _argvec[3+12]; \
3767 volatile unsigned long _res; \
3768 \
3769 _argvec[1] = (unsigned long)_orig.r2; \
3770 _argvec[2] = (unsigned long)_orig.nraddr; \
3771 _argvec[2+1] = (unsigned long)arg1; \
3772 _argvec[2+2] = (unsigned long)arg2; \
3773 _argvec[2+3] = (unsigned long)arg3; \
3774 _argvec[2+4] = (unsigned long)arg4; \
3775 _argvec[2+5] = (unsigned long)arg5; \
3776 _argvec[2+6] = (unsigned long)arg6; \
3777 _argvec[2+7] = (unsigned long)arg7; \
3778 _argvec[2+8] = (unsigned long)arg8; \
3779 _argvec[2+9] = (unsigned long)arg9; \
3780 _argvec[2+10] = (unsigned long)arg10; \
3781 _argvec[2+11] = (unsigned long)arg11; \
3782 _argvec[2+12] = (unsigned long)arg12; \
3783 __asm__ volatile( \
3784 VALGRIND_ALIGN_STACK \
3785 "mr 12,%1\n\t" \
3786 "std 2,-16(12)\n\t" \
3787 "ld 2,-8(12)\n\t" \
3788 "addi 1,1,-144\n\t" \
3789 \
3790 "ld 3,96(12)\n\t" \
3791 "std 3,120(1)\n\t" \
3792 \
3793 "ld 3,88(12)\n\t" \
3794 "std 3,112(1)\n\t" \
3795 \
3796 "ld 3,80(12)\n\t" \
3797 "std 3,104(1)\n\t" \
3798 \
3799 "ld 3,72(12)\n\t" \
3800 "std 3,96(1)\n\t" \
3801 \
3802 "ld 3, 8(12)\n\t" \
3803 "ld 4, 16(12)\n\t" \
3804 "ld 5, 24(12)\n\t" \
3805 "ld 6, 32(12)\n\t" \
3806 "ld 7, 40(12)\n\t" \
3807 "ld 8, 48(12)\n\t" \
3808 "ld 9, 56(12)\n\t" \
3809 "ld 10, 64(12)\n\t" \
3810 "ld 12, 0(12)\n\t" \
3811 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3812 "mr 12,%1\n\t" \
3813 "mr %0,3\n\t" \
3814 "ld 2,-16(12)\n\t" \
3815 VALGRIND_RESTORE_STACK \
3816 : "=r" (_res) \
3817 : "r" (&_argvec[2]) \
3818 : "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3819 ); \
3820 lval = (__typeof__(lval)) _res; \
3821 } while (0)
3822
3823 #endif
3824
3825
3826
3827 #if defined(PLAT_arm_linux)
3828
3829
3830 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14"
3831
3832
3833
3834
3835
3836
3837
3838
3839
3840
3841
3842
3843
3844
3845 #define VALGRIND_ALIGN_STACK \
3846 "mov r10, sp\n\t" \
3847 "mov r4, sp\n\t" \
3848 "bic r4, r4, #7\n\t" \
3849 "mov sp, r4\n\t"
3850 #define VALGRIND_RESTORE_STACK \
3851 "mov sp, r10\n\t"
3852
3853
3854
3855
3856 #define CALL_FN_W_v(lval, orig) \
3857 do { \
3858 volatile OrigFn _orig = (orig); \
3859 volatile unsigned long _argvec[1]; \
3860 volatile unsigned long _res; \
3861 _argvec[0] = (unsigned long)_orig.nraddr; \
3862 __asm__ volatile( \
3863 VALGRIND_ALIGN_STACK \
3864 "ldr r4, [%1] \n\t" \
3865 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3866 VALGRIND_RESTORE_STACK \
3867 "mov %0, r0\n" \
3868 : "=r" (_res) \
3869 : "0" (&_argvec[0]) \
3870 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3871 ); \
3872 lval = (__typeof__(lval)) _res; \
3873 } while (0)
3874
3875 #define CALL_FN_W_W(lval, orig, arg1) \
3876 do { \
3877 volatile OrigFn _orig = (orig); \
3878 volatile unsigned long _argvec[2]; \
3879 volatile unsigned long _res; \
3880 _argvec[0] = (unsigned long)_orig.nraddr; \
3881 _argvec[1] = (unsigned long)(arg1); \
3882 __asm__ volatile( \
3883 VALGRIND_ALIGN_STACK \
3884 "ldr r0, [%1, #4] \n\t" \
3885 "ldr r4, [%1] \n\t" \
3886 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3887 VALGRIND_RESTORE_STACK \
3888 "mov %0, r0\n" \
3889 : "=r" (_res) \
3890 : "0" (&_argvec[0]) \
3891 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3892 ); \
3893 lval = (__typeof__(lval)) _res; \
3894 } while (0)
3895
3896 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3897 do { \
3898 volatile OrigFn _orig = (orig); \
3899 volatile unsigned long _argvec[3]; \
3900 volatile unsigned long _res; \
3901 _argvec[0] = (unsigned long)_orig.nraddr; \
3902 _argvec[1] = (unsigned long)(arg1); \
3903 _argvec[2] = (unsigned long)(arg2); \
3904 __asm__ volatile( \
3905 VALGRIND_ALIGN_STACK \
3906 "ldr r0, [%1, #4] \n\t" \
3907 "ldr r1, [%1, #8] \n\t" \
3908 "ldr r4, [%1] \n\t" \
3909 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3910 VALGRIND_RESTORE_STACK \
3911 "mov %0, r0\n" \
3912 : "=r" (_res) \
3913 : "0" (&_argvec[0]) \
3914 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3915 ); \
3916 lval = (__typeof__(lval)) _res; \
3917 } while (0)
3918
3919 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3920 do { \
3921 volatile OrigFn _orig = (orig); \
3922 volatile unsigned long _argvec[4]; \
3923 volatile unsigned long _res; \
3924 _argvec[0] = (unsigned long)_orig.nraddr; \
3925 _argvec[1] = (unsigned long)(arg1); \
3926 _argvec[2] = (unsigned long)(arg2); \
3927 _argvec[3] = (unsigned long)(arg3); \
3928 __asm__ volatile( \
3929 VALGRIND_ALIGN_STACK \
3930 "ldr r0, [%1, #4] \n\t" \
3931 "ldr r1, [%1, #8] \n\t" \
3932 "ldr r2, [%1, #12] \n\t" \
3933 "ldr r4, [%1] \n\t" \
3934 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3935 VALGRIND_RESTORE_STACK \
3936 "mov %0, r0\n" \
3937 : "=r" (_res) \
3938 : "0" (&_argvec[0]) \
3939 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3940 ); \
3941 lval = (__typeof__(lval)) _res; \
3942 } while (0)
3943
3944 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3945 do { \
3946 volatile OrigFn _orig = (orig); \
3947 volatile unsigned long _argvec[5]; \
3948 volatile unsigned long _res; \
3949 _argvec[0] = (unsigned long)_orig.nraddr; \
3950 _argvec[1] = (unsigned long)(arg1); \
3951 _argvec[2] = (unsigned long)(arg2); \
3952 _argvec[3] = (unsigned long)(arg3); \
3953 _argvec[4] = (unsigned long)(arg4); \
3954 __asm__ volatile( \
3955 VALGRIND_ALIGN_STACK \
3956 "ldr r0, [%1, #4] \n\t" \
3957 "ldr r1, [%1, #8] \n\t" \
3958 "ldr r2, [%1, #12] \n\t" \
3959 "ldr r3, [%1, #16] \n\t" \
3960 "ldr r4, [%1] \n\t" \
3961 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3962 VALGRIND_RESTORE_STACK \
3963 "mov %0, r0" \
3964 : "=r" (_res) \
3965 : "0" (&_argvec[0]) \
3966 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3967 ); \
3968 lval = (__typeof__(lval)) _res; \
3969 } while (0)
3970
3971 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3972 do { \
3973 volatile OrigFn _orig = (orig); \
3974 volatile unsigned long _argvec[6]; \
3975 volatile unsigned long _res; \
3976 _argvec[0] = (unsigned long)_orig.nraddr; \
3977 _argvec[1] = (unsigned long)(arg1); \
3978 _argvec[2] = (unsigned long)(arg2); \
3979 _argvec[3] = (unsigned long)(arg3); \
3980 _argvec[4] = (unsigned long)(arg4); \
3981 _argvec[5] = (unsigned long)(arg5); \
3982 __asm__ volatile( \
3983 VALGRIND_ALIGN_STACK \
3984 "sub sp, sp, #4 \n\t" \
3985 "ldr r0, [%1, #20] \n\t" \
3986 "push {r0} \n\t" \
3987 "ldr r0, [%1, #4] \n\t" \
3988 "ldr r1, [%1, #8] \n\t" \
3989 "ldr r2, [%1, #12] \n\t" \
3990 "ldr r3, [%1, #16] \n\t" \
3991 "ldr r4, [%1] \n\t" \
3992 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3993 VALGRIND_RESTORE_STACK \
3994 "mov %0, r0" \
3995 : "=r" (_res) \
3996 : "0" (&_argvec[0]) \
3997 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3998 ); \
3999 lval = (__typeof__(lval)) _res; \
4000 } while (0)
4001
4002 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4003 do { \
4004 volatile OrigFn _orig = (orig); \
4005 volatile unsigned long _argvec[7]; \
4006 volatile unsigned long _res; \
4007 _argvec[0] = (unsigned long)_orig.nraddr; \
4008 _argvec[1] = (unsigned long)(arg1); \
4009 _argvec[2] = (unsigned long)(arg2); \
4010 _argvec[3] = (unsigned long)(arg3); \
4011 _argvec[4] = (unsigned long)(arg4); \
4012 _argvec[5] = (unsigned long)(arg5); \
4013 _argvec[6] = (unsigned long)(arg6); \
4014 __asm__ volatile( \
4015 VALGRIND_ALIGN_STACK \
4016 "ldr r0, [%1, #20] \n\t" \
4017 "ldr r1, [%1, #24] \n\t" \
4018 "push {r0, r1} \n\t" \
4019 "ldr r0, [%1, #4] \n\t" \
4020 "ldr r1, [%1, #8] \n\t" \
4021 "ldr r2, [%1, #12] \n\t" \
4022 "ldr r3, [%1, #16] \n\t" \
4023 "ldr r4, [%1] \n\t" \
4024 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4025 VALGRIND_RESTORE_STACK \
4026 "mov %0, r0" \
4027 : "=r" (_res) \
4028 : "0" (&_argvec[0]) \
4029 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4030 ); \
4031 lval = (__typeof__(lval)) _res; \
4032 } while (0)
4033
4034 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4035 arg7) \
4036 do { \
4037 volatile OrigFn _orig = (orig); \
4038 volatile unsigned long _argvec[8]; \
4039 volatile unsigned long _res; \
4040 _argvec[0] = (unsigned long)_orig.nraddr; \
4041 _argvec[1] = (unsigned long)(arg1); \
4042 _argvec[2] = (unsigned long)(arg2); \
4043 _argvec[3] = (unsigned long)(arg3); \
4044 _argvec[4] = (unsigned long)(arg4); \
4045 _argvec[5] = (unsigned long)(arg5); \
4046 _argvec[6] = (unsigned long)(arg6); \
4047 _argvec[7] = (unsigned long)(arg7); \
4048 __asm__ volatile( \
4049 VALGRIND_ALIGN_STACK \
4050 "sub sp, sp, #4 \n\t" \
4051 "ldr r0, [%1, #20] \n\t" \
4052 "ldr r1, [%1, #24] \n\t" \
4053 "ldr r2, [%1, #28] \n\t" \
4054 "push {r0, r1, r2} \n\t" \
4055 "ldr r0, [%1, #4] \n\t" \
4056 "ldr r1, [%1, #8] \n\t" \
4057 "ldr r2, [%1, #12] \n\t" \
4058 "ldr r3, [%1, #16] \n\t" \
4059 "ldr r4, [%1] \n\t" \
4060 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4061 VALGRIND_RESTORE_STACK \
4062 "mov %0, r0" \
4063 : "=r" (_res) \
4064 : "0" (&_argvec[0]) \
4065 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4066 ); \
4067 lval = (__typeof__(lval)) _res; \
4068 } while (0)
4069
4070 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4071 arg7,arg8) \
4072 do { \
4073 volatile OrigFn _orig = (orig); \
4074 volatile unsigned long _argvec[9]; \
4075 volatile unsigned long _res; \
4076 _argvec[0] = (unsigned long)_orig.nraddr; \
4077 _argvec[1] = (unsigned long)(arg1); \
4078 _argvec[2] = (unsigned long)(arg2); \
4079 _argvec[3] = (unsigned long)(arg3); \
4080 _argvec[4] = (unsigned long)(arg4); \
4081 _argvec[5] = (unsigned long)(arg5); \
4082 _argvec[6] = (unsigned long)(arg6); \
4083 _argvec[7] = (unsigned long)(arg7); \
4084 _argvec[8] = (unsigned long)(arg8); \
4085 __asm__ volatile( \
4086 VALGRIND_ALIGN_STACK \
4087 "ldr r0, [%1, #20] \n\t" \
4088 "ldr r1, [%1, #24] \n\t" \
4089 "ldr r2, [%1, #28] \n\t" \
4090 "ldr r3, [%1, #32] \n\t" \
4091 "push {r0, r1, r2, r3} \n\t" \
4092 "ldr r0, [%1, #4] \n\t" \
4093 "ldr r1, [%1, #8] \n\t" \
4094 "ldr r2, [%1, #12] \n\t" \
4095 "ldr r3, [%1, #16] \n\t" \
4096 "ldr r4, [%1] \n\t" \
4097 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4098 VALGRIND_RESTORE_STACK \
4099 "mov %0, r0" \
4100 : "=r" (_res) \
4101 : "0" (&_argvec[0]) \
4102 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4103 ); \
4104 lval = (__typeof__(lval)) _res; \
4105 } while (0)
4106
4107 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4108 arg7,arg8,arg9) \
4109 do { \
4110 volatile OrigFn _orig = (orig); \
4111 volatile unsigned long _argvec[10]; \
4112 volatile unsigned long _res; \
4113 _argvec[0] = (unsigned long)_orig.nraddr; \
4114 _argvec[1] = (unsigned long)(arg1); \
4115 _argvec[2] = (unsigned long)(arg2); \
4116 _argvec[3] = (unsigned long)(arg3); \
4117 _argvec[4] = (unsigned long)(arg4); \
4118 _argvec[5] = (unsigned long)(arg5); \
4119 _argvec[6] = (unsigned long)(arg6); \
4120 _argvec[7] = (unsigned long)(arg7); \
4121 _argvec[8] = (unsigned long)(arg8); \
4122 _argvec[9] = (unsigned long)(arg9); \
4123 __asm__ volatile( \
4124 VALGRIND_ALIGN_STACK \
4125 "sub sp, sp, #4 \n\t" \
4126 "ldr r0, [%1, #20] \n\t" \
4127 "ldr r1, [%1, #24] \n\t" \
4128 "ldr r2, [%1, #28] \n\t" \
4129 "ldr r3, [%1, #32] \n\t" \
4130 "ldr r4, [%1, #36] \n\t" \
4131 "push {r0, r1, r2, r3, r4} \n\t" \
4132 "ldr r0, [%1, #4] \n\t" \
4133 "ldr r1, [%1, #8] \n\t" \
4134 "ldr r2, [%1, #12] \n\t" \
4135 "ldr r3, [%1, #16] \n\t" \
4136 "ldr r4, [%1] \n\t" \
4137 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4138 VALGRIND_RESTORE_STACK \
4139 "mov %0, r0" \
4140 : "=r" (_res) \
4141 : "0" (&_argvec[0]) \
4142 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4143 ); \
4144 lval = (__typeof__(lval)) _res; \
4145 } while (0)
4146
4147 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4148 arg7,arg8,arg9,arg10) \
4149 do { \
4150 volatile OrigFn _orig = (orig); \
4151 volatile unsigned long _argvec[11]; \
4152 volatile unsigned long _res; \
4153 _argvec[0] = (unsigned long)_orig.nraddr; \
4154 _argvec[1] = (unsigned long)(arg1); \
4155 _argvec[2] = (unsigned long)(arg2); \
4156 _argvec[3] = (unsigned long)(arg3); \
4157 _argvec[4] = (unsigned long)(arg4); \
4158 _argvec[5] = (unsigned long)(arg5); \
4159 _argvec[6] = (unsigned long)(arg6); \
4160 _argvec[7] = (unsigned long)(arg7); \
4161 _argvec[8] = (unsigned long)(arg8); \
4162 _argvec[9] = (unsigned long)(arg9); \
4163 _argvec[10] = (unsigned long)(arg10); \
4164 __asm__ volatile( \
4165 VALGRIND_ALIGN_STACK \
4166 "ldr r0, [%1, #40] \n\t" \
4167 "push {r0} \n\t" \
4168 "ldr r0, [%1, #20] \n\t" \
4169 "ldr r1, [%1, #24] \n\t" \
4170 "ldr r2, [%1, #28] \n\t" \
4171 "ldr r3, [%1, #32] \n\t" \
4172 "ldr r4, [%1, #36] \n\t" \
4173 "push {r0, r1, r2, r3, r4} \n\t" \
4174 "ldr r0, [%1, #4] \n\t" \
4175 "ldr r1, [%1, #8] \n\t" \
4176 "ldr r2, [%1, #12] \n\t" \
4177 "ldr r3, [%1, #16] \n\t" \
4178 "ldr r4, [%1] \n\t" \
4179 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4180 VALGRIND_RESTORE_STACK \
4181 "mov %0, r0" \
4182 : "=r" (_res) \
4183 : "0" (&_argvec[0]) \
4184 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4185 ); \
4186 lval = (__typeof__(lval)) _res; \
4187 } while (0)
4188
4189 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4190 arg6,arg7,arg8,arg9,arg10, \
4191 arg11) \
4192 do { \
4193 volatile OrigFn _orig = (orig); \
4194 volatile unsigned long _argvec[12]; \
4195 volatile unsigned long _res; \
4196 _argvec[0] = (unsigned long)_orig.nraddr; \
4197 _argvec[1] = (unsigned long)(arg1); \
4198 _argvec[2] = (unsigned long)(arg2); \
4199 _argvec[3] = (unsigned long)(arg3); \
4200 _argvec[4] = (unsigned long)(arg4); \
4201 _argvec[5] = (unsigned long)(arg5); \
4202 _argvec[6] = (unsigned long)(arg6); \
4203 _argvec[7] = (unsigned long)(arg7); \
4204 _argvec[8] = (unsigned long)(arg8); \
4205 _argvec[9] = (unsigned long)(arg9); \
4206 _argvec[10] = (unsigned long)(arg10); \
4207 _argvec[11] = (unsigned long)(arg11); \
4208 __asm__ volatile( \
4209 VALGRIND_ALIGN_STACK \
4210 "sub sp, sp, #4 \n\t" \
4211 "ldr r0, [%1, #40] \n\t" \
4212 "ldr r1, [%1, #44] \n\t" \
4213 "push {r0, r1} \n\t" \
4214 "ldr r0, [%1, #20] \n\t" \
4215 "ldr r1, [%1, #24] \n\t" \
4216 "ldr r2, [%1, #28] \n\t" \
4217 "ldr r3, [%1, #32] \n\t" \
4218 "ldr r4, [%1, #36] \n\t" \
4219 "push {r0, r1, r2, r3, r4} \n\t" \
4220 "ldr r0, [%1, #4] \n\t" \
4221 "ldr r1, [%1, #8] \n\t" \
4222 "ldr r2, [%1, #12] \n\t" \
4223 "ldr r3, [%1, #16] \n\t" \
4224 "ldr r4, [%1] \n\t" \
4225 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4226 VALGRIND_RESTORE_STACK \
4227 "mov %0, r0" \
4228 : "=r" (_res) \
4229 : "0" (&_argvec[0]) \
4230 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4231 ); \
4232 lval = (__typeof__(lval)) _res; \
4233 } while (0)
4234
4235 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4236 arg6,arg7,arg8,arg9,arg10, \
4237 arg11,arg12) \
4238 do { \
4239 volatile OrigFn _orig = (orig); \
4240 volatile unsigned long _argvec[13]; \
4241 volatile unsigned long _res; \
4242 _argvec[0] = (unsigned long)_orig.nraddr; \
4243 _argvec[1] = (unsigned long)(arg1); \
4244 _argvec[2] = (unsigned long)(arg2); \
4245 _argvec[3] = (unsigned long)(arg3); \
4246 _argvec[4] = (unsigned long)(arg4); \
4247 _argvec[5] = (unsigned long)(arg5); \
4248 _argvec[6] = (unsigned long)(arg6); \
4249 _argvec[7] = (unsigned long)(arg7); \
4250 _argvec[8] = (unsigned long)(arg8); \
4251 _argvec[9] = (unsigned long)(arg9); \
4252 _argvec[10] = (unsigned long)(arg10); \
4253 _argvec[11] = (unsigned long)(arg11); \
4254 _argvec[12] = (unsigned long)(arg12); \
4255 __asm__ volatile( \
4256 VALGRIND_ALIGN_STACK \
4257 "ldr r0, [%1, #40] \n\t" \
4258 "ldr r1, [%1, #44] \n\t" \
4259 "ldr r2, [%1, #48] \n\t" \
4260 "push {r0, r1, r2} \n\t" \
4261 "ldr r0, [%1, #20] \n\t" \
4262 "ldr r1, [%1, #24] \n\t" \
4263 "ldr r2, [%1, #28] \n\t" \
4264 "ldr r3, [%1, #32] \n\t" \
4265 "ldr r4, [%1, #36] \n\t" \
4266 "push {r0, r1, r2, r3, r4} \n\t" \
4267 "ldr r0, [%1, #4] \n\t" \
4268 "ldr r1, [%1, #8] \n\t" \
4269 "ldr r2, [%1, #12] \n\t" \
4270 "ldr r3, [%1, #16] \n\t" \
4271 "ldr r4, [%1] \n\t" \
4272 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4273 VALGRIND_RESTORE_STACK \
4274 "mov %0, r0" \
4275 : "=r" (_res) \
4276 : "0" (&_argvec[0]) \
4277 : "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4278 ); \
4279 lval = (__typeof__(lval)) _res; \
4280 } while (0)
4281
4282 #endif
4283
4284
4285
4286 #if defined(PLAT_arm64_linux)
4287
4288
4289 #define __CALLER_SAVED_REGS \
4290 "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9", \
4291 "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", \
4292 "x18", "x19", "x20", "x30", \
4293 "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", \
4294 "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", \
4295 "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", \
4296 "v26", "v27", "v28", "v29", "v30", "v31"
4297
4298
4299
4300 #define VALGRIND_ALIGN_STACK \
4301 "mov x21, sp\n\t" \
4302 "bic sp, x21, #15\n\t"
4303 #define VALGRIND_RESTORE_STACK \
4304 "mov sp, x21\n\t"
4305
4306
4307
4308
4309 #define CALL_FN_W_v(lval, orig) \
4310 do { \
4311 volatile OrigFn _orig = (orig); \
4312 volatile unsigned long _argvec[1]; \
4313 volatile unsigned long _res; \
4314 _argvec[0] = (unsigned long)_orig.nraddr; \
4315 __asm__ volatile( \
4316 VALGRIND_ALIGN_STACK \
4317 "ldr x8, [%1] \n\t" \
4318 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4319 VALGRIND_RESTORE_STACK \
4320 "mov %0, x0\n" \
4321 : "=r" (_res) \
4322 : "0" (&_argvec[0]) \
4323 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4324 ); \
4325 lval = (__typeof__(lval)) _res; \
4326 } while (0)
4327
4328 #define CALL_FN_W_W(lval, orig, arg1) \
4329 do { \
4330 volatile OrigFn _orig = (orig); \
4331 volatile unsigned long _argvec[2]; \
4332 volatile unsigned long _res; \
4333 _argvec[0] = (unsigned long)_orig.nraddr; \
4334 _argvec[1] = (unsigned long)(arg1); \
4335 __asm__ volatile( \
4336 VALGRIND_ALIGN_STACK \
4337 "ldr x0, [%1, #8] \n\t" \
4338 "ldr x8, [%1] \n\t" \
4339 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4340 VALGRIND_RESTORE_STACK \
4341 "mov %0, x0\n" \
4342 : "=r" (_res) \
4343 : "0" (&_argvec[0]) \
4344 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4345 ); \
4346 lval = (__typeof__(lval)) _res; \
4347 } while (0)
4348
4349 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
4350 do { \
4351 volatile OrigFn _orig = (orig); \
4352 volatile unsigned long _argvec[3]; \
4353 volatile unsigned long _res; \
4354 _argvec[0] = (unsigned long)_orig.nraddr; \
4355 _argvec[1] = (unsigned long)(arg1); \
4356 _argvec[2] = (unsigned long)(arg2); \
4357 __asm__ volatile( \
4358 VALGRIND_ALIGN_STACK \
4359 "ldr x0, [%1, #8] \n\t" \
4360 "ldr x1, [%1, #16] \n\t" \
4361 "ldr x8, [%1] \n\t" \
4362 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4363 VALGRIND_RESTORE_STACK \
4364 "mov %0, x0\n" \
4365 : "=r" (_res) \
4366 : "0" (&_argvec[0]) \
4367 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4368 ); \
4369 lval = (__typeof__(lval)) _res; \
4370 } while (0)
4371
4372 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
4373 do { \
4374 volatile OrigFn _orig = (orig); \
4375 volatile unsigned long _argvec[4]; \
4376 volatile unsigned long _res; \
4377 _argvec[0] = (unsigned long)_orig.nraddr; \
4378 _argvec[1] = (unsigned long)(arg1); \
4379 _argvec[2] = (unsigned long)(arg2); \
4380 _argvec[3] = (unsigned long)(arg3); \
4381 __asm__ volatile( \
4382 VALGRIND_ALIGN_STACK \
4383 "ldr x0, [%1, #8] \n\t" \
4384 "ldr x1, [%1, #16] \n\t" \
4385 "ldr x2, [%1, #24] \n\t" \
4386 "ldr x8, [%1] \n\t" \
4387 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4388 VALGRIND_RESTORE_STACK \
4389 "mov %0, x0\n" \
4390 : "=r" (_res) \
4391 : "0" (&_argvec[0]) \
4392 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4393 ); \
4394 lval = (__typeof__(lval)) _res; \
4395 } while (0)
4396
4397 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
4398 do { \
4399 volatile OrigFn _orig = (orig); \
4400 volatile unsigned long _argvec[5]; \
4401 volatile unsigned long _res; \
4402 _argvec[0] = (unsigned long)_orig.nraddr; \
4403 _argvec[1] = (unsigned long)(arg1); \
4404 _argvec[2] = (unsigned long)(arg2); \
4405 _argvec[3] = (unsigned long)(arg3); \
4406 _argvec[4] = (unsigned long)(arg4); \
4407 __asm__ volatile( \
4408 VALGRIND_ALIGN_STACK \
4409 "ldr x0, [%1, #8] \n\t" \
4410 "ldr x1, [%1, #16] \n\t" \
4411 "ldr x2, [%1, #24] \n\t" \
4412 "ldr x3, [%1, #32] \n\t" \
4413 "ldr x8, [%1] \n\t" \
4414 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4415 VALGRIND_RESTORE_STACK \
4416 "mov %0, x0" \
4417 : "=r" (_res) \
4418 : "0" (&_argvec[0]) \
4419 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4420 ); \
4421 lval = (__typeof__(lval)) _res; \
4422 } while (0)
4423
4424 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
4425 do { \
4426 volatile OrigFn _orig = (orig); \
4427 volatile unsigned long _argvec[6]; \
4428 volatile unsigned long _res; \
4429 _argvec[0] = (unsigned long)_orig.nraddr; \
4430 _argvec[1] = (unsigned long)(arg1); \
4431 _argvec[2] = (unsigned long)(arg2); \
4432 _argvec[3] = (unsigned long)(arg3); \
4433 _argvec[4] = (unsigned long)(arg4); \
4434 _argvec[5] = (unsigned long)(arg5); \
4435 __asm__ volatile( \
4436 VALGRIND_ALIGN_STACK \
4437 "ldr x0, [%1, #8] \n\t" \
4438 "ldr x1, [%1, #16] \n\t" \
4439 "ldr x2, [%1, #24] \n\t" \
4440 "ldr x3, [%1, #32] \n\t" \
4441 "ldr x4, [%1, #40] \n\t" \
4442 "ldr x8, [%1] \n\t" \
4443 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4444 VALGRIND_RESTORE_STACK \
4445 "mov %0, x0" \
4446 : "=r" (_res) \
4447 : "0" (&_argvec[0]) \
4448 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4449 ); \
4450 lval = (__typeof__(lval)) _res; \
4451 } while (0)
4452
4453 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4454 do { \
4455 volatile OrigFn _orig = (orig); \
4456 volatile unsigned long _argvec[7]; \
4457 volatile unsigned long _res; \
4458 _argvec[0] = (unsigned long)_orig.nraddr; \
4459 _argvec[1] = (unsigned long)(arg1); \
4460 _argvec[2] = (unsigned long)(arg2); \
4461 _argvec[3] = (unsigned long)(arg3); \
4462 _argvec[4] = (unsigned long)(arg4); \
4463 _argvec[5] = (unsigned long)(arg5); \
4464 _argvec[6] = (unsigned long)(arg6); \
4465 __asm__ volatile( \
4466 VALGRIND_ALIGN_STACK \
4467 "ldr x0, [%1, #8] \n\t" \
4468 "ldr x1, [%1, #16] \n\t" \
4469 "ldr x2, [%1, #24] \n\t" \
4470 "ldr x3, [%1, #32] \n\t" \
4471 "ldr x4, [%1, #40] \n\t" \
4472 "ldr x5, [%1, #48] \n\t" \
4473 "ldr x8, [%1] \n\t" \
4474 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4475 VALGRIND_RESTORE_STACK \
4476 "mov %0, x0" \
4477 : "=r" (_res) \
4478 : "0" (&_argvec[0]) \
4479 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4480 ); \
4481 lval = (__typeof__(lval)) _res; \
4482 } while (0)
4483
4484 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4485 arg7) \
4486 do { \
4487 volatile OrigFn _orig = (orig); \
4488 volatile unsigned long _argvec[8]; \
4489 volatile unsigned long _res; \
4490 _argvec[0] = (unsigned long)_orig.nraddr; \
4491 _argvec[1] = (unsigned long)(arg1); \
4492 _argvec[2] = (unsigned long)(arg2); \
4493 _argvec[3] = (unsigned long)(arg3); \
4494 _argvec[4] = (unsigned long)(arg4); \
4495 _argvec[5] = (unsigned long)(arg5); \
4496 _argvec[6] = (unsigned long)(arg6); \
4497 _argvec[7] = (unsigned long)(arg7); \
4498 __asm__ volatile( \
4499 VALGRIND_ALIGN_STACK \
4500 "ldr x0, [%1, #8] \n\t" \
4501 "ldr x1, [%1, #16] \n\t" \
4502 "ldr x2, [%1, #24] \n\t" \
4503 "ldr x3, [%1, #32] \n\t" \
4504 "ldr x4, [%1, #40] \n\t" \
4505 "ldr x5, [%1, #48] \n\t" \
4506 "ldr x6, [%1, #56] \n\t" \
4507 "ldr x8, [%1] \n\t" \
4508 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4509 VALGRIND_RESTORE_STACK \
4510 "mov %0, x0" \
4511 : "=r" (_res) \
4512 : "0" (&_argvec[0]) \
4513 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4514 ); \
4515 lval = (__typeof__(lval)) _res; \
4516 } while (0)
4517
4518 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4519 arg7,arg8) \
4520 do { \
4521 volatile OrigFn _orig = (orig); \
4522 volatile unsigned long _argvec[9]; \
4523 volatile unsigned long _res; \
4524 _argvec[0] = (unsigned long)_orig.nraddr; \
4525 _argvec[1] = (unsigned long)(arg1); \
4526 _argvec[2] = (unsigned long)(arg2); \
4527 _argvec[3] = (unsigned long)(arg3); \
4528 _argvec[4] = (unsigned long)(arg4); \
4529 _argvec[5] = (unsigned long)(arg5); \
4530 _argvec[6] = (unsigned long)(arg6); \
4531 _argvec[7] = (unsigned long)(arg7); \
4532 _argvec[8] = (unsigned long)(arg8); \
4533 __asm__ volatile( \
4534 VALGRIND_ALIGN_STACK \
4535 "ldr x0, [%1, #8] \n\t" \
4536 "ldr x1, [%1, #16] \n\t" \
4537 "ldr x2, [%1, #24] \n\t" \
4538 "ldr x3, [%1, #32] \n\t" \
4539 "ldr x4, [%1, #40] \n\t" \
4540 "ldr x5, [%1, #48] \n\t" \
4541 "ldr x6, [%1, #56] \n\t" \
4542 "ldr x7, [%1, #64] \n\t" \
4543 "ldr x8, [%1] \n\t" \
4544 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4545 VALGRIND_RESTORE_STACK \
4546 "mov %0, x0" \
4547 : "=r" (_res) \
4548 : "0" (&_argvec[0]) \
4549 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4550 ); \
4551 lval = (__typeof__(lval)) _res; \
4552 } while (0)
4553
4554 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4555 arg7,arg8,arg9) \
4556 do { \
4557 volatile OrigFn _orig = (orig); \
4558 volatile unsigned long _argvec[10]; \
4559 volatile unsigned long _res; \
4560 _argvec[0] = (unsigned long)_orig.nraddr; \
4561 _argvec[1] = (unsigned long)(arg1); \
4562 _argvec[2] = (unsigned long)(arg2); \
4563 _argvec[3] = (unsigned long)(arg3); \
4564 _argvec[4] = (unsigned long)(arg4); \
4565 _argvec[5] = (unsigned long)(arg5); \
4566 _argvec[6] = (unsigned long)(arg6); \
4567 _argvec[7] = (unsigned long)(arg7); \
4568 _argvec[8] = (unsigned long)(arg8); \
4569 _argvec[9] = (unsigned long)(arg9); \
4570 __asm__ volatile( \
4571 VALGRIND_ALIGN_STACK \
4572 "sub sp, sp, #0x20 \n\t" \
4573 "ldr x0, [%1, #8] \n\t" \
4574 "ldr x1, [%1, #16] \n\t" \
4575 "ldr x2, [%1, #24] \n\t" \
4576 "ldr x3, [%1, #32] \n\t" \
4577 "ldr x4, [%1, #40] \n\t" \
4578 "ldr x5, [%1, #48] \n\t" \
4579 "ldr x6, [%1, #56] \n\t" \
4580 "ldr x7, [%1, #64] \n\t" \
4581 "ldr x8, [%1, #72] \n\t" \
4582 "str x8, [sp, #0] \n\t" \
4583 "ldr x8, [%1] \n\t" \
4584 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4585 VALGRIND_RESTORE_STACK \
4586 "mov %0, x0" \
4587 : "=r" (_res) \
4588 : "0" (&_argvec[0]) \
4589 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4590 ); \
4591 lval = (__typeof__(lval)) _res; \
4592 } while (0)
4593
4594 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4595 arg7,arg8,arg9,arg10) \
4596 do { \
4597 volatile OrigFn _orig = (orig); \
4598 volatile unsigned long _argvec[11]; \
4599 volatile unsigned long _res; \
4600 _argvec[0] = (unsigned long)_orig.nraddr; \
4601 _argvec[1] = (unsigned long)(arg1); \
4602 _argvec[2] = (unsigned long)(arg2); \
4603 _argvec[3] = (unsigned long)(arg3); \
4604 _argvec[4] = (unsigned long)(arg4); \
4605 _argvec[5] = (unsigned long)(arg5); \
4606 _argvec[6] = (unsigned long)(arg6); \
4607 _argvec[7] = (unsigned long)(arg7); \
4608 _argvec[8] = (unsigned long)(arg8); \
4609 _argvec[9] = (unsigned long)(arg9); \
4610 _argvec[10] = (unsigned long)(arg10); \
4611 __asm__ volatile( \
4612 VALGRIND_ALIGN_STACK \
4613 "sub sp, sp, #0x20 \n\t" \
4614 "ldr x0, [%1, #8] \n\t" \
4615 "ldr x1, [%1, #16] \n\t" \
4616 "ldr x2, [%1, #24] \n\t" \
4617 "ldr x3, [%1, #32] \n\t" \
4618 "ldr x4, [%1, #40] \n\t" \
4619 "ldr x5, [%1, #48] \n\t" \
4620 "ldr x6, [%1, #56] \n\t" \
4621 "ldr x7, [%1, #64] \n\t" \
4622 "ldr x8, [%1, #72] \n\t" \
4623 "str x8, [sp, #0] \n\t" \
4624 "ldr x8, [%1, #80] \n\t" \
4625 "str x8, [sp, #8] \n\t" \
4626 "ldr x8, [%1] \n\t" \
4627 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4628 VALGRIND_RESTORE_STACK \
4629 "mov %0, x0" \
4630 : "=r" (_res) \
4631 : "0" (&_argvec[0]) \
4632 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4633 ); \
4634 lval = (__typeof__(lval)) _res; \
4635 } while (0)
4636
4637 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4638 arg7,arg8,arg9,arg10,arg11) \
4639 do { \
4640 volatile OrigFn _orig = (orig); \
4641 volatile unsigned long _argvec[12]; \
4642 volatile unsigned long _res; \
4643 _argvec[0] = (unsigned long)_orig.nraddr; \
4644 _argvec[1] = (unsigned long)(arg1); \
4645 _argvec[2] = (unsigned long)(arg2); \
4646 _argvec[3] = (unsigned long)(arg3); \
4647 _argvec[4] = (unsigned long)(arg4); \
4648 _argvec[5] = (unsigned long)(arg5); \
4649 _argvec[6] = (unsigned long)(arg6); \
4650 _argvec[7] = (unsigned long)(arg7); \
4651 _argvec[8] = (unsigned long)(arg8); \
4652 _argvec[9] = (unsigned long)(arg9); \
4653 _argvec[10] = (unsigned long)(arg10); \
4654 _argvec[11] = (unsigned long)(arg11); \
4655 __asm__ volatile( \
4656 VALGRIND_ALIGN_STACK \
4657 "sub sp, sp, #0x30 \n\t" \
4658 "ldr x0, [%1, #8] \n\t" \
4659 "ldr x1, [%1, #16] \n\t" \
4660 "ldr x2, [%1, #24] \n\t" \
4661 "ldr x3, [%1, #32] \n\t" \
4662 "ldr x4, [%1, #40] \n\t" \
4663 "ldr x5, [%1, #48] \n\t" \
4664 "ldr x6, [%1, #56] \n\t" \
4665 "ldr x7, [%1, #64] \n\t" \
4666 "ldr x8, [%1, #72] \n\t" \
4667 "str x8, [sp, #0] \n\t" \
4668 "ldr x8, [%1, #80] \n\t" \
4669 "str x8, [sp, #8] \n\t" \
4670 "ldr x8, [%1, #88] \n\t" \
4671 "str x8, [sp, #16] \n\t" \
4672 "ldr x8, [%1] \n\t" \
4673 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4674 VALGRIND_RESTORE_STACK \
4675 "mov %0, x0" \
4676 : "=r" (_res) \
4677 : "0" (&_argvec[0]) \
4678 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4679 ); \
4680 lval = (__typeof__(lval)) _res; \
4681 } while (0)
4682
4683 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4684 arg7,arg8,arg9,arg10,arg11, \
4685 arg12) \
4686 do { \
4687 volatile OrigFn _orig = (orig); \
4688 volatile unsigned long _argvec[13]; \
4689 volatile unsigned long _res; \
4690 _argvec[0] = (unsigned long)_orig.nraddr; \
4691 _argvec[1] = (unsigned long)(arg1); \
4692 _argvec[2] = (unsigned long)(arg2); \
4693 _argvec[3] = (unsigned long)(arg3); \
4694 _argvec[4] = (unsigned long)(arg4); \
4695 _argvec[5] = (unsigned long)(arg5); \
4696 _argvec[6] = (unsigned long)(arg6); \
4697 _argvec[7] = (unsigned long)(arg7); \
4698 _argvec[8] = (unsigned long)(arg8); \
4699 _argvec[9] = (unsigned long)(arg9); \
4700 _argvec[10] = (unsigned long)(arg10); \
4701 _argvec[11] = (unsigned long)(arg11); \
4702 _argvec[12] = (unsigned long)(arg12); \
4703 __asm__ volatile( \
4704 VALGRIND_ALIGN_STACK \
4705 "sub sp, sp, #0x30 \n\t" \
4706 "ldr x0, [%1, #8] \n\t" \
4707 "ldr x1, [%1, #16] \n\t" \
4708 "ldr x2, [%1, #24] \n\t" \
4709 "ldr x3, [%1, #32] \n\t" \
4710 "ldr x4, [%1, #40] \n\t" \
4711 "ldr x5, [%1, #48] \n\t" \
4712 "ldr x6, [%1, #56] \n\t" \
4713 "ldr x7, [%1, #64] \n\t" \
4714 "ldr x8, [%1, #72] \n\t" \
4715 "str x8, [sp, #0] \n\t" \
4716 "ldr x8, [%1, #80] \n\t" \
4717 "str x8, [sp, #8] \n\t" \
4718 "ldr x8, [%1, #88] \n\t" \
4719 "str x8, [sp, #16] \n\t" \
4720 "ldr x8, [%1, #96] \n\t" \
4721 "str x8, [sp, #24] \n\t" \
4722 "ldr x8, [%1] \n\t" \
4723 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4724 VALGRIND_RESTORE_STACK \
4725 "mov %0, x0" \
4726 : "=r" (_res) \
4727 : "0" (&_argvec[0]) \
4728 : "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4729 ); \
4730 lval = (__typeof__(lval)) _res; \
4731 } while (0)
4732
4733 #endif
4734
4735
4736
4737 #if defined(PLAT_s390x_linux)
4738
4739
4740
4741
4742
4743 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
4744 # define __FRAME_POINTER \
4745 ,"d"(__builtin_dwarf_cfa())
4746 # define VALGRIND_CFI_PROLOGUE \
4747 ".cfi_remember_state\n\t" \
4748 "lgr 1,%1\n\t" \
4749 "lgr 7,11\n\t" \
4750 "lgr 11,%2\n\t" \
4751 ".cfi_def_cfa r11, 0\n\t"
4752 # define VALGRIND_CFI_EPILOGUE \
4753 "lgr 11, 7\n\t" \
4754 ".cfi_restore_state\n\t"
4755 #else
4756 # define __FRAME_POINTER
4757 # define VALGRIND_CFI_PROLOGUE \
4758 "lgr 1,%1\n\t"
4759 # define VALGRIND_CFI_EPILOGUE
4760 #endif
4761
4762
4763
4764
4765
4766
4767
4768
4769
4770
4771 #if defined(__VX__) || defined(__S390_VX__)
4772 #define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
4773 "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", \
4774 "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15", \
4775 "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23", \
4776 "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31"
4777 #else
4778 #define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
4779 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7"
4780 #endif
4781
4782
4783
4784
4785
4786
4787
4788
4789
4790
4791 #define CALL_FN_W_v(lval, orig) \
4792 do { \
4793 volatile OrigFn _orig = (orig); \
4794 volatile unsigned long _argvec[1]; \
4795 volatile unsigned long _res; \
4796 _argvec[0] = (unsigned long)_orig.nraddr; \
4797 __asm__ volatile( \
4798 VALGRIND_CFI_PROLOGUE \
4799 "aghi 15,-160\n\t" \
4800 "lg 1, 0(1)\n\t" \
4801 VALGRIND_CALL_NOREDIR_R1 \
4802 "aghi 15,160\n\t" \
4803 VALGRIND_CFI_EPILOGUE \
4804 "lgr %0, 2\n\t" \
4805 : "=d" (_res) \
4806 : "d" (&_argvec[0]) __FRAME_POINTER \
4807 : "cc", "memory", __CALLER_SAVED_REGS,"7" \
4808 ); \
4809 lval = (__typeof__(lval)) _res; \
4810 } while (0)
4811
4812
4813 #define CALL_FN_W_W(lval, orig, arg1) \
4814 do { \
4815 volatile OrigFn _orig = (orig); \
4816 volatile unsigned long _argvec[2]; \
4817 volatile unsigned long _res; \
4818 _argvec[0] = (unsigned long)_orig.nraddr; \
4819 _argvec[1] = (unsigned long)arg1; \
4820 __asm__ volatile( \
4821 VALGRIND_CFI_PROLOGUE \
4822 "aghi 15,-160\n\t" \
4823 "lg 2, 8(1)\n\t" \
4824 "lg 1, 0(1)\n\t" \
4825 VALGRIND_CALL_NOREDIR_R1 \
4826 "aghi 15,160\n\t" \
4827 VALGRIND_CFI_EPILOGUE \
4828 "lgr %0, 2\n\t" \
4829 : "=d" (_res) \
4830 : "a" (&_argvec[0]) __FRAME_POINTER \
4831 : "cc", "memory", __CALLER_SAVED_REGS,"7" \
4832 ); \
4833 lval = (__typeof__(lval)) _res; \
4834 } while (0)
4835
4836 #define CALL_FN_W_WW(lval, orig, arg1, arg2) \
4837 do { \
4838 volatile OrigFn _orig = (orig); \
4839 volatile unsigned long _argvec[3]; \
4840 volatile unsigned long _res; \
4841 _argvec[0] = (unsigned long)_orig.nraddr; \
4842 _argvec[1] = (unsigned long)arg1; \
4843 _argvec[2] = (unsigned long)arg2; \
4844 __asm__ volatile( \
4845 VALGRIND_CFI_PROLOGUE \
4846 "aghi 15,-160\n\t" \
4847 "lg 2, 8(1)\n\t" \
4848 "lg 3,16(1)\n\t" \
4849 "lg 1, 0(1)\n\t" \
4850 VALGRIND_CALL_NOREDIR_R1 \
4851 "aghi 15,160\n\t" \
4852 VALGRIND_CFI_EPILOGUE \
4853 "lgr %0, 2\n\t" \
4854 : "=d" (_res) \
4855 : "a" (&_argvec[0]) __FRAME_POINTER \
4856 : "cc", "memory", __CALLER_SAVED_REGS,"7" \
4857 ); \
4858 lval = (__typeof__(lval)) _res; \
4859 } while (0)
4860
4861 #define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
4862 do { \
4863 volatile OrigFn _orig = (orig); \
4864 volatile unsigned long _argvec[4]; \
4865 volatile unsigned long _res; \
4866 _argvec[0] = (unsigned long)_orig.nraddr; \
4867 _argvec[1] = (unsigned long)arg1; \
4868 _argvec[2] = (unsigned long)arg2; \
4869 _argvec[3] = (unsigned long)arg3; \
4870 __asm__ volatile( \
4871 VALGRIND_CFI_PROLOGUE \
4872 "aghi 15,-160\n\t" \
4873 "lg 2, 8(1)\n\t" \
4874 "lg 3,16(1)\n\t" \
4875 "lg 4,24(1)\n\t" \
4876 "lg 1, 0(1)\n\t" \
4877 VALGRIND_CALL_NOREDIR_R1 \
4878 "aghi 15,160\n\t" \
4879 VALGRIND_CFI_EPILOGUE \
4880 "lgr %0, 2\n\t" \
4881 : "=d" (_res) \
4882 : "a" (&_argvec[0]) __FRAME_POINTER \
4883 : "cc", "memory", __CALLER_SAVED_REGS,"7" \
4884 ); \
4885 lval = (__typeof__(lval)) _res; \
4886 } while (0)
4887
4888 #define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
4889 do { \
4890 volatile OrigFn _orig = (orig); \
4891 volatile unsigned long _argvec[5]; \
4892 volatile unsigned long _res; \
4893 _argvec[0] = (unsigned long)_orig.nraddr; \
4894 _argvec[1] = (unsigned long)arg1; \
4895 _argvec[2] = (unsigned long)arg2; \
4896 _argvec[3] = (unsigned long)arg3; \
4897 _argvec[4] = (unsigned long)arg4; \
4898 __asm__ volatile( \
4899 VALGRIND_CFI_PROLOGUE \
4900 "aghi 15,-160\n\t" \
4901 "lg 2, 8(1)\n\t" \
4902 "lg 3,16(1)\n\t" \
4903 "lg 4,24(1)\n\t" \
4904 "lg 5,32(1)\n\t" \
4905 "lg 1, 0(1)\n\t" \
4906 VALGRIND_CALL_NOREDIR_R1 \
4907 "aghi 15,160\n\t" \
4908 VALGRIND_CFI_EPILOGUE \
4909 "lgr %0, 2\n\t" \
4910 : "=d" (_res) \
4911 : "a" (&_argvec[0]) __FRAME_POINTER \
4912 : "cc", "memory", __CALLER_SAVED_REGS,"7" \
4913 ); \
4914 lval = (__typeof__(lval)) _res; \
4915 } while (0)
4916
4917 #define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
4918 do { \
4919 volatile OrigFn _orig = (orig); \
4920 volatile unsigned long _argvec[6]; \
4921 volatile unsigned long _res; \
4922 _argvec[0] = (unsigned long)_orig.nraddr; \
4923 _argvec[1] = (unsigned long)arg1; \
4924 _argvec[2] = (unsigned long)arg2; \
4925 _argvec[3] = (unsigned long)arg3; \
4926 _argvec[4] = (unsigned long)arg4; \
4927 _argvec[5] = (unsigned long)arg5; \
4928 __asm__ volatile( \
4929 VALGRIND_CFI_PROLOGUE \
4930 "aghi 15,-160\n\t" \
4931 "lg 2, 8(1)\n\t" \
4932 "lg 3,16(1)\n\t" \
4933 "lg 4,24(1)\n\t" \
4934 "lg 5,32(1)\n\t" \
4935 "lg 6,40(1)\n\t" \
4936 "lg 1, 0(1)\n\t" \
4937 VALGRIND_CALL_NOREDIR_R1 \
4938 "aghi 15,160\n\t" \
4939 VALGRIND_CFI_EPILOGUE \
4940 "lgr %0, 2\n\t" \
4941 : "=d" (_res) \
4942 : "a" (&_argvec[0]) __FRAME_POINTER \
4943 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4944 ); \
4945 lval = (__typeof__(lval)) _res; \
4946 } while (0)
4947
4948 #define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4949 arg6) \
4950 do { \
4951 volatile OrigFn _orig = (orig); \
4952 volatile unsigned long _argvec[7]; \
4953 volatile unsigned long _res; \
4954 _argvec[0] = (unsigned long)_orig.nraddr; \
4955 _argvec[1] = (unsigned long)arg1; \
4956 _argvec[2] = (unsigned long)arg2; \
4957 _argvec[3] = (unsigned long)arg3; \
4958 _argvec[4] = (unsigned long)arg4; \
4959 _argvec[5] = (unsigned long)arg5; \
4960 _argvec[6] = (unsigned long)arg6; \
4961 __asm__ volatile( \
4962 VALGRIND_CFI_PROLOGUE \
4963 "aghi 15,-168\n\t" \
4964 "lg 2, 8(1)\n\t" \
4965 "lg 3,16(1)\n\t" \
4966 "lg 4,24(1)\n\t" \
4967 "lg 5,32(1)\n\t" \
4968 "lg 6,40(1)\n\t" \
4969 "mvc 160(8,15), 48(1)\n\t" \
4970 "lg 1, 0(1)\n\t" \
4971 VALGRIND_CALL_NOREDIR_R1 \
4972 "aghi 15,168\n\t" \
4973 VALGRIND_CFI_EPILOGUE \
4974 "lgr %0, 2\n\t" \
4975 : "=d" (_res) \
4976 : "a" (&_argvec[0]) __FRAME_POINTER \
4977 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4978 ); \
4979 lval = (__typeof__(lval)) _res; \
4980 } while (0)
4981
4982 #define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4983 arg6, arg7) \
4984 do { \
4985 volatile OrigFn _orig = (orig); \
4986 volatile unsigned long _argvec[8]; \
4987 volatile unsigned long _res; \
4988 _argvec[0] = (unsigned long)_orig.nraddr; \
4989 _argvec[1] = (unsigned long)arg1; \
4990 _argvec[2] = (unsigned long)arg2; \
4991 _argvec[3] = (unsigned long)arg3; \
4992 _argvec[4] = (unsigned long)arg4; \
4993 _argvec[5] = (unsigned long)arg5; \
4994 _argvec[6] = (unsigned long)arg6; \
4995 _argvec[7] = (unsigned long)arg7; \
4996 __asm__ volatile( \
4997 VALGRIND_CFI_PROLOGUE \
4998 "aghi 15,-176\n\t" \
4999 "lg 2, 8(1)\n\t" \
5000 "lg 3,16(1)\n\t" \
5001 "lg 4,24(1)\n\t" \
5002 "lg 5,32(1)\n\t" \
5003 "lg 6,40(1)\n\t" \
5004 "mvc 160(8,15), 48(1)\n\t" \
5005 "mvc 168(8,15), 56(1)\n\t" \
5006 "lg 1, 0(1)\n\t" \
5007 VALGRIND_CALL_NOREDIR_R1 \
5008 "aghi 15,176\n\t" \
5009 VALGRIND_CFI_EPILOGUE \
5010 "lgr %0, 2\n\t" \
5011 : "=d" (_res) \
5012 : "a" (&_argvec[0]) __FRAME_POINTER \
5013 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5014 ); \
5015 lval = (__typeof__(lval)) _res; \
5016 } while (0)
5017
5018 #define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5019 arg6, arg7 ,arg8) \
5020 do { \
5021 volatile OrigFn _orig = (orig); \
5022 volatile unsigned long _argvec[9]; \
5023 volatile unsigned long _res; \
5024 _argvec[0] = (unsigned long)_orig.nraddr; \
5025 _argvec[1] = (unsigned long)arg1; \
5026 _argvec[2] = (unsigned long)arg2; \
5027 _argvec[3] = (unsigned long)arg3; \
5028 _argvec[4] = (unsigned long)arg4; \
5029 _argvec[5] = (unsigned long)arg5; \
5030 _argvec[6] = (unsigned long)arg6; \
5031 _argvec[7] = (unsigned long)arg7; \
5032 _argvec[8] = (unsigned long)arg8; \
5033 __asm__ volatile( \
5034 VALGRIND_CFI_PROLOGUE \
5035 "aghi 15,-184\n\t" \
5036 "lg 2, 8(1)\n\t" \
5037 "lg 3,16(1)\n\t" \
5038 "lg 4,24(1)\n\t" \
5039 "lg 5,32(1)\n\t" \
5040 "lg 6,40(1)\n\t" \
5041 "mvc 160(8,15), 48(1)\n\t" \
5042 "mvc 168(8,15), 56(1)\n\t" \
5043 "mvc 176(8,15), 64(1)\n\t" \
5044 "lg 1, 0(1)\n\t" \
5045 VALGRIND_CALL_NOREDIR_R1 \
5046 "aghi 15,184\n\t" \
5047 VALGRIND_CFI_EPILOGUE \
5048 "lgr %0, 2\n\t" \
5049 : "=d" (_res) \
5050 : "a" (&_argvec[0]) __FRAME_POINTER \
5051 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5052 ); \
5053 lval = (__typeof__(lval)) _res; \
5054 } while (0)
5055
5056 #define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5057 arg6, arg7 ,arg8, arg9) \
5058 do { \
5059 volatile OrigFn _orig = (orig); \
5060 volatile unsigned long _argvec[10]; \
5061 volatile unsigned long _res; \
5062 _argvec[0] = (unsigned long)_orig.nraddr; \
5063 _argvec[1] = (unsigned long)arg1; \
5064 _argvec[2] = (unsigned long)arg2; \
5065 _argvec[3] = (unsigned long)arg3; \
5066 _argvec[4] = (unsigned long)arg4; \
5067 _argvec[5] = (unsigned long)arg5; \
5068 _argvec[6] = (unsigned long)arg6; \
5069 _argvec[7] = (unsigned long)arg7; \
5070 _argvec[8] = (unsigned long)arg8; \
5071 _argvec[9] = (unsigned long)arg9; \
5072 __asm__ volatile( \
5073 VALGRIND_CFI_PROLOGUE \
5074 "aghi 15,-192\n\t" \
5075 "lg 2, 8(1)\n\t" \
5076 "lg 3,16(1)\n\t" \
5077 "lg 4,24(1)\n\t" \
5078 "lg 5,32(1)\n\t" \
5079 "lg 6,40(1)\n\t" \
5080 "mvc 160(8,15), 48(1)\n\t" \
5081 "mvc 168(8,15), 56(1)\n\t" \
5082 "mvc 176(8,15), 64(1)\n\t" \
5083 "mvc 184(8,15), 72(1)\n\t" \
5084 "lg 1, 0(1)\n\t" \
5085 VALGRIND_CALL_NOREDIR_R1 \
5086 "aghi 15,192\n\t" \
5087 VALGRIND_CFI_EPILOGUE \
5088 "lgr %0, 2\n\t" \
5089 : "=d" (_res) \
5090 : "a" (&_argvec[0]) __FRAME_POINTER \
5091 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5092 ); \
5093 lval = (__typeof__(lval)) _res; \
5094 } while (0)
5095
5096 #define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5097 arg6, arg7 ,arg8, arg9, arg10) \
5098 do { \
5099 volatile OrigFn _orig = (orig); \
5100 volatile unsigned long _argvec[11]; \
5101 volatile unsigned long _res; \
5102 _argvec[0] = (unsigned long)_orig.nraddr; \
5103 _argvec[1] = (unsigned long)arg1; \
5104 _argvec[2] = (unsigned long)arg2; \
5105 _argvec[3] = (unsigned long)arg3; \
5106 _argvec[4] = (unsigned long)arg4; \
5107 _argvec[5] = (unsigned long)arg5; \
5108 _argvec[6] = (unsigned long)arg6; \
5109 _argvec[7] = (unsigned long)arg7; \
5110 _argvec[8] = (unsigned long)arg8; \
5111 _argvec[9] = (unsigned long)arg9; \
5112 _argvec[10] = (unsigned long)arg10; \
5113 __asm__ volatile( \
5114 VALGRIND_CFI_PROLOGUE \
5115 "aghi 15,-200\n\t" \
5116 "lg 2, 8(1)\n\t" \
5117 "lg 3,16(1)\n\t" \
5118 "lg 4,24(1)\n\t" \
5119 "lg 5,32(1)\n\t" \
5120 "lg 6,40(1)\n\t" \
5121 "mvc 160(8,15), 48(1)\n\t" \
5122 "mvc 168(8,15), 56(1)\n\t" \
5123 "mvc 176(8,15), 64(1)\n\t" \
5124 "mvc 184(8,15), 72(1)\n\t" \
5125 "mvc 192(8,15), 80(1)\n\t" \
5126 "lg 1, 0(1)\n\t" \
5127 VALGRIND_CALL_NOREDIR_R1 \
5128 "aghi 15,200\n\t" \
5129 VALGRIND_CFI_EPILOGUE \
5130 "lgr %0, 2\n\t" \
5131 : "=d" (_res) \
5132 : "a" (&_argvec[0]) __FRAME_POINTER \
5133 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5134 ); \
5135 lval = (__typeof__(lval)) _res; \
5136 } while (0)
5137
5138 #define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5139 arg6, arg7 ,arg8, arg9, arg10, arg11) \
5140 do { \
5141 volatile OrigFn _orig = (orig); \
5142 volatile unsigned long _argvec[12]; \
5143 volatile unsigned long _res; \
5144 _argvec[0] = (unsigned long)_orig.nraddr; \
5145 _argvec[1] = (unsigned long)arg1; \
5146 _argvec[2] = (unsigned long)arg2; \
5147 _argvec[3] = (unsigned long)arg3; \
5148 _argvec[4] = (unsigned long)arg4; \
5149 _argvec[5] = (unsigned long)arg5; \
5150 _argvec[6] = (unsigned long)arg6; \
5151 _argvec[7] = (unsigned long)arg7; \
5152 _argvec[8] = (unsigned long)arg8; \
5153 _argvec[9] = (unsigned long)arg9; \
5154 _argvec[10] = (unsigned long)arg10; \
5155 _argvec[11] = (unsigned long)arg11; \
5156 __asm__ volatile( \
5157 VALGRIND_CFI_PROLOGUE \
5158 "aghi 15,-208\n\t" \
5159 "lg 2, 8(1)\n\t" \
5160 "lg 3,16(1)\n\t" \
5161 "lg 4,24(1)\n\t" \
5162 "lg 5,32(1)\n\t" \
5163 "lg 6,40(1)\n\t" \
5164 "mvc 160(8,15), 48(1)\n\t" \
5165 "mvc 168(8,15), 56(1)\n\t" \
5166 "mvc 176(8,15), 64(1)\n\t" \
5167 "mvc 184(8,15), 72(1)\n\t" \
5168 "mvc 192(8,15), 80(1)\n\t" \
5169 "mvc 200(8,15), 88(1)\n\t" \
5170 "lg 1, 0(1)\n\t" \
5171 VALGRIND_CALL_NOREDIR_R1 \
5172 "aghi 15,208\n\t" \
5173 VALGRIND_CFI_EPILOGUE \
5174 "lgr %0, 2\n\t" \
5175 : "=d" (_res) \
5176 : "a" (&_argvec[0]) __FRAME_POINTER \
5177 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5178 ); \
5179 lval = (__typeof__(lval)) _res; \
5180 } while (0)
5181
5182 #define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5183 arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
5184 do { \
5185 volatile OrigFn _orig = (orig); \
5186 volatile unsigned long _argvec[13]; \
5187 volatile unsigned long _res; \
5188 _argvec[0] = (unsigned long)_orig.nraddr; \
5189 _argvec[1] = (unsigned long)arg1; \
5190 _argvec[2] = (unsigned long)arg2; \
5191 _argvec[3] = (unsigned long)arg3; \
5192 _argvec[4] = (unsigned long)arg4; \
5193 _argvec[5] = (unsigned long)arg5; \
5194 _argvec[6] = (unsigned long)arg6; \
5195 _argvec[7] = (unsigned long)arg7; \
5196 _argvec[8] = (unsigned long)arg8; \
5197 _argvec[9] = (unsigned long)arg9; \
5198 _argvec[10] = (unsigned long)arg10; \
5199 _argvec[11] = (unsigned long)arg11; \
5200 _argvec[12] = (unsigned long)arg12; \
5201 __asm__ volatile( \
5202 VALGRIND_CFI_PROLOGUE \
5203 "aghi 15,-216\n\t" \
5204 "lg 2, 8(1)\n\t" \
5205 "lg 3,16(1)\n\t" \
5206 "lg 4,24(1)\n\t" \
5207 "lg 5,32(1)\n\t" \
5208 "lg 6,40(1)\n\t" \
5209 "mvc 160(8,15), 48(1)\n\t" \
5210 "mvc 168(8,15), 56(1)\n\t" \
5211 "mvc 176(8,15), 64(1)\n\t" \
5212 "mvc 184(8,15), 72(1)\n\t" \
5213 "mvc 192(8,15), 80(1)\n\t" \
5214 "mvc 200(8,15), 88(1)\n\t" \
5215 "mvc 208(8,15), 96(1)\n\t" \
5216 "lg 1, 0(1)\n\t" \
5217 VALGRIND_CALL_NOREDIR_R1 \
5218 "aghi 15,216\n\t" \
5219 VALGRIND_CFI_EPILOGUE \
5220 "lgr %0, 2\n\t" \
5221 : "=d" (_res) \
5222 : "a" (&_argvec[0]) __FRAME_POINTER \
5223 : "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5224 ); \
5225 lval = (__typeof__(lval)) _res; \
5226 } while (0)
5227
5228
5229 #endif
5230
5231
5232
5233 #if defined(PLAT_mips32_linux)
5234
5235
5236 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
5237 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
5238 "$25", "$31"
5239
5240
5241
5242
5243 #define CALL_FN_W_v(lval, orig) \
5244 do { \
5245 volatile OrigFn _orig = (orig); \
5246 volatile unsigned long _argvec[1]; \
5247 volatile unsigned long _res; \
5248 _argvec[0] = (unsigned long)_orig.nraddr; \
5249 __asm__ volatile( \
5250 "subu $29, $29, 8 \n\t" \
5251 "sw $28, 0($29) \n\t" \
5252 "sw $31, 4($29) \n\t" \
5253 "subu $29, $29, 16 \n\t" \
5254 "lw $25, 0(%1) \n\t" \
5255 VALGRIND_CALL_NOREDIR_T9 \
5256 "addu $29, $29, 16\n\t" \
5257 "lw $28, 0($29) \n\t" \
5258 "lw $31, 4($29) \n\t" \
5259 "addu $29, $29, 8 \n\t" \
5260 "move %0, $2\n" \
5261 : "=r" (_res) \
5262 : "0" (&_argvec[0]) \
5263 : "memory", __CALLER_SAVED_REGS \
5264 ); \
5265 lval = (__typeof__(lval)) _res; \
5266 } while (0)
5267
5268 #define CALL_FN_W_W(lval, orig, arg1) \
5269 do { \
5270 volatile OrigFn _orig = (orig); \
5271 volatile unsigned long _argvec[2]; \
5272 volatile unsigned long _res; \
5273 _argvec[0] = (unsigned long)_orig.nraddr; \
5274 _argvec[1] = (unsigned long)(arg1); \
5275 __asm__ volatile( \
5276 "subu $29, $29, 8 \n\t" \
5277 "sw $28, 0($29) \n\t" \
5278 "sw $31, 4($29) \n\t" \
5279 "subu $29, $29, 16 \n\t" \
5280 "lw $4, 4(%1) \n\t" \
5281 "lw $25, 0(%1) \n\t" \
5282 VALGRIND_CALL_NOREDIR_T9 \
5283 "addu $29, $29, 16 \n\t" \
5284 "lw $28, 0($29) \n\t" \
5285 "lw $31, 4($29) \n\t" \
5286 "addu $29, $29, 8 \n\t" \
5287 "move %0, $2\n" \
5288 : "=r" (_res) \
5289 : "0" (&_argvec[0]) \
5290 : "memory", __CALLER_SAVED_REGS \
5291 ); \
5292 lval = (__typeof__(lval)) _res; \
5293 } while (0)
5294
5295 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5296 do { \
5297 volatile OrigFn _orig = (orig); \
5298 volatile unsigned long _argvec[3]; \
5299 volatile unsigned long _res; \
5300 _argvec[0] = (unsigned long)_orig.nraddr; \
5301 _argvec[1] = (unsigned long)(arg1); \
5302 _argvec[2] = (unsigned long)(arg2); \
5303 __asm__ volatile( \
5304 "subu $29, $29, 8 \n\t" \
5305 "sw $28, 0($29) \n\t" \
5306 "sw $31, 4($29) \n\t" \
5307 "subu $29, $29, 16 \n\t" \
5308 "lw $4, 4(%1) \n\t" \
5309 "lw $5, 8(%1) \n\t" \
5310 "lw $25, 0(%1) \n\t" \
5311 VALGRIND_CALL_NOREDIR_T9 \
5312 "addu $29, $29, 16 \n\t" \
5313 "lw $28, 0($29) \n\t" \
5314 "lw $31, 4($29) \n\t" \
5315 "addu $29, $29, 8 \n\t" \
5316 "move %0, $2\n" \
5317 : "=r" (_res) \
5318 : "0" (&_argvec[0]) \
5319 : "memory", __CALLER_SAVED_REGS \
5320 ); \
5321 lval = (__typeof__(lval)) _res; \
5322 } while (0)
5323
5324 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5325 do { \
5326 volatile OrigFn _orig = (orig); \
5327 volatile unsigned long _argvec[4]; \
5328 volatile unsigned long _res; \
5329 _argvec[0] = (unsigned long)_orig.nraddr; \
5330 _argvec[1] = (unsigned long)(arg1); \
5331 _argvec[2] = (unsigned long)(arg2); \
5332 _argvec[3] = (unsigned long)(arg3); \
5333 __asm__ volatile( \
5334 "subu $29, $29, 8 \n\t" \
5335 "sw $28, 0($29) \n\t" \
5336 "sw $31, 4($29) \n\t" \
5337 "subu $29, $29, 16 \n\t" \
5338 "lw $4, 4(%1) \n\t" \
5339 "lw $5, 8(%1) \n\t" \
5340 "lw $6, 12(%1) \n\t" \
5341 "lw $25, 0(%1) \n\t" \
5342 VALGRIND_CALL_NOREDIR_T9 \
5343 "addu $29, $29, 16 \n\t" \
5344 "lw $28, 0($29) \n\t" \
5345 "lw $31, 4($29) \n\t" \
5346 "addu $29, $29, 8 \n\t" \
5347 "move %0, $2\n" \
5348 : "=r" (_res) \
5349 : "0" (&_argvec[0]) \
5350 : "memory", __CALLER_SAVED_REGS \
5351 ); \
5352 lval = (__typeof__(lval)) _res; \
5353 } while (0)
5354
5355 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5356 do { \
5357 volatile OrigFn _orig = (orig); \
5358 volatile unsigned long _argvec[5]; \
5359 volatile unsigned long _res; \
5360 _argvec[0] = (unsigned long)_orig.nraddr; \
5361 _argvec[1] = (unsigned long)(arg1); \
5362 _argvec[2] = (unsigned long)(arg2); \
5363 _argvec[3] = (unsigned long)(arg3); \
5364 _argvec[4] = (unsigned long)(arg4); \
5365 __asm__ volatile( \
5366 "subu $29, $29, 8 \n\t" \
5367 "sw $28, 0($29) \n\t" \
5368 "sw $31, 4($29) \n\t" \
5369 "subu $29, $29, 16 \n\t" \
5370 "lw $4, 4(%1) \n\t" \
5371 "lw $5, 8(%1) \n\t" \
5372 "lw $6, 12(%1) \n\t" \
5373 "lw $7, 16(%1) \n\t" \
5374 "lw $25, 0(%1) \n\t" \
5375 VALGRIND_CALL_NOREDIR_T9 \
5376 "addu $29, $29, 16 \n\t" \
5377 "lw $28, 0($29) \n\t" \
5378 "lw $31, 4($29) \n\t" \
5379 "addu $29, $29, 8 \n\t" \
5380 "move %0, $2\n" \
5381 : "=r" (_res) \
5382 : "0" (&_argvec[0]) \
5383 : "memory", __CALLER_SAVED_REGS \
5384 ); \
5385 lval = (__typeof__(lval)) _res; \
5386 } while (0)
5387
5388 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5389 do { \
5390 volatile OrigFn _orig = (orig); \
5391 volatile unsigned long _argvec[6]; \
5392 volatile unsigned long _res; \
5393 _argvec[0] = (unsigned long)_orig.nraddr; \
5394 _argvec[1] = (unsigned long)(arg1); \
5395 _argvec[2] = (unsigned long)(arg2); \
5396 _argvec[3] = (unsigned long)(arg3); \
5397 _argvec[4] = (unsigned long)(arg4); \
5398 _argvec[5] = (unsigned long)(arg5); \
5399 __asm__ volatile( \
5400 "subu $29, $29, 8 \n\t" \
5401 "sw $28, 0($29) \n\t" \
5402 "sw $31, 4($29) \n\t" \
5403 "lw $4, 20(%1) \n\t" \
5404 "subu $29, $29, 24\n\t" \
5405 "sw $4, 16($29) \n\t" \
5406 "lw $4, 4(%1) \n\t" \
5407 "lw $5, 8(%1) \n\t" \
5408 "lw $6, 12(%1) \n\t" \
5409 "lw $7, 16(%1) \n\t" \
5410 "lw $25, 0(%1) \n\t" \
5411 VALGRIND_CALL_NOREDIR_T9 \
5412 "addu $29, $29, 24 \n\t" \
5413 "lw $28, 0($29) \n\t" \
5414 "lw $31, 4($29) \n\t" \
5415 "addu $29, $29, 8 \n\t" \
5416 "move %0, $2\n" \
5417 : "=r" (_res) \
5418 : "0" (&_argvec[0]) \
5419 : "memory", __CALLER_SAVED_REGS \
5420 ); \
5421 lval = (__typeof__(lval)) _res; \
5422 } while (0)
5423 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5424 do { \
5425 volatile OrigFn _orig = (orig); \
5426 volatile unsigned long _argvec[7]; \
5427 volatile unsigned long _res; \
5428 _argvec[0] = (unsigned long)_orig.nraddr; \
5429 _argvec[1] = (unsigned long)(arg1); \
5430 _argvec[2] = (unsigned long)(arg2); \
5431 _argvec[3] = (unsigned long)(arg3); \
5432 _argvec[4] = (unsigned long)(arg4); \
5433 _argvec[5] = (unsigned long)(arg5); \
5434 _argvec[6] = (unsigned long)(arg6); \
5435 __asm__ volatile( \
5436 "subu $29, $29, 8 \n\t" \
5437 "sw $28, 0($29) \n\t" \
5438 "sw $31, 4($29) \n\t" \
5439 "lw $4, 20(%1) \n\t" \
5440 "subu $29, $29, 32\n\t" \
5441 "sw $4, 16($29) \n\t" \
5442 "lw $4, 24(%1) \n\t" \
5443 "nop\n\t" \
5444 "sw $4, 20($29) \n\t" \
5445 "lw $4, 4(%1) \n\t" \
5446 "lw $5, 8(%1) \n\t" \
5447 "lw $6, 12(%1) \n\t" \
5448 "lw $7, 16(%1) \n\t" \
5449 "lw $25, 0(%1) \n\t" \
5450 VALGRIND_CALL_NOREDIR_T9 \
5451 "addu $29, $29, 32 \n\t" \
5452 "lw $28, 0($29) \n\t" \
5453 "lw $31, 4($29) \n\t" \
5454 "addu $29, $29, 8 \n\t" \
5455 "move %0, $2\n" \
5456 : "=r" (_res) \
5457 : "0" (&_argvec[0]) \
5458 : "memory", __CALLER_SAVED_REGS \
5459 ); \
5460 lval = (__typeof__(lval)) _res; \
5461 } while (0)
5462
5463 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5464 arg7) \
5465 do { \
5466 volatile OrigFn _orig = (orig); \
5467 volatile unsigned long _argvec[8]; \
5468 volatile unsigned long _res; \
5469 _argvec[0] = (unsigned long)_orig.nraddr; \
5470 _argvec[1] = (unsigned long)(arg1); \
5471 _argvec[2] = (unsigned long)(arg2); \
5472 _argvec[3] = (unsigned long)(arg3); \
5473 _argvec[4] = (unsigned long)(arg4); \
5474 _argvec[5] = (unsigned long)(arg5); \
5475 _argvec[6] = (unsigned long)(arg6); \
5476 _argvec[7] = (unsigned long)(arg7); \
5477 __asm__ volatile( \
5478 "subu $29, $29, 8 \n\t" \
5479 "sw $28, 0($29) \n\t" \
5480 "sw $31, 4($29) \n\t" \
5481 "lw $4, 20(%1) \n\t" \
5482 "subu $29, $29, 32\n\t" \
5483 "sw $4, 16($29) \n\t" \
5484 "lw $4, 24(%1) \n\t" \
5485 "sw $4, 20($29) \n\t" \
5486 "lw $4, 28(%1) \n\t" \
5487 "sw $4, 24($29) \n\t" \
5488 "lw $4, 4(%1) \n\t" \
5489 "lw $5, 8(%1) \n\t" \
5490 "lw $6, 12(%1) \n\t" \
5491 "lw $7, 16(%1) \n\t" \
5492 "lw $25, 0(%1) \n\t" \
5493 VALGRIND_CALL_NOREDIR_T9 \
5494 "addu $29, $29, 32 \n\t" \
5495 "lw $28, 0($29) \n\t" \
5496 "lw $31, 4($29) \n\t" \
5497 "addu $29, $29, 8 \n\t" \
5498 "move %0, $2\n" \
5499 : "=r" (_res) \
5500 : "0" (&_argvec[0]) \
5501 : "memory", __CALLER_SAVED_REGS \
5502 ); \
5503 lval = (__typeof__(lval)) _res; \
5504 } while (0)
5505
5506 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5507 arg7,arg8) \
5508 do { \
5509 volatile OrigFn _orig = (orig); \
5510 volatile unsigned long _argvec[9]; \
5511 volatile unsigned long _res; \
5512 _argvec[0] = (unsigned long)_orig.nraddr; \
5513 _argvec[1] = (unsigned long)(arg1); \
5514 _argvec[2] = (unsigned long)(arg2); \
5515 _argvec[3] = (unsigned long)(arg3); \
5516 _argvec[4] = (unsigned long)(arg4); \
5517 _argvec[5] = (unsigned long)(arg5); \
5518 _argvec[6] = (unsigned long)(arg6); \
5519 _argvec[7] = (unsigned long)(arg7); \
5520 _argvec[8] = (unsigned long)(arg8); \
5521 __asm__ volatile( \
5522 "subu $29, $29, 8 \n\t" \
5523 "sw $28, 0($29) \n\t" \
5524 "sw $31, 4($29) \n\t" \
5525 "lw $4, 20(%1) \n\t" \
5526 "subu $29, $29, 40\n\t" \
5527 "sw $4, 16($29) \n\t" \
5528 "lw $4, 24(%1) \n\t" \
5529 "sw $4, 20($29) \n\t" \
5530 "lw $4, 28(%1) \n\t" \
5531 "sw $4, 24($29) \n\t" \
5532 "lw $4, 32(%1) \n\t" \
5533 "sw $4, 28($29) \n\t" \
5534 "lw $4, 4(%1) \n\t" \
5535 "lw $5, 8(%1) \n\t" \
5536 "lw $6, 12(%1) \n\t" \
5537 "lw $7, 16(%1) \n\t" \
5538 "lw $25, 0(%1) \n\t" \
5539 VALGRIND_CALL_NOREDIR_T9 \
5540 "addu $29, $29, 40 \n\t" \
5541 "lw $28, 0($29) \n\t" \
5542 "lw $31, 4($29) \n\t" \
5543 "addu $29, $29, 8 \n\t" \
5544 "move %0, $2\n" \
5545 : "=r" (_res) \
5546 : "0" (&_argvec[0]) \
5547 : "memory", __CALLER_SAVED_REGS \
5548 ); \
5549 lval = (__typeof__(lval)) _res; \
5550 } while (0)
5551
5552 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5553 arg7,arg8,arg9) \
5554 do { \
5555 volatile OrigFn _orig = (orig); \
5556 volatile unsigned long _argvec[10]; \
5557 volatile unsigned long _res; \
5558 _argvec[0] = (unsigned long)_orig.nraddr; \
5559 _argvec[1] = (unsigned long)(arg1); \
5560 _argvec[2] = (unsigned long)(arg2); \
5561 _argvec[3] = (unsigned long)(arg3); \
5562 _argvec[4] = (unsigned long)(arg4); \
5563 _argvec[5] = (unsigned long)(arg5); \
5564 _argvec[6] = (unsigned long)(arg6); \
5565 _argvec[7] = (unsigned long)(arg7); \
5566 _argvec[8] = (unsigned long)(arg8); \
5567 _argvec[9] = (unsigned long)(arg9); \
5568 __asm__ volatile( \
5569 "subu $29, $29, 8 \n\t" \
5570 "sw $28, 0($29) \n\t" \
5571 "sw $31, 4($29) \n\t" \
5572 "lw $4, 20(%1) \n\t" \
5573 "subu $29, $29, 40\n\t" \
5574 "sw $4, 16($29) \n\t" \
5575 "lw $4, 24(%1) \n\t" \
5576 "sw $4, 20($29) \n\t" \
5577 "lw $4, 28(%1) \n\t" \
5578 "sw $4, 24($29) \n\t" \
5579 "lw $4, 32(%1) \n\t" \
5580 "sw $4, 28($29) \n\t" \
5581 "lw $4, 36(%1) \n\t" \
5582 "sw $4, 32($29) \n\t" \
5583 "lw $4, 4(%1) \n\t" \
5584 "lw $5, 8(%1) \n\t" \
5585 "lw $6, 12(%1) \n\t" \
5586 "lw $7, 16(%1) \n\t" \
5587 "lw $25, 0(%1) \n\t" \
5588 VALGRIND_CALL_NOREDIR_T9 \
5589 "addu $29, $29, 40 \n\t" \
5590 "lw $28, 0($29) \n\t" \
5591 "lw $31, 4($29) \n\t" \
5592 "addu $29, $29, 8 \n\t" \
5593 "move %0, $2\n" \
5594 : "=r" (_res) \
5595 : "0" (&_argvec[0]) \
5596 : "memory", __CALLER_SAVED_REGS \
5597 ); \
5598 lval = (__typeof__(lval)) _res; \
5599 } while (0)
5600
5601 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5602 arg7,arg8,arg9,arg10) \
5603 do { \
5604 volatile OrigFn _orig = (orig); \
5605 volatile unsigned long _argvec[11]; \
5606 volatile unsigned long _res; \
5607 _argvec[0] = (unsigned long)_orig.nraddr; \
5608 _argvec[1] = (unsigned long)(arg1); \
5609 _argvec[2] = (unsigned long)(arg2); \
5610 _argvec[3] = (unsigned long)(arg3); \
5611 _argvec[4] = (unsigned long)(arg4); \
5612 _argvec[5] = (unsigned long)(arg5); \
5613 _argvec[6] = (unsigned long)(arg6); \
5614 _argvec[7] = (unsigned long)(arg7); \
5615 _argvec[8] = (unsigned long)(arg8); \
5616 _argvec[9] = (unsigned long)(arg9); \
5617 _argvec[10] = (unsigned long)(arg10); \
5618 __asm__ volatile( \
5619 "subu $29, $29, 8 \n\t" \
5620 "sw $28, 0($29) \n\t" \
5621 "sw $31, 4($29) \n\t" \
5622 "lw $4, 20(%1) \n\t" \
5623 "subu $29, $29, 48\n\t" \
5624 "sw $4, 16($29) \n\t" \
5625 "lw $4, 24(%1) \n\t" \
5626 "sw $4, 20($29) \n\t" \
5627 "lw $4, 28(%1) \n\t" \
5628 "sw $4, 24($29) \n\t" \
5629 "lw $4, 32(%1) \n\t" \
5630 "sw $4, 28($29) \n\t" \
5631 "lw $4, 36(%1) \n\t" \
5632 "sw $4, 32($29) \n\t" \
5633 "lw $4, 40(%1) \n\t" \
5634 "sw $4, 36($29) \n\t" \
5635 "lw $4, 4(%1) \n\t" \
5636 "lw $5, 8(%1) \n\t" \
5637 "lw $6, 12(%1) \n\t" \
5638 "lw $7, 16(%1) \n\t" \
5639 "lw $25, 0(%1) \n\t" \
5640 VALGRIND_CALL_NOREDIR_T9 \
5641 "addu $29, $29, 48 \n\t" \
5642 "lw $28, 0($29) \n\t" \
5643 "lw $31, 4($29) \n\t" \
5644 "addu $29, $29, 8 \n\t" \
5645 "move %0, $2\n" \
5646 : "=r" (_res) \
5647 : "0" (&_argvec[0]) \
5648 : "memory", __CALLER_SAVED_REGS \
5649 ); \
5650 lval = (__typeof__(lval)) _res; \
5651 } while (0)
5652
5653 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5654 arg6,arg7,arg8,arg9,arg10, \
5655 arg11) \
5656 do { \
5657 volatile OrigFn _orig = (orig); \
5658 volatile unsigned long _argvec[12]; \
5659 volatile unsigned long _res; \
5660 _argvec[0] = (unsigned long)_orig.nraddr; \
5661 _argvec[1] = (unsigned long)(arg1); \
5662 _argvec[2] = (unsigned long)(arg2); \
5663 _argvec[3] = (unsigned long)(arg3); \
5664 _argvec[4] = (unsigned long)(arg4); \
5665 _argvec[5] = (unsigned long)(arg5); \
5666 _argvec[6] = (unsigned long)(arg6); \
5667 _argvec[7] = (unsigned long)(arg7); \
5668 _argvec[8] = (unsigned long)(arg8); \
5669 _argvec[9] = (unsigned long)(arg9); \
5670 _argvec[10] = (unsigned long)(arg10); \
5671 _argvec[11] = (unsigned long)(arg11); \
5672 __asm__ volatile( \
5673 "subu $29, $29, 8 \n\t" \
5674 "sw $28, 0($29) \n\t" \
5675 "sw $31, 4($29) \n\t" \
5676 "lw $4, 20(%1) \n\t" \
5677 "subu $29, $29, 48\n\t" \
5678 "sw $4, 16($29) \n\t" \
5679 "lw $4, 24(%1) \n\t" \
5680 "sw $4, 20($29) \n\t" \
5681 "lw $4, 28(%1) \n\t" \
5682 "sw $4, 24($29) \n\t" \
5683 "lw $4, 32(%1) \n\t" \
5684 "sw $4, 28($29) \n\t" \
5685 "lw $4, 36(%1) \n\t" \
5686 "sw $4, 32($29) \n\t" \
5687 "lw $4, 40(%1) \n\t" \
5688 "sw $4, 36($29) \n\t" \
5689 "lw $4, 44(%1) \n\t" \
5690 "sw $4, 40($29) \n\t" \
5691 "lw $4, 4(%1) \n\t" \
5692 "lw $5, 8(%1) \n\t" \
5693 "lw $6, 12(%1) \n\t" \
5694 "lw $7, 16(%1) \n\t" \
5695 "lw $25, 0(%1) \n\t" \
5696 VALGRIND_CALL_NOREDIR_T9 \
5697 "addu $29, $29, 48 \n\t" \
5698 "lw $28, 0($29) \n\t" \
5699 "lw $31, 4($29) \n\t" \
5700 "addu $29, $29, 8 \n\t" \
5701 "move %0, $2\n" \
5702 : "=r" (_res) \
5703 : "0" (&_argvec[0]) \
5704 : "memory", __CALLER_SAVED_REGS \
5705 ); \
5706 lval = (__typeof__(lval)) _res; \
5707 } while (0)
5708
5709 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5710 arg6,arg7,arg8,arg9,arg10, \
5711 arg11,arg12) \
5712 do { \
5713 volatile OrigFn _orig = (orig); \
5714 volatile unsigned long _argvec[13]; \
5715 volatile unsigned long _res; \
5716 _argvec[0] = (unsigned long)_orig.nraddr; \
5717 _argvec[1] = (unsigned long)(arg1); \
5718 _argvec[2] = (unsigned long)(arg2); \
5719 _argvec[3] = (unsigned long)(arg3); \
5720 _argvec[4] = (unsigned long)(arg4); \
5721 _argvec[5] = (unsigned long)(arg5); \
5722 _argvec[6] = (unsigned long)(arg6); \
5723 _argvec[7] = (unsigned long)(arg7); \
5724 _argvec[8] = (unsigned long)(arg8); \
5725 _argvec[9] = (unsigned long)(arg9); \
5726 _argvec[10] = (unsigned long)(arg10); \
5727 _argvec[11] = (unsigned long)(arg11); \
5728 _argvec[12] = (unsigned long)(arg12); \
5729 __asm__ volatile( \
5730 "subu $29, $29, 8 \n\t" \
5731 "sw $28, 0($29) \n\t" \
5732 "sw $31, 4($29) \n\t" \
5733 "lw $4, 20(%1) \n\t" \
5734 "subu $29, $29, 56\n\t" \
5735 "sw $4, 16($29) \n\t" \
5736 "lw $4, 24(%1) \n\t" \
5737 "sw $4, 20($29) \n\t" \
5738 "lw $4, 28(%1) \n\t" \
5739 "sw $4, 24($29) \n\t" \
5740 "lw $4, 32(%1) \n\t" \
5741 "sw $4, 28($29) \n\t" \
5742 "lw $4, 36(%1) \n\t" \
5743 "sw $4, 32($29) \n\t" \
5744 "lw $4, 40(%1) \n\t" \
5745 "sw $4, 36($29) \n\t" \
5746 "lw $4, 44(%1) \n\t" \
5747 "sw $4, 40($29) \n\t" \
5748 "lw $4, 48(%1) \n\t" \
5749 "sw $4, 44($29) \n\t" \
5750 "lw $4, 4(%1) \n\t" \
5751 "lw $5, 8(%1) \n\t" \
5752 "lw $6, 12(%1) \n\t" \
5753 "lw $7, 16(%1) \n\t" \
5754 "lw $25, 0(%1) \n\t" \
5755 VALGRIND_CALL_NOREDIR_T9 \
5756 "addu $29, $29, 56 \n\t" \
5757 "lw $28, 0($29) \n\t" \
5758 "lw $31, 4($29) \n\t" \
5759 "addu $29, $29, 8 \n\t" \
5760 "move %0, $2\n" \
5761 : "=r" (_res) \
5762 : "r" (&_argvec[0]) \
5763 : "memory", __CALLER_SAVED_REGS \
5764 ); \
5765 lval = (__typeof__(lval)) _res; \
5766 } while (0)
5767
5768 #endif
5769
5770
5771
5772 #if defined(PLAT_nanomips_linux)
5773
5774
5775 #define __CALLER_SAVED_REGS "$t4", "$t5", "$a0", "$a1", "$a2", \
5776 "$a3", "$a4", "$a5", "$a6", "$a7", "$t0", "$t1", "$t2", "$t3", \
5777 "$t8","$t9", "$at"
5778
5779
5780
5781
5782 #define CALL_FN_W_v(lval, orig) \
5783 do { \
5784 volatile OrigFn _orig = (orig); \
5785 volatile unsigned long _argvec[1]; \
5786 volatile unsigned long _res; \
5787 _argvec[0] = (unsigned long)_orig.nraddr; \
5788 __asm__ volatile( \
5789 "lw $t9, 0(%1)\n\t" \
5790 VALGRIND_CALL_NOREDIR_T9 \
5791 "move %0, $a0\n" \
5792 : "=r" (_res) \
5793 : "r" (&_argvec[0]) \
5794 : "memory", __CALLER_SAVED_REGS \
5795 ); \
5796 lval = (__typeof__(lval)) _res; \
5797 } while (0)
5798
5799 #define CALL_FN_W_W(lval, orig, arg1) \
5800 do { \
5801 volatile OrigFn _orig = (orig); \
5802 volatile unsigned long _argvec[2]; \
5803 volatile unsigned long _res; \
5804 _argvec[0] = (unsigned long)_orig.nraddr; \
5805 _argvec[1] = (unsigned long)(arg1); \
5806 __asm__ volatile( \
5807 "lw $t9, 0(%1)\n\t" \
5808 "lw $a0, 4(%1)\n\t" \
5809 VALGRIND_CALL_NOREDIR_T9 \
5810 "move %0, $a0\n" \
5811 : "=r" (_res) \
5812 : "r" (&_argvec[0]) \
5813 : "memory", __CALLER_SAVED_REGS \
5814 ); \
5815 lval = (__typeof__(lval)) _res; \
5816 } while (0)
5817
5818 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5819 do { \
5820 volatile OrigFn _orig = (orig); \
5821 volatile unsigned long _argvec[3]; \
5822 volatile unsigned long _res; \
5823 _argvec[0] = (unsigned long)_orig.nraddr; \
5824 _argvec[1] = (unsigned long)(arg1); \
5825 _argvec[2] = (unsigned long)(arg2); \
5826 __asm__ volatile( \
5827 "lw $t9, 0(%1)\n\t" \
5828 "lw $a0, 4(%1)\n\t" \
5829 "lw $a1, 8(%1)\n\t" \
5830 VALGRIND_CALL_NOREDIR_T9 \
5831 "move %0, $a0\n" \
5832 : "=r" (_res) \
5833 : "r" (&_argvec[0]) \
5834 : "memory", __CALLER_SAVED_REGS \
5835 ); \
5836 lval = (__typeof__(lval)) _res; \
5837 } while (0)
5838
5839 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5840 do { \
5841 volatile OrigFn _orig = (orig); \
5842 volatile unsigned long _argvec[4]; \
5843 volatile unsigned long _res; \
5844 _argvec[0] = (unsigned long)_orig.nraddr; \
5845 _argvec[1] = (unsigned long)(arg1); \
5846 _argvec[2] = (unsigned long)(arg2); \
5847 _argvec[3] = (unsigned long)(arg3); \
5848 __asm__ volatile( \
5849 "lw $t9, 0(%1)\n\t" \
5850 "lw $a0, 4(%1)\n\t" \
5851 "lw $a1, 8(%1)\n\t" \
5852 "lw $a2,12(%1)\n\t" \
5853 VALGRIND_CALL_NOREDIR_T9 \
5854 "move %0, $a0\n" \
5855 : "=r" (_res) \
5856 : "r" (&_argvec[0]) \
5857 : "memory", __CALLER_SAVED_REGS \
5858 ); \
5859 lval = (__typeof__(lval)) _res; \
5860 } while (0)
5861
5862 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5863 do { \
5864 volatile OrigFn _orig = (orig); \
5865 volatile unsigned long _argvec[5]; \
5866 volatile unsigned long _res; \
5867 _argvec[0] = (unsigned long)_orig.nraddr; \
5868 _argvec[1] = (unsigned long)(arg1); \
5869 _argvec[2] = (unsigned long)(arg2); \
5870 _argvec[3] = (unsigned long)(arg3); \
5871 _argvec[4] = (unsigned long)(arg4); \
5872 __asm__ volatile( \
5873 "lw $t9, 0(%1)\n\t" \
5874 "lw $a0, 4(%1)\n\t" \
5875 "lw $a1, 8(%1)\n\t" \
5876 "lw $a2,12(%1)\n\t" \
5877 "lw $a3,16(%1)\n\t" \
5878 VALGRIND_CALL_NOREDIR_T9 \
5879 "move %0, $a0\n" \
5880 : "=r" (_res) \
5881 : "r" (&_argvec[0]) \
5882 : "memory", __CALLER_SAVED_REGS \
5883 ); \
5884 lval = (__typeof__(lval)) _res; \
5885 } while (0)
5886
5887 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5888 do { \
5889 volatile OrigFn _orig = (orig); \
5890 volatile unsigned long _argvec[6]; \
5891 volatile unsigned long _res; \
5892 _argvec[0] = (unsigned long)_orig.nraddr; \
5893 _argvec[1] = (unsigned long)(arg1); \
5894 _argvec[2] = (unsigned long)(arg2); \
5895 _argvec[3] = (unsigned long)(arg3); \
5896 _argvec[4] = (unsigned long)(arg4); \
5897 _argvec[5] = (unsigned long)(arg5); \
5898 __asm__ volatile( \
5899 "lw $t9, 0(%1)\n\t" \
5900 "lw $a0, 4(%1)\n\t" \
5901 "lw $a1, 8(%1)\n\t" \
5902 "lw $a2,12(%1)\n\t" \
5903 "lw $a3,16(%1)\n\t" \
5904 "lw $a4,20(%1)\n\t" \
5905 VALGRIND_CALL_NOREDIR_T9 \
5906 "move %0, $a0\n" \
5907 : "=r" (_res) \
5908 : "r" (&_argvec[0]) \
5909 : "memory", __CALLER_SAVED_REGS \
5910 ); \
5911 lval = (__typeof__(lval)) _res; \
5912 } while (0)
5913 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5914 do { \
5915 volatile OrigFn _orig = (orig); \
5916 volatile unsigned long _argvec[7]; \
5917 volatile unsigned long _res; \
5918 _argvec[0] = (unsigned long)_orig.nraddr; \
5919 _argvec[1] = (unsigned long)(arg1); \
5920 _argvec[2] = (unsigned long)(arg2); \
5921 _argvec[3] = (unsigned long)(arg3); \
5922 _argvec[4] = (unsigned long)(arg4); \
5923 _argvec[5] = (unsigned long)(arg5); \
5924 _argvec[6] = (unsigned long)(arg6); \
5925 __asm__ volatile( \
5926 "lw $t9, 0(%1)\n\t" \
5927 "lw $a0, 4(%1)\n\t" \
5928 "lw $a1, 8(%1)\n\t" \
5929 "lw $a2,12(%1)\n\t" \
5930 "lw $a3,16(%1)\n\t" \
5931 "lw $a4,20(%1)\n\t" \
5932 "lw $a5,24(%1)\n\t" \
5933 VALGRIND_CALL_NOREDIR_T9 \
5934 "move %0, $a0\n" \
5935 : "=r" (_res) \
5936 : "r" (&_argvec[0]) \
5937 : "memory", __CALLER_SAVED_REGS \
5938 ); \
5939 lval = (__typeof__(lval)) _res; \
5940 } while (0)
5941
5942 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5943 arg7) \
5944 do { \
5945 volatile OrigFn _orig = (orig); \
5946 volatile unsigned long _argvec[8]; \
5947 volatile unsigned long _res; \
5948 _argvec[0] = (unsigned long)_orig.nraddr; \
5949 _argvec[1] = (unsigned long)(arg1); \
5950 _argvec[2] = (unsigned long)(arg2); \
5951 _argvec[3] = (unsigned long)(arg3); \
5952 _argvec[4] = (unsigned long)(arg4); \
5953 _argvec[5] = (unsigned long)(arg5); \
5954 _argvec[6] = (unsigned long)(arg6); \
5955 _argvec[7] = (unsigned long)(arg7); \
5956 __asm__ volatile( \
5957 "lw $t9, 0(%1)\n\t" \
5958 "lw $a0, 4(%1)\n\t" \
5959 "lw $a1, 8(%1)\n\t" \
5960 "lw $a2,12(%1)\n\t" \
5961 "lw $a3,16(%1)\n\t" \
5962 "lw $a4,20(%1)\n\t" \
5963 "lw $a5,24(%1)\n\t" \
5964 "lw $a6,28(%1)\n\t" \
5965 VALGRIND_CALL_NOREDIR_T9 \
5966 "move %0, $a0\n" \
5967 : "=r" (_res) \
5968 : "r" (&_argvec[0]) \
5969 : "memory", __CALLER_SAVED_REGS \
5970 ); \
5971 lval = (__typeof__(lval)) _res; \
5972 } while (0)
5973
5974 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5975 arg7,arg8) \
5976 do { \
5977 volatile OrigFn _orig = (orig); \
5978 volatile unsigned long _argvec[9]; \
5979 volatile unsigned long _res; \
5980 _argvec[0] = (unsigned long)_orig.nraddr; \
5981 _argvec[1] = (unsigned long)(arg1); \
5982 _argvec[2] = (unsigned long)(arg2); \
5983 _argvec[3] = (unsigned long)(arg3); \
5984 _argvec[4] = (unsigned long)(arg4); \
5985 _argvec[5] = (unsigned long)(arg5); \
5986 _argvec[6] = (unsigned long)(arg6); \
5987 _argvec[7] = (unsigned long)(arg7); \
5988 _argvec[8] = (unsigned long)(arg8); \
5989 __asm__ volatile( \
5990 "lw $t9, 0(%1)\n\t" \
5991 "lw $a0, 4(%1)\n\t" \
5992 "lw $a1, 8(%1)\n\t" \
5993 "lw $a2,12(%1)\n\t" \
5994 "lw $a3,16(%1)\n\t" \
5995 "lw $a4,20(%1)\n\t" \
5996 "lw $a5,24(%1)\n\t" \
5997 "lw $a6,28(%1)\n\t" \
5998 "lw $a7,32(%1)\n\t" \
5999 VALGRIND_CALL_NOREDIR_T9 \
6000 "move %0, $a0\n" \
6001 : "=r" (_res) \
6002 : "r" (&_argvec[0]) \
6003 : "memory", __CALLER_SAVED_REGS \
6004 ); \
6005 lval = (__typeof__(lval)) _res; \
6006 } while (0)
6007
6008 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6009 arg7,arg8,arg9) \
6010 do { \
6011 volatile OrigFn _orig = (orig); \
6012 volatile unsigned long _argvec[10]; \
6013 volatile unsigned long _res; \
6014 _argvec[0] = (unsigned long)_orig.nraddr; \
6015 _argvec[1] = (unsigned long)(arg1); \
6016 _argvec[2] = (unsigned long)(arg2); \
6017 _argvec[3] = (unsigned long)(arg3); \
6018 _argvec[4] = (unsigned long)(arg4); \
6019 _argvec[5] = (unsigned long)(arg5); \
6020 _argvec[6] = (unsigned long)(arg6); \
6021 _argvec[7] = (unsigned long)(arg7); \
6022 _argvec[8] = (unsigned long)(arg8); \
6023 _argvec[9] = (unsigned long)(arg9); \
6024 __asm__ volatile( \
6025 "addiu $sp, $sp, -16 \n\t" \
6026 "lw $t9,36(%1) \n\t" \
6027 "sw $t9, 0($sp) \n\t" \
6028 "lw $t9, 0(%1) \n\t" \
6029 "lw $a0, 4(%1) \n\t" \
6030 "lw $a1, 8(%1) \n\t" \
6031 "lw $a2,12(%1) \n\t" \
6032 "lw $a3,16(%1) \n\t" \
6033 "lw $a4,20(%1) \n\t" \
6034 "lw $a5,24(%1) \n\t" \
6035 "lw $a6,28(%1) \n\t" \
6036 "lw $a7,32(%1) \n\t" \
6037 VALGRIND_CALL_NOREDIR_T9 \
6038 "move %0, $a0 \n\t" \
6039 "addiu $sp, $sp, 16 \n\t" \
6040 : "=r" (_res) \
6041 : "r" (&_argvec[0]) \
6042 : "memory", __CALLER_SAVED_REGS \
6043 ); \
6044 lval = (__typeof__(lval)) _res; \
6045 } while (0)
6046
6047 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6048 arg7,arg8,arg9,arg10) \
6049 do { \
6050 volatile OrigFn _orig = (orig); \
6051 volatile unsigned long _argvec[11]; \
6052 volatile unsigned long _res; \
6053 _argvec[0] = (unsigned long)_orig.nraddr; \
6054 _argvec[1] = (unsigned long)(arg1); \
6055 _argvec[2] = (unsigned long)(arg2); \
6056 _argvec[3] = (unsigned long)(arg3); \
6057 _argvec[4] = (unsigned long)(arg4); \
6058 _argvec[5] = (unsigned long)(arg5); \
6059 _argvec[6] = (unsigned long)(arg6); \
6060 _argvec[7] = (unsigned long)(arg7); \
6061 _argvec[8] = (unsigned long)(arg8); \
6062 _argvec[9] = (unsigned long)(arg9); \
6063 _argvec[10] = (unsigned long)(arg10); \
6064 __asm__ volatile( \
6065 "addiu $sp, $sp, -16 \n\t" \
6066 "lw $t9,36(%1) \n\t" \
6067 "sw $t9, 0($sp) \n\t" \
6068 "lw $t9,40(%1) \n\t" \
6069 "sw $t9, 4($sp) \n\t" \
6070 "lw $t9, 0(%1) \n\t" \
6071 "lw $a0, 4(%1) \n\t" \
6072 "lw $a1, 8(%1) \n\t" \
6073 "lw $a2,12(%1) \n\t" \
6074 "lw $a3,16(%1) \n\t" \
6075 "lw $a4,20(%1) \n\t" \
6076 "lw $a5,24(%1) \n\t" \
6077 "lw $a6,28(%1) \n\t" \
6078 "lw $a7,32(%1) \n\t" \
6079 VALGRIND_CALL_NOREDIR_T9 \
6080 "move %0, $a0 \n\t" \
6081 "addiu $sp, $sp, 16 \n\t" \
6082 : "=r" (_res) \
6083 : "r" (&_argvec[0]) \
6084 : "memory", __CALLER_SAVED_REGS \
6085 ); \
6086 lval = (__typeof__(lval)) _res; \
6087 } while (0)
6088
6089 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6090 arg6,arg7,arg8,arg9,arg10, \
6091 arg11) \
6092 do { \
6093 volatile OrigFn _orig = (orig); \
6094 volatile unsigned long _argvec[12]; \
6095 volatile unsigned long _res; \
6096 _argvec[0] = (unsigned long)_orig.nraddr; \
6097 _argvec[1] = (unsigned long)(arg1); \
6098 _argvec[2] = (unsigned long)(arg2); \
6099 _argvec[3] = (unsigned long)(arg3); \
6100 _argvec[4] = (unsigned long)(arg4); \
6101 _argvec[5] = (unsigned long)(arg5); \
6102 _argvec[6] = (unsigned long)(arg6); \
6103 _argvec[7] = (unsigned long)(arg7); \
6104 _argvec[8] = (unsigned long)(arg8); \
6105 _argvec[9] = (unsigned long)(arg9); \
6106 _argvec[10] = (unsigned long)(arg10); \
6107 _argvec[11] = (unsigned long)(arg11); \
6108 __asm__ volatile( \
6109 "addiu $sp, $sp, -16 \n\t" \
6110 "lw $t9,36(%1) \n\t" \
6111 "sw $t9, 0($sp) \n\t" \
6112 "lw $t9,40(%1) \n\t" \
6113 "sw $t9, 4($sp) \n\t" \
6114 "lw $t9,44(%1) \n\t" \
6115 "sw $t9, 8($sp) \n\t" \
6116 "lw $t9, 0(%1) \n\t" \
6117 "lw $a0, 4(%1) \n\t" \
6118 "lw $a1, 8(%1) \n\t" \
6119 "lw $a2,12(%1) \n\t" \
6120 "lw $a3,16(%1) \n\t" \
6121 "lw $a4,20(%1) \n\t" \
6122 "lw $a5,24(%1) \n\t" \
6123 "lw $a6,28(%1) \n\t" \
6124 "lw $a7,32(%1) \n\t" \
6125 VALGRIND_CALL_NOREDIR_T9 \
6126 "move %0, $a0 \n\t" \
6127 "addiu $sp, $sp, 16 \n\t" \
6128 : "=r" (_res) \
6129 : "r" (&_argvec[0]) \
6130 : "memory", __CALLER_SAVED_REGS \
6131 ); \
6132 lval = (__typeof__(lval)) _res; \
6133 } while (0)
6134
6135 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6136 arg6,arg7,arg8,arg9,arg10, \
6137 arg11,arg12) \
6138 do { \
6139 volatile OrigFn _orig = (orig); \
6140 volatile unsigned long _argvec[13]; \
6141 volatile unsigned long _res; \
6142 _argvec[0] = (unsigned long)_orig.nraddr; \
6143 _argvec[1] = (unsigned long)(arg1); \
6144 _argvec[2] = (unsigned long)(arg2); \
6145 _argvec[3] = (unsigned long)(arg3); \
6146 _argvec[4] = (unsigned long)(arg4); \
6147 _argvec[5] = (unsigned long)(arg5); \
6148 _argvec[6] = (unsigned long)(arg6); \
6149 _argvec[7] = (unsigned long)(arg7); \
6150 _argvec[8] = (unsigned long)(arg8); \
6151 _argvec[9] = (unsigned long)(arg9); \
6152 _argvec[10] = (unsigned long)(arg10); \
6153 _argvec[11] = (unsigned long)(arg11); \
6154 _argvec[12] = (unsigned long)(arg12); \
6155 __asm__ volatile( \
6156 "addiu $sp, $sp, -16 \n\t" \
6157 "lw $t9,36(%1) \n\t" \
6158 "sw $t9, 0($sp) \n\t" \
6159 "lw $t9,40(%1) \n\t" \
6160 "sw $t9, 4($sp) \n\t" \
6161 "lw $t9,44(%1) \n\t" \
6162 "sw $t9, 8($sp) \n\t" \
6163 "lw $t9,48(%1) \n\t" \
6164 "sw $t9,12($sp) \n\t" \
6165 "lw $t9, 0(%1) \n\t" \
6166 "lw $a0, 4(%1) \n\t" \
6167 "lw $a1, 8(%1) \n\t" \
6168 "lw $a2,12(%1) \n\t" \
6169 "lw $a3,16(%1) \n\t" \
6170 "lw $a4,20(%1) \n\t" \
6171 "lw $a5,24(%1) \n\t" \
6172 "lw $a6,28(%1) \n\t" \
6173 "lw $a7,32(%1) \n\t" \
6174 VALGRIND_CALL_NOREDIR_T9 \
6175 "move %0, $a0 \n\t" \
6176 "addiu $sp, $sp, 16 \n\t" \
6177 : "=r" (_res) \
6178 : "r" (&_argvec[0]) \
6179 : "memory", __CALLER_SAVED_REGS \
6180 ); \
6181 lval = (__typeof__(lval)) _res; \
6182 } while (0)
6183
6184 #endif
6185
6186
6187
6188 #if defined(PLAT_mips64_linux)
6189
6190
6191 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
6192 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
6193 "$25", "$31"
6194
6195
6196
6197
6198 #define MIPS64_LONG2REG_CAST(x) ((long long)(long)x)
6199
6200 #define CALL_FN_W_v(lval, orig) \
6201 do { \
6202 volatile OrigFn _orig = (orig); \
6203 volatile unsigned long long _argvec[1]; \
6204 volatile unsigned long long _res; \
6205 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6206 __asm__ volatile( \
6207 "ld $25, 0(%1)\n\t" \
6208 VALGRIND_CALL_NOREDIR_T9 \
6209 "move %0, $2\n" \
6210 : "=r" (_res) \
6211 : "0" (&_argvec[0]) \
6212 : "memory", __CALLER_SAVED_REGS \
6213 ); \
6214 lval = (__typeof__(lval)) (long)_res; \
6215 } while (0)
6216
6217 #define CALL_FN_W_W(lval, orig, arg1) \
6218 do { \
6219 volatile OrigFn _orig = (orig); \
6220 volatile unsigned long long _argvec[2]; \
6221 volatile unsigned long long _res; \
6222 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6223 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6224 __asm__ volatile( \
6225 "ld $4, 8(%1)\n\t" \
6226 "ld $25, 0(%1)\n\t" \
6227 VALGRIND_CALL_NOREDIR_T9 \
6228 "move %0, $2\n" \
6229 : "=r" (_res) \
6230 : "r" (&_argvec[0]) \
6231 : "memory", __CALLER_SAVED_REGS \
6232 ); \
6233 lval = (__typeof__(lval)) (long)_res; \
6234 } while (0)
6235
6236 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
6237 do { \
6238 volatile OrigFn _orig = (orig); \
6239 volatile unsigned long long _argvec[3]; \
6240 volatile unsigned long long _res; \
6241 _argvec[0] = _orig.nraddr; \
6242 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6243 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6244 __asm__ volatile( \
6245 "ld $4, 8(%1)\n\t" \
6246 "ld $5, 16(%1)\n\t" \
6247 "ld $25, 0(%1)\n\t" \
6248 VALGRIND_CALL_NOREDIR_T9 \
6249 "move %0, $2\n" \
6250 : "=r" (_res) \
6251 : "r" (&_argvec[0]) \
6252 : "memory", __CALLER_SAVED_REGS \
6253 ); \
6254 lval = (__typeof__(lval)) (long)_res; \
6255 } while (0)
6256
6257
6258 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
6259 do { \
6260 volatile OrigFn _orig = (orig); \
6261 volatile unsigned long long _argvec[4]; \
6262 volatile unsigned long long _res; \
6263 _argvec[0] = _orig.nraddr; \
6264 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6265 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6266 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6267 __asm__ volatile( \
6268 "ld $4, 8(%1)\n\t" \
6269 "ld $5, 16(%1)\n\t" \
6270 "ld $6, 24(%1)\n\t" \
6271 "ld $25, 0(%1)\n\t" \
6272 VALGRIND_CALL_NOREDIR_T9 \
6273 "move %0, $2\n" \
6274 : "=r" (_res) \
6275 : "r" (&_argvec[0]) \
6276 : "memory", __CALLER_SAVED_REGS \
6277 ); \
6278 lval = (__typeof__(lval)) (long)_res; \
6279 } while (0)
6280
6281 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
6282 do { \
6283 volatile OrigFn _orig = (orig); \
6284 volatile unsigned long long _argvec[5]; \
6285 volatile unsigned long long _res; \
6286 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6287 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6288 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6289 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6290 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6291 __asm__ volatile( \
6292 "ld $4, 8(%1)\n\t" \
6293 "ld $5, 16(%1)\n\t" \
6294 "ld $6, 24(%1)\n\t" \
6295 "ld $7, 32(%1)\n\t" \
6296 "ld $25, 0(%1)\n\t" \
6297 VALGRIND_CALL_NOREDIR_T9 \
6298 "move %0, $2\n" \
6299 : "=r" (_res) \
6300 : "r" (&_argvec[0]) \
6301 : "memory", __CALLER_SAVED_REGS \
6302 ); \
6303 lval = (__typeof__(lval)) (long)_res; \
6304 } while (0)
6305
6306 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
6307 do { \
6308 volatile OrigFn _orig = (orig); \
6309 volatile unsigned long long _argvec[6]; \
6310 volatile unsigned long long _res; \
6311 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6312 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6313 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6314 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6315 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6316 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6317 __asm__ volatile( \
6318 "ld $4, 8(%1)\n\t" \
6319 "ld $5, 16(%1)\n\t" \
6320 "ld $6, 24(%1)\n\t" \
6321 "ld $7, 32(%1)\n\t" \
6322 "ld $8, 40(%1)\n\t" \
6323 "ld $25, 0(%1)\n\t" \
6324 VALGRIND_CALL_NOREDIR_T9 \
6325 "move %0, $2\n" \
6326 : "=r" (_res) \
6327 : "r" (&_argvec[0]) \
6328 : "memory", __CALLER_SAVED_REGS \
6329 ); \
6330 lval = (__typeof__(lval)) (long)_res; \
6331 } while (0)
6332
6333 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
6334 do { \
6335 volatile OrigFn _orig = (orig); \
6336 volatile unsigned long long _argvec[7]; \
6337 volatile unsigned long long _res; \
6338 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6339 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6340 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6341 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6342 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6343 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6344 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6345 __asm__ volatile( \
6346 "ld $4, 8(%1)\n\t" \
6347 "ld $5, 16(%1)\n\t" \
6348 "ld $6, 24(%1)\n\t" \
6349 "ld $7, 32(%1)\n\t" \
6350 "ld $8, 40(%1)\n\t" \
6351 "ld $9, 48(%1)\n\t" \
6352 "ld $25, 0(%1)\n\t" \
6353 VALGRIND_CALL_NOREDIR_T9 \
6354 "move %0, $2\n" \
6355 : "=r" (_res) \
6356 : "r" (&_argvec[0]) \
6357 : "memory", __CALLER_SAVED_REGS \
6358 ); \
6359 lval = (__typeof__(lval)) (long)_res; \
6360 } while (0)
6361
6362 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6363 arg7) \
6364 do { \
6365 volatile OrigFn _orig = (orig); \
6366 volatile unsigned long long _argvec[8]; \
6367 volatile unsigned long long _res; \
6368 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6369 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6370 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6371 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6372 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6373 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6374 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6375 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6376 __asm__ volatile( \
6377 "ld $4, 8(%1)\n\t" \
6378 "ld $5, 16(%1)\n\t" \
6379 "ld $6, 24(%1)\n\t" \
6380 "ld $7, 32(%1)\n\t" \
6381 "ld $8, 40(%1)\n\t" \
6382 "ld $9, 48(%1)\n\t" \
6383 "ld $10, 56(%1)\n\t" \
6384 "ld $25, 0(%1) \n\t" \
6385 VALGRIND_CALL_NOREDIR_T9 \
6386 "move %0, $2\n" \
6387 : "=r" (_res) \
6388 : "r" (&_argvec[0]) \
6389 : "memory", __CALLER_SAVED_REGS \
6390 ); \
6391 lval = (__typeof__(lval)) (long)_res; \
6392 } while (0)
6393
6394 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6395 arg7,arg8) \
6396 do { \
6397 volatile OrigFn _orig = (orig); \
6398 volatile unsigned long long _argvec[9]; \
6399 volatile unsigned long long _res; \
6400 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6401 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6402 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6403 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6404 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6405 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6406 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6407 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6408 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6409 __asm__ volatile( \
6410 "ld $4, 8(%1)\n\t" \
6411 "ld $5, 16(%1)\n\t" \
6412 "ld $6, 24(%1)\n\t" \
6413 "ld $7, 32(%1)\n\t" \
6414 "ld $8, 40(%1)\n\t" \
6415 "ld $9, 48(%1)\n\t" \
6416 "ld $10, 56(%1)\n\t" \
6417 "ld $11, 64(%1)\n\t" \
6418 "ld $25, 0(%1) \n\t" \
6419 VALGRIND_CALL_NOREDIR_T9 \
6420 "move %0, $2\n" \
6421 : "=r" (_res) \
6422 : "r" (&_argvec[0]) \
6423 : "memory", __CALLER_SAVED_REGS \
6424 ); \
6425 lval = (__typeof__(lval)) (long)_res; \
6426 } while (0)
6427
6428 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6429 arg7,arg8,arg9) \
6430 do { \
6431 volatile OrigFn _orig = (orig); \
6432 volatile unsigned long long _argvec[10]; \
6433 volatile unsigned long long _res; \
6434 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6435 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6436 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6437 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6438 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6439 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6440 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6441 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6442 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6443 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6444 __asm__ volatile( \
6445 "dsubu $29, $29, 8\n\t" \
6446 "ld $4, 72(%1)\n\t" \
6447 "sd $4, 0($29)\n\t" \
6448 "ld $4, 8(%1)\n\t" \
6449 "ld $5, 16(%1)\n\t" \
6450 "ld $6, 24(%1)\n\t" \
6451 "ld $7, 32(%1)\n\t" \
6452 "ld $8, 40(%1)\n\t" \
6453 "ld $9, 48(%1)\n\t" \
6454 "ld $10, 56(%1)\n\t" \
6455 "ld $11, 64(%1)\n\t" \
6456 "ld $25, 0(%1)\n\t" \
6457 VALGRIND_CALL_NOREDIR_T9 \
6458 "daddu $29, $29, 8\n\t" \
6459 "move %0, $2\n" \
6460 : "=r" (_res) \
6461 : "r" (&_argvec[0]) \
6462 : "memory", __CALLER_SAVED_REGS \
6463 ); \
6464 lval = (__typeof__(lval)) (long)_res; \
6465 } while (0)
6466
6467 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6468 arg7,arg8,arg9,arg10) \
6469 do { \
6470 volatile OrigFn _orig = (orig); \
6471 volatile unsigned long long _argvec[11]; \
6472 volatile unsigned long long _res; \
6473 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6474 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6475 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6476 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6477 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6478 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6479 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6480 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6481 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6482 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6483 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
6484 __asm__ volatile( \
6485 "dsubu $29, $29, 16\n\t" \
6486 "ld $4, 72(%1)\n\t" \
6487 "sd $4, 0($29)\n\t" \
6488 "ld $4, 80(%1)\n\t" \
6489 "sd $4, 8($29)\n\t" \
6490 "ld $4, 8(%1)\n\t" \
6491 "ld $5, 16(%1)\n\t" \
6492 "ld $6, 24(%1)\n\t" \
6493 "ld $7, 32(%1)\n\t" \
6494 "ld $8, 40(%1)\n\t" \
6495 "ld $9, 48(%1)\n\t" \
6496 "ld $10, 56(%1)\n\t" \
6497 "ld $11, 64(%1)\n\t" \
6498 "ld $25, 0(%1)\n\t" \
6499 VALGRIND_CALL_NOREDIR_T9 \
6500 "daddu $29, $29, 16\n\t" \
6501 "move %0, $2\n" \
6502 : "=r" (_res) \
6503 : "r" (&_argvec[0]) \
6504 : "memory", __CALLER_SAVED_REGS \
6505 ); \
6506 lval = (__typeof__(lval)) (long)_res; \
6507 } while (0)
6508
6509 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6510 arg6,arg7,arg8,arg9,arg10, \
6511 arg11) \
6512 do { \
6513 volatile OrigFn _orig = (orig); \
6514 volatile unsigned long long _argvec[12]; \
6515 volatile unsigned long long _res; \
6516 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6517 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6518 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6519 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6520 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6521 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6522 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6523 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6524 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6525 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6526 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
6527 _argvec[11] = MIPS64_LONG2REG_CAST(arg11); \
6528 __asm__ volatile( \
6529 "dsubu $29, $29, 24\n\t" \
6530 "ld $4, 72(%1)\n\t" \
6531 "sd $4, 0($29)\n\t" \
6532 "ld $4, 80(%1)\n\t" \
6533 "sd $4, 8($29)\n\t" \
6534 "ld $4, 88(%1)\n\t" \
6535 "sd $4, 16($29)\n\t" \
6536 "ld $4, 8(%1)\n\t" \
6537 "ld $5, 16(%1)\n\t" \
6538 "ld $6, 24(%1)\n\t" \
6539 "ld $7, 32(%1)\n\t" \
6540 "ld $8, 40(%1)\n\t" \
6541 "ld $9, 48(%1)\n\t" \
6542 "ld $10, 56(%1)\n\t" \
6543 "ld $11, 64(%1)\n\t" \
6544 "ld $25, 0(%1)\n\t" \
6545 VALGRIND_CALL_NOREDIR_T9 \
6546 "daddu $29, $29, 24\n\t" \
6547 "move %0, $2\n" \
6548 : "=r" (_res) \
6549 : "r" (&_argvec[0]) \
6550 : "memory", __CALLER_SAVED_REGS \
6551 ); \
6552 lval = (__typeof__(lval)) (long)_res; \
6553 } while (0)
6554
6555 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6556 arg6,arg7,arg8,arg9,arg10, \
6557 arg11,arg12) \
6558 do { \
6559 volatile OrigFn _orig = (orig); \
6560 volatile unsigned long long _argvec[13]; \
6561 volatile unsigned long long _res; \
6562 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6563 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6564 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6565 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6566 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6567 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6568 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6569 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6570 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6571 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6572 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
6573 _argvec[11] = MIPS64_LONG2REG_CAST(arg11); \
6574 _argvec[12] = MIPS64_LONG2REG_CAST(arg12); \
6575 __asm__ volatile( \
6576 "dsubu $29, $29, 32\n\t" \
6577 "ld $4, 72(%1)\n\t" \
6578 "sd $4, 0($29)\n\t" \
6579 "ld $4, 80(%1)\n\t" \
6580 "sd $4, 8($29)\n\t" \
6581 "ld $4, 88(%1)\n\t" \
6582 "sd $4, 16($29)\n\t" \
6583 "ld $4, 96(%1)\n\t" \
6584 "sd $4, 24($29)\n\t" \
6585 "ld $4, 8(%1)\n\t" \
6586 "ld $5, 16(%1)\n\t" \
6587 "ld $6, 24(%1)\n\t" \
6588 "ld $7, 32(%1)\n\t" \
6589 "ld $8, 40(%1)\n\t" \
6590 "ld $9, 48(%1)\n\t" \
6591 "ld $10, 56(%1)\n\t" \
6592 "ld $11, 64(%1)\n\t" \
6593 "ld $25, 0(%1)\n\t" \
6594 VALGRIND_CALL_NOREDIR_T9 \
6595 "daddu $29, $29, 32\n\t" \
6596 "move %0, $2\n" \
6597 : "=r" (_res) \
6598 : "r" (&_argvec[0]) \
6599 : "memory", __CALLER_SAVED_REGS \
6600 ); \
6601 lval = (__typeof__(lval)) (long)_res; \
6602 } while (0)
6603
6604 #endif
6605
6606
6607
6608
6609
6610
6611
6612
6613
6614
6615
6616
6617
6618
6619
6620
6621 #define VG_USERREQ_TOOL_BASE(a,b) \
6622 ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
6623 #define VG_IS_TOOL_USERREQ(a, b, v) \
6624 (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
6625
6626
6627
6628
6629
6630
6631 typedef
6632 enum { VG_USERREQ__RUNNING_ON_VALGRIND = 0x1001,
6633 VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,
6634
6635
6636
6637
6638
6639
6640 VG_USERREQ__CLIENT_CALL0 = 0x1101,
6641 VG_USERREQ__CLIENT_CALL1 = 0x1102,
6642 VG_USERREQ__CLIENT_CALL2 = 0x1103,
6643 VG_USERREQ__CLIENT_CALL3 = 0x1104,
6644
6645
6646
6647
6648 VG_USERREQ__COUNT_ERRORS = 0x1201,
6649
6650
6651
6652 VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,
6653
6654
6655
6656 VG_USERREQ__CLO_CHANGE = 0x1203,
6657
6658
6659
6660 VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
6661 VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
6662 VG_USERREQ__FREELIKE_BLOCK = 0x1302,
6663
6664 VG_USERREQ__CREATE_MEMPOOL = 0x1303,
6665 VG_USERREQ__DESTROY_MEMPOOL = 0x1304,
6666 VG_USERREQ__MEMPOOL_ALLOC = 0x1305,
6667 VG_USERREQ__MEMPOOL_FREE = 0x1306,
6668 VG_USERREQ__MEMPOOL_TRIM = 0x1307,
6669 VG_USERREQ__MOVE_MEMPOOL = 0x1308,
6670 VG_USERREQ__MEMPOOL_CHANGE = 0x1309,
6671 VG_USERREQ__MEMPOOL_EXISTS = 0x130a,
6672
6673
6674
6675
6676
6677
6678
6679
6680 VG_USERREQ__PRINTF = 0x1401,
6681 VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
6682
6683 VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
6684 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,
6685
6686
6687 VG_USERREQ__STACK_REGISTER = 0x1501,
6688 VG_USERREQ__STACK_DEREGISTER = 0x1502,
6689 VG_USERREQ__STACK_CHANGE = 0x1503,
6690
6691
6692 VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,
6693
6694
6695 VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,
6696
6697
6698
6699
6700
6701
6702 VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,
6703
6704
6705
6706
6707 VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901,
6708
6709
6710 VG_USERREQ__INNER_THREADS = 0x1902
6711 } Vg_ClientRequest;
6712
6713 #if !defined(__GNUC__)
6714 # define __extension__
6715 #endif
6716
6717
6718
6719
6720
6721
6722 #define RUNNING_ON_VALGRIND \
6723 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 , \
6724 VG_USERREQ__RUNNING_ON_VALGRIND, \
6725 0, 0, 0, 0, 0) \
6726
6727
6728
6729
6730
6731
6732 #define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len) \
6733 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \
6734 _qzz_addr, _qzz_len, 0, 0, 0)
6735
6736 #define VALGRIND_INNER_THREADS(_qzz_addr) \
6737 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__INNER_THREADS, \
6738 _qzz_addr, 0, 0, 0, 0)
6739
6740
6741
6742
6743
6744
6745
6746 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6747
6748
6749 static int VALGRIND_PRINTF(const char *format, ...)
6750 __attribute__((format(__printf__, 1, 2), __unused__));
6751 #endif
6752 static int
6753 #if defined(_MSC_VER)
6754 __inline
6755 #endif
6756 VALGRIND_PRINTF(const char *format, ...)
6757 {
6758 #if defined(NVALGRIND)
6759 (void)format;
6760 return 0;
6761 #else
6762 #if defined(_MSC_VER) || defined(__MINGW64__)
6763 uintptr_t _qzz_res;
6764 #else
6765 unsigned long _qzz_res;
6766 #endif
6767 va_list vargs;
6768 va_start(vargs, format);
6769 #if defined(_MSC_VER) || defined(__MINGW64__)
6770 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6771 VG_USERREQ__PRINTF_VALIST_BY_REF,
6772 (uintptr_t)format,
6773 (uintptr_t)&vargs,
6774 0, 0, 0);
6775 #else
6776 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6777 VG_USERREQ__PRINTF_VALIST_BY_REF,
6778 (unsigned long)format,
6779 (unsigned long)&vargs,
6780 0, 0, 0);
6781 #endif
6782 va_end(vargs);
6783 return (int)_qzz_res;
6784 #endif
6785 }
6786
6787 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6788 static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6789 __attribute__((format(__printf__, 1, 2), __unused__));
6790 #endif
6791 static int
6792 #if defined(_MSC_VER)
6793 __inline
6794 #endif
6795 VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6796 {
6797 #if defined(NVALGRIND)
6798 (void)format;
6799 return 0;
6800 #else
6801 #if defined(_MSC_VER) || defined(__MINGW64__)
6802 uintptr_t _qzz_res;
6803 #else
6804 unsigned long _qzz_res;
6805 #endif
6806 va_list vargs;
6807 va_start(vargs, format);
6808 #if defined(_MSC_VER) || defined(__MINGW64__)
6809 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6810 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6811 (uintptr_t)format,
6812 (uintptr_t)&vargs,
6813 0, 0, 0);
6814 #else
6815 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6816 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6817 (unsigned long)format,
6818 (unsigned long)&vargs,
6819 0, 0, 0);
6820 #endif
6821 va_end(vargs);
6822 return (int)_qzz_res;
6823 #endif
6824 }
6825
6826
6827
6828
6829
6830
6831
6832
6833
6834
6835
6836
6837
6838
6839
6840
6841
6842
6843
6844
6845
6846
6847
6848
6849
6850 #define VALGRIND_NON_SIMD_CALL0(_qyy_fn) \
6851 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 , \
6852 VG_USERREQ__CLIENT_CALL0, \
6853 _qyy_fn, \
6854 0, 0, 0, 0)
6855
6856 #define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1) \
6857 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 , \
6858 VG_USERREQ__CLIENT_CALL1, \
6859 _qyy_fn, \
6860 _qyy_arg1, 0, 0, 0)
6861
6862 #define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2) \
6863 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 , \
6864 VG_USERREQ__CLIENT_CALL2, \
6865 _qyy_fn, \
6866 _qyy_arg1, _qyy_arg2, 0, 0)
6867
6868 #define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
6869 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 , \
6870 VG_USERREQ__CLIENT_CALL3, \
6871 _qyy_fn, \
6872 _qyy_arg1, _qyy_arg2, \
6873 _qyy_arg3, 0)
6874
6875
6876
6877
6878
6879 #define VALGRIND_COUNT_ERRORS \
6880 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( \
6881 0 , \
6882 VG_USERREQ__COUNT_ERRORS, \
6883 0, 0, 0, 0, 0)
6884
6885
6886
6887
6888
6889
6890
6891
6892
6893
6894
6895
6896
6897
6898
6899
6900
6901
6902
6903
6904
6905
6906
6907
6908
6909
6910
6911
6912
6913
6914
6915
6916
6917
6918
6919
6920
6921
6922
6923
6924
6925
6926
6927
6928
6929
6930
6931
6932
6933
6934
6935
6936
6937
6938
6939
6940
6941
6942
6943
6944
6945
6946
6947
6948
6949
6950
6951
6952
6953
6954
6955
6956
6957
6958
6959
6960
6961
6962
6963
6964
6965
6966
6967
6968
6969
6970
6971
6972
6973
6974
6975
6976
6977
6978
6979
6980
6981
6982
6983
6984
6985
6986 #define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed) \
6987 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
6988 addr, sizeB, rzB, is_zeroed, 0)
6989
6990
6991
6992
6993 #define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \
6994 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK, \
6995 addr, oldSizeB, newSizeB, rzB, 0)
6996
6997
6998
6999
7000 #define VALGRIND_FREELIKE_BLOCK(addr, rzB) \
7001 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK, \
7002 addr, rzB, 0, 0, 0)
7003
7004
7005 #define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed) \
7006 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \
7007 pool, rzB, is_zeroed, 0, 0)
7008
7009
7010
7011
7012
7013
7014
7015
7016
7017
7018
7019
7020
7021
7022
7023
7024
7025
7026
7027
7028
7029
7030
7031
7032
7033
7034
7035 #define VALGRIND_MEMPOOL_AUTO_FREE 1
7036 #define VALGRIND_MEMPOOL_METAPOOL 2
7037 #define VALGRIND_CREATE_MEMPOOL_EXT(pool, rzB, is_zeroed, flags) \
7038 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \
7039 pool, rzB, is_zeroed, flags, 0)
7040
7041
7042 #define VALGRIND_DESTROY_MEMPOOL(pool) \
7043 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL, \
7044 pool, 0, 0, 0, 0)
7045
7046
7047 #define VALGRIND_MEMPOOL_ALLOC(pool, addr, size) \
7048 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC, \
7049 pool, addr, size, 0, 0)
7050
7051
7052 #define VALGRIND_MEMPOOL_FREE(pool, addr) \
7053 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE, \
7054 pool, addr, 0, 0, 0)
7055
7056
7057 #define VALGRIND_MEMPOOL_TRIM(pool, addr, size) \
7058 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM, \
7059 pool, addr, size, 0, 0)
7060
7061
7062 #define VALGRIND_MOVE_MEMPOOL(poolA, poolB) \
7063 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL, \
7064 poolA, poolB, 0, 0, 0)
7065
7066
7067 #define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size) \
7068 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE, \
7069 pool, addrA, addrB, size, 0)
7070
7071
7072 #define VALGRIND_MEMPOOL_EXISTS(pool) \
7073 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
7074 VG_USERREQ__MEMPOOL_EXISTS, \
7075 pool, 0, 0, 0, 0)
7076
7077
7078
7079
7080 #define VALGRIND_STACK_REGISTER(start, end) \
7081 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
7082 VG_USERREQ__STACK_REGISTER, \
7083 start, end, 0, 0, 0)
7084
7085
7086
7087 #define VALGRIND_STACK_DEREGISTER(id) \
7088 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
7089 id, 0, 0, 0, 0)
7090
7091
7092
7093
7094 #define VALGRIND_STACK_CHANGE(id, start, end) \
7095 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE, \
7096 id, start, end, 0, 0)
7097
7098
7099 #define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta) \
7100 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
7101 fd, ptr, total_size, delta, 0)
7102
7103
7104
7105
7106
7107 #define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64) \
7108 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
7109 VG_USERREQ__MAP_IP_TO_SRCLOC, \
7110 addr, buf64, 0, 0, 0)
7111
7112
7113
7114
7115
7116
7117
7118
7119
7120 #define VALGRIND_DISABLE_ERROR_REPORTING \
7121 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
7122 1, 0, 0, 0, 0)
7123
7124
7125
7126 #define VALGRIND_ENABLE_ERROR_REPORTING \
7127 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
7128 -1, 0, 0, 0, 0)
7129
7130
7131
7132
7133
7134
7135 #define VALGRIND_MONITOR_COMMAND(command) \
7136 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
7137 command, 0, 0, 0, 0)
7138
7139
7140
7141
7142
7143 #define VALGRIND_CLO_CHANGE(option) \
7144 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CLO_CHANGE, \
7145 option, 0, 0, 0, 0)
7146
7147
7148 #undef PLAT_x86_darwin
7149 #undef PLAT_amd64_darwin
7150 #undef PLAT_x86_win32
7151 #undef PLAT_amd64_win64
7152 #undef PLAT_x86_linux
7153 #undef PLAT_amd64_linux
7154 #undef PLAT_ppc32_linux
7155 #undef PLAT_ppc64be_linux
7156 #undef PLAT_ppc64le_linux
7157 #undef PLAT_arm_linux
7158 #undef PLAT_s390x_linux
7159 #undef PLAT_mips32_linux
7160 #undef PLAT_mips64_linux
7161 #undef PLAT_nanomips_linux
7162 #undef PLAT_x86_solaris
7163 #undef PLAT_amd64_solaris
7164
7165 #endif