1 /* -*- c -*- 2 ---------------------------------------------------------------- 3 4 Notice that the following BSD-style license applies to this one 5 file (valgrind.h) only. The rest of Valgrind is licensed under the 6 terms of the GNU General Public License, version 2, unless 7 otherwise indicated. See the COPYING file in the source 8 distribution for details. 9 10 ---------------------------------------------------------------- 11 12 This file is part of Valgrind, a dynamic binary instrumentation 13 framework. 14 15 Copyright (C) 2000-2017 Julian Seward. All rights reserved. 16 17 Redistribution and use in source and binary forms, with or without 18 modification, are permitted provided that the following conditions 19 are met: 20 21 1. Redistributions of source code must retain the above copyright 22 notice, this list of conditions and the following disclaimer. 23 24 2. The origin of this software must not be misrepresented; you must 25 not claim that you wrote the original software. If you use this 26 software in a product, an acknowledgment in the product 27 documentation would be appreciated but is not required. 28 29 3. Altered source versions must be plainly marked as such, and must 30 not be misrepresented as being the original software. 31 32 4. The name of the author may not be used to endorse or promote 33 products derived from this software without specific prior written 34 permission. 35 36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS 37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 39 ARE DISCLAIMED. 
   IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
   DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
   DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
   GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
   WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

   ----------------------------------------------------------------

   Notice that the above BSD-style license applies to this one file
   (valgrind.h) only.  The entire rest of Valgrind is licensed under
   the terms of the GNU General Public License, version 2.  See the
   COPYING file in the source distribution for details.

   ----------------------------------------------------------------
*/


/* This file is for inclusion into client (your!) code.

   You can use these macros to manipulate and query Valgrind's
   execution inside your own programs.

   The resulting executables will still run without Valgrind, just a
   little bit more slowly than they otherwise would, but otherwise
   unchanged.  When not running on valgrind, each client request
   consumes very few (eg. 7) instructions, so the resulting performance
   loss is negligible unless you plan to execute client requests
   millions of times per second.  Nevertheless, if that is still a
   problem, you can compile with the NVALGRIND symbol defined (gcc
   -DNVALGRIND) so that client requests are not even compiled in. */

#ifndef __VALGRIND_H
#define __VALGRIND_H


/* ------------------------------------------------------------------ */
/* VERSION NUMBER OF VALGRIND                                         */
/* ------------------------------------------------------------------ */

/* Specify Valgrind's version number, so that user code can
   conditionally compile based on our version number.  Note that these
   were introduced at version 3.6 and so do not exist in version 3.5
   or earlier.  The recommended way to use them to check for "version
   X.Y or later" is (eg)

#if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__)   \
    && (__VALGRIND_MAJOR__ > 3                                   \
        || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
*/
#define __VALGRIND_MAJOR__    3
#define __VALGRIND_MINOR__    17


#include <stdarg.h>

/* Nb: this file might be included in a file compiled with -ansi.  So
   we can't use C++ style "//" comments nor the "asm" keyword (instead
   use "__asm__"). */

/* Derive some tags indicating what the target platform is.  Note
   that in this file we're using the compiler's CPP symbols for
   identifying architectures, which are different to the ones we use
   within the rest of Valgrind.  Note, __powerpc__ is active for both
   32 and 64-bit PPC, whereas __powerpc64__ is only active for the
   latter (on Linux, that is).

   Misc note: how to find out what's predefined in gcc by default:
   gcc -Wp,-dM somefile.c
*/
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_arm64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_nanomips_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris


/* The chain below is tested in order; the first matching platform
   wins, so exactly one PLAT_* tag ends up defined (or none, in which
   case NVALGRIND is forced on below). */
#if defined(__APPLE__) && defined(__i386__)
#  define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
#  define PLAT_amd64_darwin 1
#elif (defined(__MINGW32__) && defined(__i386__)) \
      || defined(__CYGWIN32__) \
      || (defined(_WIN32) && defined(_M_IX86))
#  define PLAT_x86_win32 1
#elif (defined(__MINGW32__) && defined(__x86_64__)) \
      || (defined(_WIN32) && defined(_M_X64))
/* __MINGW32__ and _WIN32 are defined in 64 bit mode as well. */
#  define PLAT_amd64_win64 1
#elif defined(__linux__) && defined(__i386__)
#  define PLAT_x86_linux 1
#elif defined(__linux__) && defined(__x86_64__) && !defined(__ILP32__)
#  define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
#  define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
/* Big Endian uses ELF version 1 */
#  define PLAT_ppc64be_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
/* Little Endian uses ELF version 2 */
#  define PLAT_ppc64le_linux 1
#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
#  define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
#  define PLAT_arm64_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
#  define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==64)
#  define PLAT_mips64_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==32)
#  define PLAT_mips32_linux 1
#elif defined(__linux__) && defined(__nanomips__)
#  define PLAT_nanomips_linux 1
#elif defined(__sun) && defined(__i386__)
#  define PLAT_x86_solaris 1
#elif defined(__sun) && defined(__x86_64__)
#  define PLAT_amd64_solaris 1
#else
/* If we're not compiling for our target platform, don't generate
   any inline asms. */
#  if !defined(NVALGRIND)
#    define NVALGRIND 1
#  endif
#endif


/* ------------------------------------------------------------------ */
/* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS.  There is nothing */
/* in here of use to end-users -- skip to the next section.
*/
/* ------------------------------------------------------------------ */

/*
 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
 * request. Accepts both pointers and integers as arguments.
 *
 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
 * client request that does not return a value.

 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
 * client request and whose value equals the client request result.  Accepts
 * both pointers and integers as arguments.  Note that such calls are not
 * necessarily pure functions -- they may have side effects.
 */

/* Statement form: assigns the request result to _zzq_rlval; when not
   running under Valgrind the result is just _zzq_default. */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

/* Statement form that discards the result (default value 0). */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                           _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)  \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                    (_zzq_request), (_zzq_arg1), (_zzq_arg2),           \
                    (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

#if defined(NVALGRIND)

/* Define NVALGRIND to completely remove the Valgrind magic sequence
   from the compiled code (analogous to NDEBUG's effects on
   assert()) */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
      (_zzq_default)

#else  /* ! NVALGRIND */

/* The following defines the magic code sequences which the JITter
   spots and handles magically.  Don't look too closely at them as
   they will rot your brain.

   The assembly code sequences for all architectures is in this one
   file.
   This is because this file must be stand-alone, and we don't
   want to have multiple files.

   For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
   value gets put in the return slot, so that everything works when
   this is executed not under Valgrind.  Args are passed in a memory
   block, and so there's no intrinsic limit to the number that could
   be passed, but it's currently five.

   The macro args are:
      _zzq_rlval    result lvalue
      _zzq_default  default value (result returned when running on real CPU)
      _zzq_request  request code
      _zzq_arg1..5  request params

   The other two macros are used to support function wrapping, and are
   a lot simpler.  VALGRIND_GET_NR_CONTEXT returns the value of the
   guest's NRADDR pseudo-register and whatever other information is
   needed to safely run the call original from the wrapper: on
   ppc64-linux, the R2 value at the divert point is also needed.  This
   information is abstracted into a user-visible type, OrigFn.

   VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
   guest, but guarantees that the branch instruction will not be
   redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
   branch-and-link-to-r11.  VALGRIND_CALL_NOREDIR is just text, not a
   complete inline asm, since it needs to be combined with more magic
   inline asm stuff to be useful.
*/

/* ----------------- x86-{linux,darwin,solaris} ---------------- */

#if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
    || (defined(PLAT_x86_win32) && defined(__GNUC__)) \
    || defined(PLAT_x86_solaris)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* The rotate amounts total 3+13+29+19 = 64 = 2*32 bits, so %edi ends
   up unchanged on a real CPU; the sequence only serves as a marker
   the JITter recognises. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
                     "roll $3, %%edi ; roll $13, %%edi\n\t" \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
        _zzq_default, _zzq_request, \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
  __extension__ \
  ({volatile unsigned int _zzq_args[6]; \
    volatile unsigned int _zzq_result; \
    _zzq_args[0] = (unsigned int)(_zzq_request); \
    _zzq_args[1] = (unsigned int)(_zzq_arg1); \
    _zzq_args[2] = (unsigned int)(_zzq_arg2); \
    _zzq_args[3] = (unsigned int)(_zzq_arg3); \
    _zzq_args[4] = (unsigned int)(_zzq_arg4); \
    _zzq_args[5] = (unsigned int)(_zzq_arg5); \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     /* %EDX = client_request ( %EAX ) */ \
                     "xchgl %%ebx,%%ebx" \
                     : "=d" (_zzq_result) \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default) \
                     : "cc", "memory" \
                    ); \
    _zzq_result; \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
    volatile unsigned int __addr; \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     /* %EAX = guest_NRADDR */ \
                     "xchgl %%ecx,%%ecx" \
                     : "=a" (__addr) \
                     : \
                     : "cc", "memory" \
                    ); \
    _zzq_orig->nraddr = __addr; \
  }

/* Just text to be pasted into a larger asm; not a complete statement. */
#define VALGRIND_CALL_NOREDIR_EAX \
                     __SPECIAL_INSTRUCTION_PREAMBLE \
                     /* call-noredir *%EAX */ \
                     "xchgl %%edx,%%edx\n\t"

#define VALGRIND_VEX_INJECT_IR() \
 do { \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     "xchgl %%edi,%%edi\n\t" \
                     : : : "cc", "memory" \
                    ); \
 } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__)
          || PLAT_x86_solaris */

/* ------------------------- x86-Win32 ------------------------- */

#if defined(PLAT_x86_win32) && !defined(__GNUC__)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#if defined(_MSC_VER)

/* MSVC inline-asm version of the same magic rotate-by-64-bits marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
                     __asm rol edi, 3 __asm rol edi, 13 \
                     __asm rol edi, 29 __asm rol edi, 19

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
        _zzq_default, _zzq_request, \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
    valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
        (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
        (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
        (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))

/* Out-of-line helper: MSVC's __asm blocks cannot appear inside a
   statement expression, so the request is issued from a real function. */
static __inline uintptr_t
valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
                                uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
                                uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
                                uintptr_t _zzq_arg5)
{
    volatile uintptr_t _zzq_args[6];
    volatile unsigned int _zzq_result;
    _zzq_args[0] = (uintptr_t)(_zzq_request);
    _zzq_args[1] = (uintptr_t)(_zzq_arg1);
    _zzq_args[2] = (uintptr_t)(_zzq_arg2);
    _zzq_args[3] = (uintptr_t)(_zzq_arg3);
    _zzq_args[4] = (uintptr_t)(_zzq_arg4);
    _zzq_args[5] = (uintptr_t)(_zzq_arg5);
    __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
            __SPECIAL_INSTRUCTION_PREAMBLE
            /* %EDX = client_request ( %EAX ) */
            __asm xchg ebx,ebx
            __asm mov _zzq_result, edx
    }
    return _zzq_result;
}

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
      volatile unsigned int __addr; \
      __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
              /* %EAX = guest_NRADDR */ \
              __asm xchg ecx,ecx \
              __asm mov __addr, eax \
      } \
      _zzq_orig->nraddr = __addr; \
    }

/* Deliberately expands to a compile error if used: function wrapping
   via CALL_NOREDIR is not supported under MSVC. */
#define VALGRIND_CALL_NOREDIR_EAX ERROR

#define VALGRIND_VEX_INJECT_IR() \
 do { \
    __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
            __asm xchg edi,edi \
    } \
 } while (0)

#else
#error Unsupported compiler.
#endif

#endif /* PLAT_x86_win32 */

/* ----------------- amd64-{linux,darwin,solaris} --------------- */

#if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
    || defined(PLAT_amd64_solaris) \
    || (defined(PLAT_amd64_win64) && defined(__GNUC__))

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotate amounts total 3+13+61+51 = 128 = 2*64 bits, so %rdi is left
   unchanged on a real CPU; the sequence is only a JITter marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
                     "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
        _zzq_default, _zzq_request, \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
    __extension__ \
    ({ volatile unsigned long int _zzq_args[6]; \
       volatile unsigned long int _zzq_result; \
       _zzq_args[0] = (unsigned long int)(_zzq_request); \
       _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
       _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
       _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
       _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
       _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
       __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                        /* %RDX = client_request ( %RAX ) */ \
                        "xchgq %%rbx,%%rbx" \
                        : "=d" (_zzq_result) \
                        : "a" (&_zzq_args[0]), "0" (_zzq_default) \
                        : "cc", "memory" \
                       ); \
       _zzq_result; \
    })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
      volatile unsigned long int __addr; \
      __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                       /* %RAX = guest_NRADDR */ \
                       "xchgq %%rcx,%%rcx" \
                       : "=a" (__addr) \
                       : \
                       : "cc", "memory" \
                      ); \
      _zzq_orig->nraddr = __addr; \
    }

#define VALGRIND_CALL_NOREDIR_RAX \
                     __SPECIAL_INSTRUCTION_PREAMBLE \
                     /* call-noredir *%RAX */ \
                     "xchgq %%rdx,%%rdx\n\t"

#define VALGRIND_VEX_INJECT_IR() \
 do { \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     "xchgq %%rdi,%%rdi\n\t" \
                     : : : "cc", "memory" \
                    ); \
 } while (0)

#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris
          || (PLAT_amd64_win64 && __GNUC__) */

/* ------------------------- amd64-Win64 ------------------------- */

#if defined(PLAT_amd64_win64) && !defined(__GNUC__)

#error Unsupported compiler.

#endif /* PLAT_amd64_win64 */

/* ------------------------ ppc32-linux ------------------------ */

#if defined(PLAT_ppc32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* rlwinm rotate amounts total 3+13+29+19 = 64 = 2*32 bits; r0 is left
   unchanged on a real CPU. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
                     "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
                     "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
        _zzq_default, _zzq_request, \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
                                                                  \
    __extension__ \
  ({ unsigned int _zzq_args[6]; \
     unsigned int _zzq_result; \
     unsigned int* _zzq_ptr; \
     _zzq_args[0] = (unsigned int)(_zzq_request); \
     _zzq_args[1] = (unsigned int)(_zzq_arg1); \
     _zzq_args[2] = (unsigned int)(_zzq_arg2); \
     _zzq_args[3] = (unsigned int)(_zzq_arg3); \
     _zzq_args[4] = (unsigned int)(_zzq_arg4); \
     _zzq_args[5] = (unsigned int)(_zzq_arg5); \
     _zzq_ptr = _zzq_args; \
     __asm__ volatile("mr 3,%1\n\t" /*default*/ \
                      "mr 4,%2\n\t" /*ptr*/ \
                      __SPECIAL_INSTRUCTION_PREAMBLE \
                      /* %R3 = client_request ( %R4 ) */ \
                      "or 1,1,1\n\t" \
                      "mr %0,3" /*result*/ \
                      : "=b" (_zzq_result) \
                      : "b" (_zzq_default), "b" (_zzq_ptr) \
                      : "cc", "memory", "r3", "r4"); \
     _zzq_result; \
     })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
    unsigned int __addr; \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     /* %R3 = guest_NRADDR */ \
                     "or 2,2,2\n\t" \
                     "mr %0,3" \
                     : "=b" (__addr) \
                     : \
                     : "cc", "memory", "r3" \
                    ); \
    _zzq_orig->nraddr = __addr; \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
                     __SPECIAL_INSTRUCTION_PREAMBLE \
                     /* branch-and-link-to-noredir *%R11 */ \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR() \
 do { \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     "or 5,5,5\n\t" \
                    ); \
 } while (0)

#endif /* PLAT_ppc32_linux */

/* ------------------------ ppc64-linux ------------------------ */

#if defined(PLAT_ppc64be_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2; /* what tocptr do we need? */
   }
   OrigFn;

/* rotldi amounts total 3+13+61+51 = 128 = 2*64 bits; r0 unchanged. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
        _zzq_default, _zzq_request, \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
                                                                  \
    __extension__ \
  ({ unsigned long int _zzq_args[6]; \
     unsigned long int _zzq_result; \
     unsigned long int* _zzq_ptr; \
     _zzq_args[0] = (unsigned long int)(_zzq_request); \
     _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
     _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
     _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
     _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
     _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
     _zzq_ptr = _zzq_args; \
     __asm__ volatile("mr 3,%1\n\t" /*default*/ \
                      "mr 4,%2\n\t" /*ptr*/ \
                      __SPECIAL_INSTRUCTION_PREAMBLE \
                      /* %R3 = client_request ( %R4 ) */ \
                      "or 1,1,1\n\t" \
                      "mr %0,3" /*result*/ \
                      : "=b" (_zzq_result) \
                      : "b" (_zzq_default), "b" (_zzq_ptr) \
                      : "cc", "memory", "r3", "r4"); \
     _zzq_result; \
     })

/* On ppc64be both nraddr and the TOC pointer (r2) at the divert point
   are needed, hence the two back-to-back requests. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
    unsigned long int __addr; \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     /* %R3 = guest_NRADDR */ \
                     "or 2,2,2\n\t" \
                     "mr %0,3" \
                     : "=b" (__addr) \
                     : \
                     : "cc", "memory", "r3" \
                    ); \
    _zzq_orig->nraddr = __addr; \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     /* %R3 = guest_NRADDR_GPR2 */ \
                     "or 4,4,4\n\t" \
                     "mr %0,3" \
                     : "=b" (__addr) \
                     : \
                     : "cc", "memory", "r3" \
                    ); \
    _zzq_orig->r2 = __addr; \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
                     __SPECIAL_INSTRUCTION_PREAMBLE \
                     /* branch-and-link-to-noredir *%R11 */ \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR() \
 do { \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     "or 5,5,5\n\t" \
                    ); \
 } while (0)

#endif /* PLAT_ppc64be_linux */

#if defined(PLAT_ppc64le_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2; /* what tocptr do we need? */
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
        _zzq_default, _zzq_request, \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
                                                                  \
    __extension__ \
  ({ unsigned long int _zzq_args[6]; \
     unsigned long int _zzq_result; \
     unsigned long int* _zzq_ptr; \
     _zzq_args[0] = (unsigned long int)(_zzq_request); \
     _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
     _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
     _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
     _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
     _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
     _zzq_ptr = _zzq_args; \
     __asm__ volatile("mr 3,%1\n\t" /*default*/ \
                      "mr 4,%2\n\t" /*ptr*/ \
                      __SPECIAL_INSTRUCTION_PREAMBLE \
                      /* %R3 = client_request ( %R4 ) */ \
                      "or 1,1,1\n\t" \
                      "mr %0,3" /*result*/ \
                      : "=b" (_zzq_result) \
                      : "b" (_zzq_default), "b" (_zzq_ptr) \
                      : "cc", "memory", "r3", "r4"); \
     _zzq_result; \
     })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
    unsigned long int __addr; \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     /* %R3 = guest_NRADDR */ \
                     "or 2,2,2\n\t" \
                     "mr %0,3" \
                     : "=b" (__addr) \
                     : \
                     : "cc", "memory", "r3" \
                    ); \
    _zzq_orig->nraddr = __addr; \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     /* %R3 = guest_NRADDR_GPR2 */ \
                     "or 4,4,4\n\t" \
                     "mr %0,3" \
                     : "=b" (__addr) \
                     : \
                     : "cc", "memory", "r3" \
                    ); \
    _zzq_orig->r2 = __addr; \
  }

/* Note: ppc64le (ELFv2) uses R12 as the branch target, unlike
   ppc64be's R11; the marker instruction is the same. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
                     __SPECIAL_INSTRUCTION_PREAMBLE \
                     /* branch-and-link-to-noredir *%R12 */ \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR() \
 do { \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     "or 5,5,5\n\t" \
                    ); \
 } while (0)

#endif /* PLAT_ppc64le_linux */

/* ------------------------- arm-linux ------------------------- */

#if defined(PLAT_arm_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotate amounts total 3+13+29+19 = 64 = 2*32 bits; r12 unchanged. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
      "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
      "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
        _zzq_default, _zzq_request, \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
                                                                  \
  __extension__ \
  ({volatile unsigned int _zzq_args[6]; \
    volatile unsigned int _zzq_result; \
    _zzq_args[0] = (unsigned int)(_zzq_request); \
    _zzq_args[1] = (unsigned int)(_zzq_arg1); \
    _zzq_args[2] = (unsigned int)(_zzq_arg2); \
    _zzq_args[3] = (unsigned int)(_zzq_arg3); \
    _zzq_args[4] = (unsigned int)(_zzq_arg4); \
    _zzq_args[5] = (unsigned int)(_zzq_arg5); \
    __asm__ volatile("mov r3, %1\n\t" /*default*/ \
                     "mov r4, %2\n\t" /*ptr*/ \
                     __SPECIAL_INSTRUCTION_PREAMBLE \
                     /* R3 = client_request ( R4 ) */ \
                     "orr r10, r10, r10\n\t" \
                     "mov %0, r3" /*result*/ \
                     : "=r" (_zzq_result) \
                     : "r" (_zzq_default), "r" (&_zzq_args[0]) \
                     : "cc","memory", "r3", "r4"); \
    _zzq_result; \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
    unsigned int __addr; \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     /* R3 = guest_NRADDR */ \
                     "orr r11, r11, r11\n\t" \
                     "mov %0, r3" \
                     : "=r" (__addr) \
                     : \
                     : "cc", "memory", "r3" \
                    ); \
    _zzq_orig->nraddr = __addr; \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
                     __SPECIAL_INSTRUCTION_PREAMBLE \
                     /* branch-and-link-to-noredir *%R4 */ \
                     "orr r12, r12, r12\n\t"

#define VALGRIND_VEX_INJECT_IR() \
 do { \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     "orr r9, r9, r9\n\t" \
                     : : : "cc", "memory" \
                    ); \
 } while (0)

#endif /* PLAT_arm_linux */

/* ------------------------ arm64-linux ------------------------- */

#if defined(PLAT_arm64_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotate amounts total 3+13+51+61 = 128 = 2*64 bits; x12 unchanged. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
      "ror x12, x12, #3 ; ror x12, x12, #13 \n\t" \
      "ror x12, x12, #51 ; ror x12, x12, #61 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
        _zzq_default, _zzq_request, \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
                                                                  \
  __extension__ \
  ({volatile unsigned long int _zzq_args[6]; \
    volatile unsigned long int _zzq_result; \
    _zzq_args[0] = (unsigned long int)(_zzq_request); \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
    __asm__ volatile("mov x3, %1\n\t" /*default*/ \
                     "mov x4, %2\n\t" /*ptr*/ \
                     __SPECIAL_INSTRUCTION_PREAMBLE \
                     /* X3 = client_request ( X4 ) */ \
                     "orr x10, x10, x10\n\t" \
                     "mov %0, x3" /*result*/ \
                     : "=r" (_zzq_result) \
                     : "r" ((unsigned long int)(_zzq_default)), \
                       "r" (&_zzq_args[0]) \
                     : "cc","memory", "x3", "x4"); \
    _zzq_result; \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
    unsigned long int __addr; \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     /* X3 = guest_NRADDR */ \
                     "orr x11, x11, x11\n\t" \
                     "mov %0, x3" \
                     : "=r" (__addr) \
                     : \
                     : "cc", "memory", "x3" \
                    ); \
    _zzq_orig->nraddr = __addr; \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
                     __SPECIAL_INSTRUCTION_PREAMBLE \
                     /* branch-and-link-to-noredir X8 */ \
                     "orr x12, x12, x12\n\t"

#define VALGRIND_VEX_INJECT_IR() \
 do { \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     "orr x9, x9, x9\n\t" \
                     : : : "cc", "memory" \
                    ); \
 } while (0)

#endif /* PLAT_arm64_linux */

/* ------------------------
 s390x-linux ------------------------ */

#if defined(PLAT_s390x_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
 * code.  This detection is implemented in platform specific toIR.c
 * (e.g. VEX/priv/guest_s390_decoder.c).
 */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
                     "lr 15,15\n\t" \
                     "lr 1,1\n\t" \
                     "lr 2,2\n\t" \
                     "lr 3,3\n\t"

/* One distinguishing no-op "lr" per request kind, placed immediately
   after the preamble so the decoder can tell the requests apart. */
#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE  "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE  "lr 5,5\n\t"

/* Issue a client request.  The default value is tied to %0 via the
   "0" input constraint, so it is returned unchanged when the request
   is not intercepted. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
        _zzq_default, _zzq_request, \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
  __extension__ \
 ({volatile unsigned long int _zzq_args[6]; \
   volatile unsigned long int _zzq_result; \
   _zzq_args[0] = (unsigned long int)(_zzq_request); \
   _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
   _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
   _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
   _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
   _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
   __asm__ volatile(/* r2 = args */ \
                    "lgr 2,%1\n\t" \
                    /* r3 = default */ \
                    "lgr 3,%2\n\t" \
                    __SPECIAL_INSTRUCTION_PREAMBLE \
                    __CLIENT_REQUEST_CODE \
                    /* results = r3 */ \
                    "lgr %0, 3\n\t" \
                    : "=d" (_zzq_result) \
                    : "a" (&_zzq_args[0]), \
                      "0" ((unsigned long int)_zzq_default) \
                    : "cc", "2", "3", "memory" \
                   ); \
   _zzq_result; \
 })

/* Store the non-redirected address of the original function into
   _zzq_rlval.nraddr (only meaningful inside a function wrapper). */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
   volatile unsigned long int __addr; \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                    __GET_NR_CONTEXT_CODE \
                    "lgr %0, 3\n\t" \
                    : "=a" (__addr) \
                    : \
                    : "cc", "3", "memory" \
                   ); \
   _zzq_orig->nraddr = __addr; \
 }

#define VALGRIND_CALL_NOREDIR_R1 \
                    __SPECIAL_INSTRUCTION_PREAMBLE \
                    __CALL_NO_REDIR_CODE

#define VALGRIND_VEX_INJECT_IR() \
 do { \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     __VEX_INJECT_IR_CODE); \
 } while (0)

#endif /* PLAT_s390x_linux */

/* ------------------------- mips32-linux ---------------- */

#if defined(PLAT_mips32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Encodings of the magic preamble below:
 * .word 0x342
 * .word 0x742
 * .word 0xC2
 * .word 0x4C2*/
#define __SPECIAL_INSTRUCTION_PREAMBLE \
                     "srl $0, $0, 13\n\t" \
                     "srl $0, $0, 29\n\t" \
                     "srl $0, $0, 3\n\t" \
                     "srl $0, $0, 19\n\t"

/* Issue a client request; $11 carries the default in and the result
   out, so the default survives when not running under Valgrind. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
        _zzq_default, _zzq_request, \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
  __extension__ \
  ({ volatile unsigned int _zzq_args[6]; \
    volatile unsigned int _zzq_result; \
    _zzq_args[0] = (unsigned int)(_zzq_request); \
    _zzq_args[1] = (unsigned int)(_zzq_arg1); \
    _zzq_args[2] = (unsigned int)(_zzq_arg2); \
    _zzq_args[3] = (unsigned int)(_zzq_arg3); \
    _zzq_args[4] = (unsigned int)(_zzq_arg4); \
    _zzq_args[5] = (unsigned int)(_zzq_arg5); \
    __asm__ volatile("move $11, %1\n\t" /*default*/ \
                     "move $12, %2\n\t" /*ptr*/ \
                     __SPECIAL_INSTRUCTION_PREAMBLE \
                     /* $11 = client_request ( $12 ) */ \
                     "or $13, $13, $13\n\t" \
                     "move %0, $11\n\t" /*result*/ \
                     : "=r" (_zzq_result) \
                     : "r" (_zzq_default), "r" (&_zzq_args[0]) \
                     : "$11", "$12", "memory"); \
    _zzq_result; \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
    volatile unsigned int __addr; \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     /* $11 = guest_NRADDR */ \
                     "or $14, $14, $14\n\t" \
                     "move %0, $11" /*result*/ \
                     : "=r" (__addr) \
                     : \
                     : "$11" \
); \
    _zzq_orig->nraddr = __addr; \
  }

#define VALGRIND_CALL_NOREDIR_T9 \
                     __SPECIAL_INSTRUCTION_PREAMBLE \
                     /* call-noredir *%t9 */ \
                     "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR() \
 do { \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     "or $11, $11, $11\n\t" \
                    ); \
 } while (0)


#endif /* PLAT_mips32_linux */

/* ------------------------- mips64-linux ---------------- */

#if defined(PLAT_mips64_linux)

typedef
   struct {
      unsigned long nraddr; /* where's the code? */
   }
   OrigFn;

/* Magic preamble, a net no-op on real hardware:
 * dsll $0,$0, 3
 * dsll $0,$0, 13
 * dsll $0,$0, 29
 * dsll $0,$0, 19*/
#define __SPECIAL_INSTRUCTION_PREAMBLE \
                     "dsll $0,$0, 3 ; dsll $0,$0,13\n\t" \
                     "dsll $0,$0,29 ; dsll $0,$0,19\n\t"

/* Issue a client request; $11 carries the default in and the result
   out, mirroring the mips32 variant but with 64-bit words. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
        _zzq_default, _zzq_request, \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
  __extension__ \
  ({ volatile unsigned long int _zzq_args[6]; \
    volatile unsigned long int _zzq_result; \
    _zzq_args[0] = (unsigned long int)(_zzq_request); \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
    __asm__ volatile("move $11, %1\n\t" /*default*/ \
                     "move $12, %2\n\t" /*ptr*/ \
                     __SPECIAL_INSTRUCTION_PREAMBLE \
                     /* $11 = client_request ( $12 ) */ \
                     "or $13, $13, $13\n\t" \
                     "move %0, $11\n\t" /*result*/ \
                     : "=r" (_zzq_result) \
                     : "r" (_zzq_default), "r" (&_zzq_args[0]) \
                     : "$11", "$12", "memory"); \
    _zzq_result; \
  })

/* Store the non-redirected address of the original function into
   _zzq_rlval.nraddr (only meaningful inside a function wrapper). */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
    volatile unsigned long int __addr; \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     /* $11 = guest_NRADDR */ \
                     "or $14, $14, $14\n\t" \
                     "move %0, $11" /*result*/ \
                     : "=r" (__addr) \
                     : \
                     : "$11"); \
    _zzq_orig->nraddr = __addr; \
  }

#define VALGRIND_CALL_NOREDIR_T9 \
                     __SPECIAL_INSTRUCTION_PREAMBLE \
                     /* call-noredir $25 */ \
                     "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR() \
 do { \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     "or $11, $11, $11\n\t" \
                    ); \
 } while (0)

#endif /* PLAT_mips64_linux */

#if defined(PLAT_nanomips_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;
/* Encodings of the magic preamble below:
   8000 c04d  srl zero, zero, 13
   8000 c05d  srl zero, zero, 29
   8000 c043  srl zero, zero, 3
   8000 c053  srl zero, zero, 19
*/

#define __SPECIAL_INSTRUCTION_PREAMBLE "srl[32] $zero, $zero, 13 \n\t" \
                                       "srl[32] $zero, $zero, 29 \n\t" \
                                       "srl[32] $zero, $zero, 3 \n\t" \
                                       "srl[32] $zero, $zero, 19 \n\t"

/* Issue a client request; $a7 carries the default in and the result
   out, so the default survives when not running under Valgrind. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
        _zzq_default, _zzq_request, \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
  __extension__ \
  ({ volatile unsigned int _zzq_args[6]; \
    volatile unsigned int _zzq_result; \
    _zzq_args[0] = (unsigned int)(_zzq_request); \
    _zzq_args[1] = (unsigned int)(_zzq_arg1); \
    _zzq_args[2] = (unsigned int)(_zzq_arg2); \
    _zzq_args[3] = (unsigned int)(_zzq_arg3); \
    _zzq_args[4] = (unsigned int)(_zzq_arg4); \
    _zzq_args[5] = (unsigned int)(_zzq_arg5); \
    __asm__ volatile("move $a7, %1\n\t" /* default */ \
                     "move $t0, %2\n\t" /* ptr */ \
                     __SPECIAL_INSTRUCTION_PREAMBLE \
                     /* $a7 = client_request( $t0 ) */ \
                     "or[32] $t0, $t0, $t0\n\t" \
                     "move %0, $a7\n\t" /* result */ \
                     : "=r" (_zzq_result) \
                     : "r" (_zzq_default), "r" (&_zzq_args[0]) \
                     : "$a7", "$t0", "memory"); \
    _zzq_result; \
  })

#define \
VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
    volatile unsigned long int __addr; \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     /* $a7 = guest_NRADDR */ \
                     "or[32] $t1, $t1, $t1\n\t" \
                     "move %0, $a7" /*result*/ \
                     : "=r" (__addr) \
                     : \
                     : "$a7"); \
    _zzq_orig->nraddr = __addr; \
  }

#define VALGRIND_CALL_NOREDIR_T9 \
                     __SPECIAL_INSTRUCTION_PREAMBLE \
                     /* call-noredir $25 */ \
                     "or[32] $t2, $t2, $t2\n\t"

#define VALGRIND_VEX_INJECT_IR() \
 do { \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     "or[32] $t3, $t3, $t3\n\t" \
                    ); \
 } while (0)

#endif /* PLAT_nanomips_linux */
/* Insert assembly code for other platforms here... */

#endif /* NVALGRIND */


/* ------------------------------------------------------------------ */
/* PLATFORM SPECIFICS for FUNCTION WRAPPING.  This is all very        */
/* ugly.  It's the least-worst tradeoff I can think of.               */
/* ------------------------------------------------------------------ */

/* This section defines magic (a.k.a appalling-hack) macros for doing
   guaranteed-no-redirection macros, so as to get from function
   wrappers to the functions they are wrapping.  The whole point is to
   construct standard call sequences, but to do the call itself with a
   special no-redirect call pseudo-instruction that the JIT
   understands and handles specially.  This section is long and
   repetitious, and I can't see a way to make it shorter.

   The naming scheme is as follows:

      CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}

   'W' stands for "word" and 'v' for "void".  Hence there are
   different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
   and for each, the possibility of returning a word-typed result, or
   no result.
*/

/* Use these to write the name of your wrapper.  NOTE: duplicates
   VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h.  NOTE also: inserts
   the default behaviour equivalance class tag "0000" into the name.
   See pub_tool_redir.h for details -- normally you don't need to
   think about this, though. */

/* Use an extra level of macroisation so as to ensure the soname/fnname
   args are fully macro-expanded before pasting them together. */
#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd

#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname) \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname) \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)

/* Use this macro from within a wrapper function to collect the
   context (address and possibly other info) of the original function.
   Once you have that you can then use it in one of the CALL_FN_
   macros.  The type of the argument _lval is OrigFn. */
#define VALGRIND_GET_ORIG_FN(_lval)  VALGRIND_GET_NR_CONTEXT(_lval)

/* Also provide end-user facilities for function replacement, rather
   than wrapping.  A replacement function differs from a wrapper in
   that it has no way to get hold of the original function being
   called, and hence no way to call onwards to it.  In a replacement
   function, VALGRIND_GET_ORIG_FN always returns zero. */

#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname) \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)

#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname) \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)

/* Derivatives of the main macros below, for calling functions
   returning void.
*/

/* Each CALL_FN_v_* simply forwards to the word-returning variant and
   discards the result into a volatile _junk local. */
#define CALL_FN_v_v(fnptr) \
   do { volatile unsigned long _junk; \
        CALL_FN_W_v(_junk,fnptr); } while (0)

#define CALL_FN_v_W(fnptr, arg1) \
   do { volatile unsigned long _junk; \
        CALL_FN_W_W(_junk,fnptr,arg1); } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2) \
   do { volatile unsigned long _junk; \
        CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3) \
   do { volatile unsigned long _junk; \
        CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4) \
   do { volatile unsigned long _junk; \
        CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5) \
   do { volatile unsigned long _junk; \
        CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { volatile unsigned long _junk; \
        CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7) \
   do { volatile unsigned long _junk; \
        CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)

/* ----------------- x86-{linux,darwin,solaris} ---------------- */

#if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
    || defined(PLAT_x86_solaris)

/* These regs are trashed by the hidden call.  No need to mention eax
   as gcc can already see that, plus causes gcc to bomb. */
#define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions.
*/

/* The old stack pointer is parked in %edi (which every CALL_FN_
   macro below lists as trashed) and restored from it afterwards. */
#define VALGRIND_ALIGN_STACK \
   "movl %%esp,%%edi\n\t" \
   "andl $0xfffffff0,%%esp\n\t"
#define VALGRIND_RESTORE_STACK \
   "movl %%edi,%%esp\n\t"

/* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
   long) == 4. */

#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=a" (_res) \
         : /*in*/    "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* The "subl $12" pads the one pushed argument out to a 16-byte
   multiple, keeping the aligned stack aligned at the call. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "subl $12, %%esp\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=a" (_res) \
         : /*in*/    "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "subl $8, %%esp\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=a" (_res) \
         : /*in*/    "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Pattern for all following arities: copy nraddr + args into
   _argvec, align the stack, pad (subl) so pushed args total a
   16-byte multiple, push args right-to-left, then do the
   no-redirect call through %eax. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "subl $4, %%esp\n\t" \
         "pushl 12(%%eax)\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=a" (_res) \
         : /*in*/    "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "pushl 16(%%eax)\n\t" \
         "pushl 12(%%eax)\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=a" (_res) \
         : /*in*/    "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "subl $12, %%esp\n\t" \
         "pushl 20(%%eax)\n\t" \
         "pushl 16(%%eax)\n\t" \
         "pushl 12(%%eax)\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=a" (_res) \
         : /*in*/    "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "subl $8, %%esp\n\t" \
         "pushl 24(%%eax)\n\t" \
         "pushl 20(%%eax)\n\t" \
         "pushl 16(%%eax)\n\t" \
         "pushl 12(%%eax)\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=a" (_res) \
         : /*in*/    "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                 arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "subl $4, %%esp\n\t" \
         "pushl 28(%%eax)\n\t" \
         "pushl 24(%%eax)\n\t" \
         "pushl 20(%%eax)\n\t" \
         "pushl 16(%%eax)\n\t" \
         "pushl 12(%%eax)\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=a" (_res) \
         : /*in*/    "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                 arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "pushl 32(%%eax)\n\t" \
         "pushl 28(%%eax)\n\t" \
         "pushl 24(%%eax)\n\t" \
         "pushl 20(%%eax)\n\t" \
         "pushl 16(%%eax)\n\t" \
         "pushl 12(%%eax)\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=a" (_res) \
         : /*in*/    "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Same pattern as the lower arities: the subl pads the pushed args
   out to a 16-byte multiple before the right-to-left pushes. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                 arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "subl $12, %%esp\n\t" \
         "pushl 36(%%eax)\n\t" \
         "pushl 32(%%eax)\n\t" \
         "pushl 28(%%eax)\n\t" \
         "pushl 24(%%eax)\n\t" \
         "pushl 20(%%eax)\n\t" \
         "pushl 16(%%eax)\n\t" \
         "pushl 12(%%eax)\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=a" (_res) \
         : /*in*/    "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                  arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "subl $8, %%esp\n\t" \
         "pushl 40(%%eax)\n\t" \
         "pushl 36(%%eax)\n\t" \
         "pushl 32(%%eax)\n\t" \
         "pushl 28(%%eax)\n\t" \
         "pushl 24(%%eax)\n\t" \
         "pushl 20(%%eax)\n\t" \
         "pushl 16(%%eax)\n\t" \
         "pushl 12(%%eax)\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=a" (_res) \
         : /*in*/    "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                                  arg6,arg7,arg8,arg9,arg10, \
                                  arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "subl $4, %%esp\n\t" \
         "pushl 44(%%eax)\n\t" \
         "pushl 40(%%eax)\n\t" \
         "pushl 36(%%eax)\n\t" \
         "pushl 32(%%eax)\n\t" \
         "pushl 28(%%eax)\n\t" \
         "pushl 24(%%eax)\n\t" \
         "pushl 20(%%eax)\n\t" \
         "pushl 16(%%eax)\n\t" \
         "pushl 12(%%eax)\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=a" (_res) \
         : /*in*/    "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                                  arg6,arg7,arg8,arg9,arg10, \
                                  arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "pushl 48(%%eax)\n\t" \
         "pushl 44(%%eax)\n\t" \
         "pushl 40(%%eax)\n\t" \
         "pushl 36(%%eax)\n\t" \
         "pushl 32(%%eax)\n\t" \
         "pushl 28(%%eax)\n\t" \
         "pushl 24(%%eax)\n\t" \
         "pushl 20(%%eax)\n\t" \
         "pushl 16(%%eax)\n\t" \
         "pushl 12(%%eax)\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=a" (_res) \
         : /*in*/    "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin || PLAT_x86_solaris */

/* ---------------- amd64-{linux,darwin,solaris} --------------- */

#if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
    || defined(PLAT_amd64_solaris)

/* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */

/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi",       \
                            "rdi", "r8", "r9", "r10", "r11"

/* This is all pretty complex.  It's so as to make stack unwinding
   work reliably.  See bug 243270.  The basic problem is the sub and
   add of 128 of %rsp in all of the following macros.  If gcc believes
   the CFA is in %rsp, then unwinding may fail, because what's at the
   CFA is not what gcc "expected" when it constructs the CFIs for the
   places where the macros are instantiated.

   But we can't just add a CFI annotation to increase the CFA offset
   by 128, to match the sub of 128 from %rsp, because we don't know
   whether gcc has chosen %rsp as the CFA at that point, or whether it
   has chosen some other register (eg, %rbp).  In the latter case,
   adding a CFI annotation to change the CFA offset is simply wrong.

   So the solution is to get hold of the CFA using
   __builtin_dwarf_cfa(), put it in a known register, and add a
   CFI annotation to say what the register is.  We choose %rbp for
   this (perhaps perversely), because:

   (1) %rbp is already subject to unwinding.  If a new register was
       chosen then the unwinder would have to unwind it in all stack
       traces, which is expensive, and

   (2) %rbp is already subject to precise exception updates in the
       JIT.  If a new register was chosen, we'd have to have precise
       exceptions for it too, which reduces performance of the
       generated code.

   However .. one extra complication.  We can't just whack the result
   of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
   list of trashed registers at the end of the inline assembly
   fragments; gcc won't allow %rbp to appear in that list.  Hence
   instead we need to stash %rbp in %r15 for the duration of the asm,
   and say that %r15 is trashed instead.  gcc seems happy to go with
   that.

   Oh .. and this all needs to be conditionalised so that it is
   unchanged from before this commit, when compiled with older gccs
   that don't support __builtin_dwarf_cfa.  Furthermore, since
   this header file is freestanding, it has to be independent of
   config.h, and so the following conditionalisation cannot depend on
   configure time checks.

   Although it's not clear from
   'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
   this expression excludes Darwin.
   .cfi directives in Darwin assembly appear to be completely
   different and I haven't investigated how they work.

   For even more entertainment value, note we have to use the
   completely undocumented __builtin_dwarf_cfa(), which appears to
   really compute the CFA, whereas __builtin_frame_address(0) claims
   to but actually doesn't.
See 1714 https://bugs.kde.org/show_bug.cgi?id=243270#c47 1715 */ 1716 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM) 1717 # define __FRAME_POINTER \ 1718 ,"r"(__builtin_dwarf_cfa()) 1719 # define VALGRIND_CFI_PROLOGUE \ 1720 "movq %%rbp, %%r15\n\t" \ 1721 "movq %2, %%rbp\n\t" \ 1722 ".cfi_remember_state\n\t" \ 1723 ".cfi_def_cfa rbp, 0\n\t" 1724 # define VALGRIND_CFI_EPILOGUE \ 1725 "movq %%r15, %%rbp\n\t" \ 1726 ".cfi_restore_state\n\t" 1727 #else 1728 # define __FRAME_POINTER 1729 # define VALGRIND_CFI_PROLOGUE 1730 # define VALGRIND_CFI_EPILOGUE 1731 #endif 1732 1733 /* Macros to save and align the stack before making a function 1734 call and restore it afterwards as gcc may not keep the stack 1735 pointer aligned if it doesn't realise calls are being made 1736 to other functions. */ 1737 1738 #define VALGRIND_ALIGN_STACK \ 1739 "movq %%rsp,%%r14\n\t" \ 1740 "andq $0xfffffffffffffff0,%%rsp\n\t" 1741 #define VALGRIND_RESTORE_STACK \ 1742 "movq %%r14,%%rsp\n\t" 1743 1744 /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned 1745 long) == 8. */ 1746 1747 /* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_ 1748 macros. In order not to trash the stack redzone, we need to drop 1749 %rsp by 128 before the hidden call, and restore afterwards. The 1750 nastyness is that it is only by luck that the stack still appears 1751 to be unwindable during the hidden call - since then the behaviour 1752 of any routine using this macro does not match what the CFI data 1753 says. Sigh. 1754 1755 Why is this important? Imagine that a wrapper has a stack 1756 allocated local, and passes to the hidden call, a pointer to it. 1757 Because gcc does not know about the hidden call, it may allocate 1758 that local in the redzone. Unfortunately the hidden call may then 1759 trash it before it comes to use it. So we must step clear of the 1760 redzone, for the duration of the hidden call, to make it safe. 
1761 1762 Probably the same problem afflicts the other redzone-style ABIs too 1763 (ppc64-linux); but for those, the stack is 1764 self describing (none of this CFI nonsense) so at least messing 1765 with the stack pointer doesn't give a danger of non-unwindable 1766 stack. */ 1767 1768 #define CALL_FN_W_v(lval, orig) \ 1769 do { \ 1770 volatile OrigFn _orig = (orig); \ 1771 volatile unsigned long _argvec[1]; \ 1772 volatile unsigned long _res; \ 1773 _argvec[0] = (unsigned long)_orig.nraddr; \ 1774 __asm__ volatile( \ 1775 VALGRIND_CFI_PROLOGUE \ 1776 VALGRIND_ALIGN_STACK \ 1777 "subq $128,%%rsp\n\t" \ 1778 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1779 VALGRIND_CALL_NOREDIR_RAX \ 1780 VALGRIND_RESTORE_STACK \ 1781 VALGRIND_CFI_EPILOGUE \ 1782 : /*out*/ "=a" (_res) \ 1783 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 1784 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 1785 ); \ 1786 lval = (__typeof__(lval)) _res; \ 1787 } while (0) 1788 1789 #define CALL_FN_W_W(lval, orig, arg1) \ 1790 do { \ 1791 volatile OrigFn _orig = (orig); \ 1792 volatile unsigned long _argvec[2]; \ 1793 volatile unsigned long _res; \ 1794 _argvec[0] = (unsigned long)_orig.nraddr; \ 1795 _argvec[1] = (unsigned long)(arg1); \ 1796 __asm__ volatile( \ 1797 VALGRIND_CFI_PROLOGUE \ 1798 VALGRIND_ALIGN_STACK \ 1799 "subq $128,%%rsp\n\t" \ 1800 "movq 8(%%rax), %%rdi\n\t" \ 1801 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1802 VALGRIND_CALL_NOREDIR_RAX \ 1803 VALGRIND_RESTORE_STACK \ 1804 VALGRIND_CFI_EPILOGUE \ 1805 : /*out*/ "=a" (_res) \ 1806 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 1807 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 1808 ); \ 1809 lval = (__typeof__(lval)) _res; \ 1810 } while (0) 1811 1812 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \ 1813 do { \ 1814 volatile OrigFn _orig = (orig); \ 1815 volatile unsigned long _argvec[3]; \ 1816 volatile unsigned long _res; \ 1817 _argvec[0] = (unsigned long)_orig.nraddr; \ 1818 _argvec[1] = 
(unsigned long)(arg1); \ 1819 _argvec[2] = (unsigned long)(arg2); \ 1820 __asm__ volatile( \ 1821 VALGRIND_CFI_PROLOGUE \ 1822 VALGRIND_ALIGN_STACK \ 1823 "subq $128,%%rsp\n\t" \ 1824 "movq 16(%%rax), %%rsi\n\t" \ 1825 "movq 8(%%rax), %%rdi\n\t" \ 1826 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1827 VALGRIND_CALL_NOREDIR_RAX \ 1828 VALGRIND_RESTORE_STACK \ 1829 VALGRIND_CFI_EPILOGUE \ 1830 : /*out*/ "=a" (_res) \ 1831 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 1832 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 1833 ); \ 1834 lval = (__typeof__(lval)) _res; \ 1835 } while (0) 1836 1837 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \ 1838 do { \ 1839 volatile OrigFn _orig = (orig); \ 1840 volatile unsigned long _argvec[4]; \ 1841 volatile unsigned long _res; \ 1842 _argvec[0] = (unsigned long)_orig.nraddr; \ 1843 _argvec[1] = (unsigned long)(arg1); \ 1844 _argvec[2] = (unsigned long)(arg2); \ 1845 _argvec[3] = (unsigned long)(arg3); \ 1846 __asm__ volatile( \ 1847 VALGRIND_CFI_PROLOGUE \ 1848 VALGRIND_ALIGN_STACK \ 1849 "subq $128,%%rsp\n\t" \ 1850 "movq 24(%%rax), %%rdx\n\t" \ 1851 "movq 16(%%rax), %%rsi\n\t" \ 1852 "movq 8(%%rax), %%rdi\n\t" \ 1853 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1854 VALGRIND_CALL_NOREDIR_RAX \ 1855 VALGRIND_RESTORE_STACK \ 1856 VALGRIND_CFI_EPILOGUE \ 1857 : /*out*/ "=a" (_res) \ 1858 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 1859 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 1860 ); \ 1861 lval = (__typeof__(lval)) _res; \ 1862 } while (0) 1863 1864 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \ 1865 do { \ 1866 volatile OrigFn _orig = (orig); \ 1867 volatile unsigned long _argvec[5]; \ 1868 volatile unsigned long _res; \ 1869 _argvec[0] = (unsigned long)_orig.nraddr; \ 1870 _argvec[1] = (unsigned long)(arg1); \ 1871 _argvec[2] = (unsigned long)(arg2); \ 1872 _argvec[3] = (unsigned long)(arg3); \ 1873 _argvec[4] = (unsigned long)(arg4); \ 1874 __asm__ volatile( \ 1875 
VALGRIND_CFI_PROLOGUE \ 1876 VALGRIND_ALIGN_STACK \ 1877 "subq $128,%%rsp\n\t" \ 1878 "movq 32(%%rax), %%rcx\n\t" \ 1879 "movq 24(%%rax), %%rdx\n\t" \ 1880 "movq 16(%%rax), %%rsi\n\t" \ 1881 "movq 8(%%rax), %%rdi\n\t" \ 1882 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1883 VALGRIND_CALL_NOREDIR_RAX \ 1884 VALGRIND_RESTORE_STACK \ 1885 VALGRIND_CFI_EPILOGUE \ 1886 : /*out*/ "=a" (_res) \ 1887 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 1888 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 1889 ); \ 1890 lval = (__typeof__(lval)) _res; \ 1891 } while (0) 1892 1893 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \ 1894 do { \ 1895 volatile OrigFn _orig = (orig); \ 1896 volatile unsigned long _argvec[6]; \ 1897 volatile unsigned long _res; \ 1898 _argvec[0] = (unsigned long)_orig.nraddr; \ 1899 _argvec[1] = (unsigned long)(arg1); \ 1900 _argvec[2] = (unsigned long)(arg2); \ 1901 _argvec[3] = (unsigned long)(arg3); \ 1902 _argvec[4] = (unsigned long)(arg4); \ 1903 _argvec[5] = (unsigned long)(arg5); \ 1904 __asm__ volatile( \ 1905 VALGRIND_CFI_PROLOGUE \ 1906 VALGRIND_ALIGN_STACK \ 1907 "subq $128,%%rsp\n\t" \ 1908 "movq 40(%%rax), %%r8\n\t" \ 1909 "movq 32(%%rax), %%rcx\n\t" \ 1910 "movq 24(%%rax), %%rdx\n\t" \ 1911 "movq 16(%%rax), %%rsi\n\t" \ 1912 "movq 8(%%rax), %%rdi\n\t" \ 1913 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1914 VALGRIND_CALL_NOREDIR_RAX \ 1915 VALGRIND_RESTORE_STACK \ 1916 VALGRIND_CFI_EPILOGUE \ 1917 : /*out*/ "=a" (_res) \ 1918 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 1919 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 1920 ); \ 1921 lval = (__typeof__(lval)) _res; \ 1922 } while (0) 1923 1924 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \ 1925 do { \ 1926 volatile OrigFn _orig = (orig); \ 1927 volatile unsigned long _argvec[7]; \ 1928 volatile unsigned long _res; \ 1929 _argvec[0] = (unsigned long)_orig.nraddr; \ 1930 _argvec[1] = (unsigned long)(arg1); \ 1931 _argvec[2] = 
(unsigned long)(arg2); \ 1932 _argvec[3] = (unsigned long)(arg3); \ 1933 _argvec[4] = (unsigned long)(arg4); \ 1934 _argvec[5] = (unsigned long)(arg5); \ 1935 _argvec[6] = (unsigned long)(arg6); \ 1936 __asm__ volatile( \ 1937 VALGRIND_CFI_PROLOGUE \ 1938 VALGRIND_ALIGN_STACK \ 1939 "subq $128,%%rsp\n\t" \ 1940 "movq 48(%%rax), %%r9\n\t" \ 1941 "movq 40(%%rax), %%r8\n\t" \ 1942 "movq 32(%%rax), %%rcx\n\t" \ 1943 "movq 24(%%rax), %%rdx\n\t" \ 1944 "movq 16(%%rax), %%rsi\n\t" \ 1945 "movq 8(%%rax), %%rdi\n\t" \ 1946 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1947 VALGRIND_CALL_NOREDIR_RAX \ 1948 VALGRIND_RESTORE_STACK \ 1949 VALGRIND_CFI_EPILOGUE \ 1950 : /*out*/ "=a" (_res) \ 1951 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 1952 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 1953 ); \ 1954 lval = (__typeof__(lval)) _res; \ 1955 } while (0) 1956 1957 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 1958 arg7) \ 1959 do { \ 1960 volatile OrigFn _orig = (orig); \ 1961 volatile unsigned long _argvec[8]; \ 1962 volatile unsigned long _res; \ 1963 _argvec[0] = (unsigned long)_orig.nraddr; \ 1964 _argvec[1] = (unsigned long)(arg1); \ 1965 _argvec[2] = (unsigned long)(arg2); \ 1966 _argvec[3] = (unsigned long)(arg3); \ 1967 _argvec[4] = (unsigned long)(arg4); \ 1968 _argvec[5] = (unsigned long)(arg5); \ 1969 _argvec[6] = (unsigned long)(arg6); \ 1970 _argvec[7] = (unsigned long)(arg7); \ 1971 __asm__ volatile( \ 1972 VALGRIND_CFI_PROLOGUE \ 1973 VALGRIND_ALIGN_STACK \ 1974 "subq $136,%%rsp\n\t" \ 1975 "pushq 56(%%rax)\n\t" \ 1976 "movq 48(%%rax), %%r9\n\t" \ 1977 "movq 40(%%rax), %%r8\n\t" \ 1978 "movq 32(%%rax), %%rcx\n\t" \ 1979 "movq 24(%%rax), %%rdx\n\t" \ 1980 "movq 16(%%rax), %%rsi\n\t" \ 1981 "movq 8(%%rax), %%rdi\n\t" \ 1982 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1983 VALGRIND_CALL_NOREDIR_RAX \ 1984 VALGRIND_RESTORE_STACK \ 1985 VALGRIND_CFI_EPILOGUE \ 1986 : /*out*/ "=a" (_res) \ 1987 : /*in*/ "a" (&_argvec[0]) 
__FRAME_POINTER \ 1988 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 1989 ); \ 1990 lval = (__typeof__(lval)) _res; \ 1991 } while (0) 1992 1993 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 1994 arg7,arg8) \ 1995 do { \ 1996 volatile OrigFn _orig = (orig); \ 1997 volatile unsigned long _argvec[9]; \ 1998 volatile unsigned long _res; \ 1999 _argvec[0] = (unsigned long)_orig.nraddr; \ 2000 _argvec[1] = (unsigned long)(arg1); \ 2001 _argvec[2] = (unsigned long)(arg2); \ 2002 _argvec[3] = (unsigned long)(arg3); \ 2003 _argvec[4] = (unsigned long)(arg4); \ 2004 _argvec[5] = (unsigned long)(arg5); \ 2005 _argvec[6] = (unsigned long)(arg6); \ 2006 _argvec[7] = (unsigned long)(arg7); \ 2007 _argvec[8] = (unsigned long)(arg8); \ 2008 __asm__ volatile( \ 2009 VALGRIND_CFI_PROLOGUE \ 2010 VALGRIND_ALIGN_STACK \ 2011 "subq $128,%%rsp\n\t" \ 2012 "pushq 64(%%rax)\n\t" \ 2013 "pushq 56(%%rax)\n\t" \ 2014 "movq 48(%%rax), %%r9\n\t" \ 2015 "movq 40(%%rax), %%r8\n\t" \ 2016 "movq 32(%%rax), %%rcx\n\t" \ 2017 "movq 24(%%rax), %%rdx\n\t" \ 2018 "movq 16(%%rax), %%rsi\n\t" \ 2019 "movq 8(%%rax), %%rdi\n\t" \ 2020 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 2021 VALGRIND_CALL_NOREDIR_RAX \ 2022 VALGRIND_RESTORE_STACK \ 2023 VALGRIND_CFI_EPILOGUE \ 2024 : /*out*/ "=a" (_res) \ 2025 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 2026 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 2027 ); \ 2028 lval = (__typeof__(lval)) _res; \ 2029 } while (0) 2030 2031 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 2032 arg7,arg8,arg9) \ 2033 do { \ 2034 volatile OrigFn _orig = (orig); \ 2035 volatile unsigned long _argvec[10]; \ 2036 volatile unsigned long _res; \ 2037 _argvec[0] = (unsigned long)_orig.nraddr; \ 2038 _argvec[1] = (unsigned long)(arg1); \ 2039 _argvec[2] = (unsigned long)(arg2); \ 2040 _argvec[3] = (unsigned long)(arg3); \ 2041 _argvec[4] = (unsigned long)(arg4); \ 2042 _argvec[5] = (unsigned long)(arg5); \ 
2043 _argvec[6] = (unsigned long)(arg6); \ 2044 _argvec[7] = (unsigned long)(arg7); \ 2045 _argvec[8] = (unsigned long)(arg8); \ 2046 _argvec[9] = (unsigned long)(arg9); \ 2047 __asm__ volatile( \ 2048 VALGRIND_CFI_PROLOGUE \ 2049 VALGRIND_ALIGN_STACK \ 2050 "subq $136,%%rsp\n\t" \ 2051 "pushq 72(%%rax)\n\t" \ 2052 "pushq 64(%%rax)\n\t" \ 2053 "pushq 56(%%rax)\n\t" \ 2054 "movq 48(%%rax), %%r9\n\t" \ 2055 "movq 40(%%rax), %%r8\n\t" \ 2056 "movq 32(%%rax), %%rcx\n\t" \ 2057 "movq 24(%%rax), %%rdx\n\t" \ 2058 "movq 16(%%rax), %%rsi\n\t" \ 2059 "movq 8(%%rax), %%rdi\n\t" \ 2060 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 2061 VALGRIND_CALL_NOREDIR_RAX \ 2062 VALGRIND_RESTORE_STACK \ 2063 VALGRIND_CFI_EPILOGUE \ 2064 : /*out*/ "=a" (_res) \ 2065 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 2066 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 2067 ); \ 2068 lval = (__typeof__(lval)) _res; \ 2069 } while (0) 2070 2071 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 2072 arg7,arg8,arg9,arg10) \ 2073 do { \ 2074 volatile OrigFn _orig = (orig); \ 2075 volatile unsigned long _argvec[11]; \ 2076 volatile unsigned long _res; \ 2077 _argvec[0] = (unsigned long)_orig.nraddr; \ 2078 _argvec[1] = (unsigned long)(arg1); \ 2079 _argvec[2] = (unsigned long)(arg2); \ 2080 _argvec[3] = (unsigned long)(arg3); \ 2081 _argvec[4] = (unsigned long)(arg4); \ 2082 _argvec[5] = (unsigned long)(arg5); \ 2083 _argvec[6] = (unsigned long)(arg6); \ 2084 _argvec[7] = (unsigned long)(arg7); \ 2085 _argvec[8] = (unsigned long)(arg8); \ 2086 _argvec[9] = (unsigned long)(arg9); \ 2087 _argvec[10] = (unsigned long)(arg10); \ 2088 __asm__ volatile( \ 2089 VALGRIND_CFI_PROLOGUE \ 2090 VALGRIND_ALIGN_STACK \ 2091 "subq $128,%%rsp\n\t" \ 2092 "pushq 80(%%rax)\n\t" \ 2093 "pushq 72(%%rax)\n\t" \ 2094 "pushq 64(%%rax)\n\t" \ 2095 "pushq 56(%%rax)\n\t" \ 2096 "movq 48(%%rax), %%r9\n\t" \ 2097 "movq 40(%%rax), %%r8\n\t" \ 2098 "movq 32(%%rax), %%rcx\n\t" \ 2099 "movq 
24(%%rax), %%rdx\n\t" \ 2100 "movq 16(%%rax), %%rsi\n\t" \ 2101 "movq 8(%%rax), %%rdi\n\t" \ 2102 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 2103 VALGRIND_CALL_NOREDIR_RAX \ 2104 VALGRIND_RESTORE_STACK \ 2105 VALGRIND_CFI_EPILOGUE \ 2106 : /*out*/ "=a" (_res) \ 2107 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 2108 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 2109 ); \ 2110 lval = (__typeof__(lval)) _res; \ 2111 } while (0) 2112 2113 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 2114 arg7,arg8,arg9,arg10,arg11) \ 2115 do { \ 2116 volatile OrigFn _orig = (orig); \ 2117 volatile unsigned long _argvec[12]; \ 2118 volatile unsigned long _res; \ 2119 _argvec[0] = (unsigned long)_orig.nraddr; \ 2120 _argvec[1] = (unsigned long)(arg1); \ 2121 _argvec[2] = (unsigned long)(arg2); \ 2122 _argvec[3] = (unsigned long)(arg3); \ 2123 _argvec[4] = (unsigned long)(arg4); \ 2124 _argvec[5] = (unsigned long)(arg5); \ 2125 _argvec[6] = (unsigned long)(arg6); \ 2126 _argvec[7] = (unsigned long)(arg7); \ 2127 _argvec[8] = (unsigned long)(arg8); \ 2128 _argvec[9] = (unsigned long)(arg9); \ 2129 _argvec[10] = (unsigned long)(arg10); \ 2130 _argvec[11] = (unsigned long)(arg11); \ 2131 __asm__ volatile( \ 2132 VALGRIND_CFI_PROLOGUE \ 2133 VALGRIND_ALIGN_STACK \ 2134 "subq $136,%%rsp\n\t" \ 2135 "pushq 88(%%rax)\n\t" \ 2136 "pushq 80(%%rax)\n\t" \ 2137 "pushq 72(%%rax)\n\t" \ 2138 "pushq 64(%%rax)\n\t" \ 2139 "pushq 56(%%rax)\n\t" \ 2140 "movq 48(%%rax), %%r9\n\t" \ 2141 "movq 40(%%rax), %%r8\n\t" \ 2142 "movq 32(%%rax), %%rcx\n\t" \ 2143 "movq 24(%%rax), %%rdx\n\t" \ 2144 "movq 16(%%rax), %%rsi\n\t" \ 2145 "movq 8(%%rax), %%rdi\n\t" \ 2146 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 2147 VALGRIND_CALL_NOREDIR_RAX \ 2148 VALGRIND_RESTORE_STACK \ 2149 VALGRIND_CFI_EPILOGUE \ 2150 : /*out*/ "=a" (_res) \ 2151 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 2152 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 2153 ); \ 2154 lval = 
(__typeof__(lval)) _res; \ 2155 } while (0) 2156 2157 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 2158 arg7,arg8,arg9,arg10,arg11,arg12) \ 2159 do { \ 2160 volatile OrigFn _orig = (orig); \ 2161 volatile unsigned long _argvec[13]; \ 2162 volatile unsigned long _res; \ 2163 _argvec[0] = (unsigned long)_orig.nraddr; \ 2164 _argvec[1] = (unsigned long)(arg1); \ 2165 _argvec[2] = (unsigned long)(arg2); \ 2166 _argvec[3] = (unsigned long)(arg3); \ 2167 _argvec[4] = (unsigned long)(arg4); \ 2168 _argvec[5] = (unsigned long)(arg5); \ 2169 _argvec[6] = (unsigned long)(arg6); \ 2170 _argvec[7] = (unsigned long)(arg7); \ 2171 _argvec[8] = (unsigned long)(arg8); \ 2172 _argvec[9] = (unsigned long)(arg9); \ 2173 _argvec[10] = (unsigned long)(arg10); \ 2174 _argvec[11] = (unsigned long)(arg11); \ 2175 _argvec[12] = (unsigned long)(arg12); \ 2176 __asm__ volatile( \ 2177 VALGRIND_CFI_PROLOGUE \ 2178 VALGRIND_ALIGN_STACK \ 2179 "subq $128,%%rsp\n\t" \ 2180 "pushq 96(%%rax)\n\t" \ 2181 "pushq 88(%%rax)\n\t" \ 2182 "pushq 80(%%rax)\n\t" \ 2183 "pushq 72(%%rax)\n\t" \ 2184 "pushq 64(%%rax)\n\t" \ 2185 "pushq 56(%%rax)\n\t" \ 2186 "movq 48(%%rax), %%r9\n\t" \ 2187 "movq 40(%%rax), %%r8\n\t" \ 2188 "movq 32(%%rax), %%rcx\n\t" \ 2189 "movq 24(%%rax), %%rdx\n\t" \ 2190 "movq 16(%%rax), %%rsi\n\t" \ 2191 "movq 8(%%rax), %%rdi\n\t" \ 2192 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 2193 VALGRIND_CALL_NOREDIR_RAX \ 2194 VALGRIND_RESTORE_STACK \ 2195 VALGRIND_CFI_EPILOGUE \ 2196 : /*out*/ "=a" (_res) \ 2197 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 2198 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 2199 ); \ 2200 lval = (__typeof__(lval)) _res; \ 2201 } while (0) 2202 2203 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */ 2204 2205 /* ------------------------ ppc32-linux ------------------------ */ 2206 2207 #if defined(PLAT_ppc32_linux) 2208 2209 /* This is useful for finding out about the on-stack stuff: 2210 2211 
extern int f9 ( int,int,int,int,int,int,int,int,int ); 2212 extern int f10 ( int,int,int,int,int,int,int,int,int,int ); 2213 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int ); 2214 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int ); 2215 2216 int g9 ( void ) { 2217 return f9(11,22,33,44,55,66,77,88,99); 2218 } 2219 int g10 ( void ) { 2220 return f10(11,22,33,44,55,66,77,88,99,110); 2221 } 2222 int g11 ( void ) { 2223 return f11(11,22,33,44,55,66,77,88,99,110,121); 2224 } 2225 int g12 ( void ) { 2226 return f12(11,22,33,44,55,66,77,88,99,110,121,132); 2227 } 2228 */ 2229 2230 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */ 2231 2232 /* These regs are trashed by the hidden call. */ 2233 #define __CALLER_SAVED_REGS \ 2234 "lr", "ctr", "xer", \ 2235 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \ 2236 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \ 2237 "r11", "r12", "r13" 2238 2239 /* Macros to save and align the stack before making a function 2240 call and restore it afterwards as gcc may not keep the stack 2241 pointer aligned if it doesn't realise calls are being made 2242 to other functions. */ 2243 2244 #define VALGRIND_ALIGN_STACK \ 2245 "mr 28,1\n\t" \ 2246 "rlwinm 1,1,0,0,27\n\t" 2247 #define VALGRIND_RESTORE_STACK \ 2248 "mr 1,28\n\t" 2249 2250 /* These CALL_FN_ macros assume that on ppc32-linux, 2251 sizeof(unsigned long) == 4. 
*/ 2252 2253 #define CALL_FN_W_v(lval, orig) \ 2254 do { \ 2255 volatile OrigFn _orig = (orig); \ 2256 volatile unsigned long _argvec[1]; \ 2257 volatile unsigned long _res; \ 2258 _argvec[0] = (unsigned long)_orig.nraddr; \ 2259 __asm__ volatile( \ 2260 VALGRIND_ALIGN_STACK \ 2261 "mr 11,%1\n\t" \ 2262 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2263 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2264 VALGRIND_RESTORE_STACK \ 2265 "mr %0,3" \ 2266 : /*out*/ "=r" (_res) \ 2267 : /*in*/ "r" (&_argvec[0]) \ 2268 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2269 ); \ 2270 lval = (__typeof__(lval)) _res; \ 2271 } while (0) 2272 2273 #define CALL_FN_W_W(lval, orig, arg1) \ 2274 do { \ 2275 volatile OrigFn _orig = (orig); \ 2276 volatile unsigned long _argvec[2]; \ 2277 volatile unsigned long _res; \ 2278 _argvec[0] = (unsigned long)_orig.nraddr; \ 2279 _argvec[1] = (unsigned long)arg1; \ 2280 __asm__ volatile( \ 2281 VALGRIND_ALIGN_STACK \ 2282 "mr 11,%1\n\t" \ 2283 "lwz 3,4(11)\n\t" /* arg1->r3 */ \ 2284 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2285 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2286 VALGRIND_RESTORE_STACK \ 2287 "mr %0,3" \ 2288 : /*out*/ "=r" (_res) \ 2289 : /*in*/ "r" (&_argvec[0]) \ 2290 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2291 ); \ 2292 lval = (__typeof__(lval)) _res; \ 2293 } while (0) 2294 2295 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \ 2296 do { \ 2297 volatile OrigFn _orig = (orig); \ 2298 volatile unsigned long _argvec[3]; \ 2299 volatile unsigned long _res; \ 2300 _argvec[0] = (unsigned long)_orig.nraddr; \ 2301 _argvec[1] = (unsigned long)arg1; \ 2302 _argvec[2] = (unsigned long)arg2; \ 2303 __asm__ volatile( \ 2304 VALGRIND_ALIGN_STACK \ 2305 "mr 11,%1\n\t" \ 2306 "lwz 3,4(11)\n\t" /* arg1->r3 */ \ 2307 "lwz 4,8(11)\n\t" \ 2308 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2309 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2310 VALGRIND_RESTORE_STACK \ 2311 "mr %0,3" \ 2312 : /*out*/ "=r" (_res) \ 2313 : /*in*/ "r" (&_argvec[0]) \ 
2314 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2315 ); \ 2316 lval = (__typeof__(lval)) _res; \ 2317 } while (0) 2318 2319 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \ 2320 do { \ 2321 volatile OrigFn _orig = (orig); \ 2322 volatile unsigned long _argvec[4]; \ 2323 volatile unsigned long _res; \ 2324 _argvec[0] = (unsigned long)_orig.nraddr; \ 2325 _argvec[1] = (unsigned long)arg1; \ 2326 _argvec[2] = (unsigned long)arg2; \ 2327 _argvec[3] = (unsigned long)arg3; \ 2328 __asm__ volatile( \ 2329 VALGRIND_ALIGN_STACK \ 2330 "mr 11,%1\n\t" \ 2331 "lwz 3,4(11)\n\t" /* arg1->r3 */ \ 2332 "lwz 4,8(11)\n\t" \ 2333 "lwz 5,12(11)\n\t" \ 2334 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2335 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2336 VALGRIND_RESTORE_STACK \ 2337 "mr %0,3" \ 2338 : /*out*/ "=r" (_res) \ 2339 : /*in*/ "r" (&_argvec[0]) \ 2340 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2341 ); \ 2342 lval = (__typeof__(lval)) _res; \ 2343 } while (0) 2344 2345 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \ 2346 do { \ 2347 volatile OrigFn _orig = (orig); \ 2348 volatile unsigned long _argvec[5]; \ 2349 volatile unsigned long _res; \ 2350 _argvec[0] = (unsigned long)_orig.nraddr; \ 2351 _argvec[1] = (unsigned long)arg1; \ 2352 _argvec[2] = (unsigned long)arg2; \ 2353 _argvec[3] = (unsigned long)arg3; \ 2354 _argvec[4] = (unsigned long)arg4; \ 2355 __asm__ volatile( \ 2356 VALGRIND_ALIGN_STACK \ 2357 "mr 11,%1\n\t" \ 2358 "lwz 3,4(11)\n\t" /* arg1->r3 */ \ 2359 "lwz 4,8(11)\n\t" \ 2360 "lwz 5,12(11)\n\t" \ 2361 "lwz 6,16(11)\n\t" /* arg4->r6 */ \ 2362 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2363 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2364 VALGRIND_RESTORE_STACK \ 2365 "mr %0,3" \ 2366 : /*out*/ "=r" (_res) \ 2367 : /*in*/ "r" (&_argvec[0]) \ 2368 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2369 ); \ 2370 lval = (__typeof__(lval)) _res; \ 2371 } while (0) 2372 2373 #define CALL_FN_W_5W(lval, orig, 
arg1,arg2,arg3,arg4,arg5) \ 2374 do { \ 2375 volatile OrigFn _orig = (orig); \ 2376 volatile unsigned long _argvec[6]; \ 2377 volatile unsigned long _res; \ 2378 _argvec[0] = (unsigned long)_orig.nraddr; \ 2379 _argvec[1] = (unsigned long)arg1; \ 2380 _argvec[2] = (unsigned long)arg2; \ 2381 _argvec[3] = (unsigned long)arg3; \ 2382 _argvec[4] = (unsigned long)arg4; \ 2383 _argvec[5] = (unsigned long)arg5; \ 2384 __asm__ volatile( \ 2385 VALGRIND_ALIGN_STACK \ 2386 "mr 11,%1\n\t" \ 2387 "lwz 3,4(11)\n\t" /* arg1->r3 */ \ 2388 "lwz 4,8(11)\n\t" \ 2389 "lwz 5,12(11)\n\t" \ 2390 "lwz 6,16(11)\n\t" /* arg4->r6 */ \ 2391 "lwz 7,20(11)\n\t" \ 2392 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2393 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2394 VALGRIND_RESTORE_STACK \ 2395 "mr %0,3" \ 2396 : /*out*/ "=r" (_res) \ 2397 : /*in*/ "r" (&_argvec[0]) \ 2398 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2399 ); \ 2400 lval = (__typeof__(lval)) _res; \ 2401 } while (0) 2402 2403 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \ 2404 do { \ 2405 volatile OrigFn _orig = (orig); \ 2406 volatile unsigned long _argvec[7]; \ 2407 volatile unsigned long _res; \ 2408 _argvec[0] = (unsigned long)_orig.nraddr; \ 2409 _argvec[1] = (unsigned long)arg1; \ 2410 _argvec[2] = (unsigned long)arg2; \ 2411 _argvec[3] = (unsigned long)arg3; \ 2412 _argvec[4] = (unsigned long)arg4; \ 2413 _argvec[5] = (unsigned long)arg5; \ 2414 _argvec[6] = (unsigned long)arg6; \ 2415 __asm__ volatile( \ 2416 VALGRIND_ALIGN_STACK \ 2417 "mr 11,%1\n\t" \ 2418 "lwz 3,4(11)\n\t" /* arg1->r3 */ \ 2419 "lwz 4,8(11)\n\t" \ 2420 "lwz 5,12(11)\n\t" \ 2421 "lwz 6,16(11)\n\t" /* arg4->r6 */ \ 2422 "lwz 7,20(11)\n\t" \ 2423 "lwz 8,24(11)\n\t" \ 2424 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2425 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2426 VALGRIND_RESTORE_STACK \ 2427 "mr %0,3" \ 2428 : /*out*/ "=r" (_res) \ 2429 : /*in*/ "r" (&_argvec[0]) \ 2430 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2431 
); \ 2432 lval = (__typeof__(lval)) _res; \ 2433 } while (0) 2434 2435 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 2436 arg7) \ 2437 do { \ 2438 volatile OrigFn _orig = (orig); \ 2439 volatile unsigned long _argvec[8]; \ 2440 volatile unsigned long _res; \ 2441 _argvec[0] = (unsigned long)_orig.nraddr; \ 2442 _argvec[1] = (unsigned long)arg1; \ 2443 _argvec[2] = (unsigned long)arg2; \ 2444 _argvec[3] = (unsigned long)arg3; \ 2445 _argvec[4] = (unsigned long)arg4; \ 2446 _argvec[5] = (unsigned long)arg5; \ 2447 _argvec[6] = (unsigned long)arg6; \ 2448 _argvec[7] = (unsigned long)arg7; \ 2449 __asm__ volatile( \ 2450 VALGRIND_ALIGN_STACK \ 2451 "mr 11,%1\n\t" \ 2452 "lwz 3,4(11)\n\t" /* arg1->r3 */ \ 2453 "lwz 4,8(11)\n\t" \ 2454 "lwz 5,12(11)\n\t" \ 2455 "lwz 6,16(11)\n\t" /* arg4->r6 */ \ 2456 "lwz 7,20(11)\n\t" \ 2457 "lwz 8,24(11)\n\t" \ 2458 "lwz 9,28(11)\n\t" \ 2459 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2460 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2461 VALGRIND_RESTORE_STACK \ 2462 "mr %0,3" \ 2463 : /*out*/ "=r" (_res) \ 2464 : /*in*/ "r" (&_argvec[0]) \ 2465 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2466 ); \ 2467 lval = (__typeof__(lval)) _res; \ 2468 } while (0) 2469 2470 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 2471 arg7,arg8) \ 2472 do { \ 2473 volatile OrigFn _orig = (orig); \ 2474 volatile unsigned long _argvec[9]; \ 2475 volatile unsigned long _res; \ 2476 _argvec[0] = (unsigned long)_orig.nraddr; \ 2477 _argvec[1] = (unsigned long)arg1; \ 2478 _argvec[2] = (unsigned long)arg2; \ 2479 _argvec[3] = (unsigned long)arg3; \ 2480 _argvec[4] = (unsigned long)arg4; \ 2481 _argvec[5] = (unsigned long)arg5; \ 2482 _argvec[6] = (unsigned long)arg6; \ 2483 _argvec[7] = (unsigned long)arg7; \ 2484 _argvec[8] = (unsigned long)arg8; \ 2485 __asm__ volatile( \ 2486 VALGRIND_ALIGN_STACK \ 2487 "mr 11,%1\n\t" \ 2488 "lwz 3,4(11)\n\t" /* arg1->r3 */ \ 2489 "lwz 4,8(11)\n\t" \ 2490 "lwz 5,12(11)\n\t" \ 2491 
"lwz 6,16(11)\n\t" /* arg4->r6 */ \ 2492 "lwz 7,20(11)\n\t" \ 2493 "lwz 8,24(11)\n\t" \ 2494 "lwz 9,28(11)\n\t" \ 2495 "lwz 10,32(11)\n\t" /* arg8->r10 */ \ 2496 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2497 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2498 VALGRIND_RESTORE_STACK \ 2499 "mr %0,3" \ 2500 : /*out*/ "=r" (_res) \ 2501 : /*in*/ "r" (&_argvec[0]) \ 2502 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2503 ); \ 2504 lval = (__typeof__(lval)) _res; \ 2505 } while (0) 2506 2507 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 2508 arg7,arg8,arg9) \ 2509 do { \ 2510 volatile OrigFn _orig = (orig); \ 2511 volatile unsigned long _argvec[10]; \ 2512 volatile unsigned long _res; \ 2513 _argvec[0] = (unsigned long)_orig.nraddr; \ 2514 _argvec[1] = (unsigned long)arg1; \ 2515 _argvec[2] = (unsigned long)arg2; \ 2516 _argvec[3] = (unsigned long)arg3; \ 2517 _argvec[4] = (unsigned long)arg4; \ 2518 _argvec[5] = (unsigned long)arg5; \ 2519 _argvec[6] = (unsigned long)arg6; \ 2520 _argvec[7] = (unsigned long)arg7; \ 2521 _argvec[8] = (unsigned long)arg8; \ 2522 _argvec[9] = (unsigned long)arg9; \ 2523 __asm__ volatile( \ 2524 VALGRIND_ALIGN_STACK \ 2525 "mr 11,%1\n\t" \ 2526 "addi 1,1,-16\n\t" \ 2527 /* arg9 */ \ 2528 "lwz 3,36(11)\n\t" \ 2529 "stw 3,8(1)\n\t" \ 2530 /* args1-8 */ \ 2531 "lwz 3,4(11)\n\t" /* arg1->r3 */ \ 2532 "lwz 4,8(11)\n\t" \ 2533 "lwz 5,12(11)\n\t" \ 2534 "lwz 6,16(11)\n\t" /* arg4->r6 */ \ 2535 "lwz 7,20(11)\n\t" \ 2536 "lwz 8,24(11)\n\t" \ 2537 "lwz 9,28(11)\n\t" \ 2538 "lwz 10,32(11)\n\t" /* arg8->r10 */ \ 2539 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2540 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2541 VALGRIND_RESTORE_STACK \ 2542 "mr %0,3" \ 2543 : /*out*/ "=r" (_res) \ 2544 : /*in*/ "r" (&_argvec[0]) \ 2545 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2546 ); \ 2547 lval = (__typeof__(lval)) _res; \ 2548 } while (0) 2549 2550 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 2551 
arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-16\n\t" \
         /* arg10 */ \
         "lwz 3,40(11)\n\t" \
         "stw 3,12(1)\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call a function with 11 word-sized args.  As in the other ppc32
   CALL_FN_* macros above: args 1..8 are loaded into r3..r10, and the
   remaining args are stored into the stack parameter area created by
   dropping r1; r11 first addresses _argvec and then holds the call
   target.  r28 is listed as trashed because VALGRIND_ALIGN_STACK
   saves the original stack pointer in it. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                  arg7,arg8,arg9,arg10,arg11) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      _argvec[11] = (unsigned long)arg11; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-32\n\t" \
         /* arg11 */ \
         "lwz 3,44(11)\n\t" \
         "stw 3,16(1)\n\t" \
         /* arg10 */ \
         "lwz 3,40(11)\n\t" \
         "stw 3,12(1)\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* As CALL_FN_W_11W, for 12 word-sized args: args 9..12 are spilled to
   8(1)..20(1) in the extended frame. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      _argvec[11] = (unsigned long)arg11; \
      _argvec[12] = (unsigned long)arg12; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-32\n\t" \
         /* arg12 */ \
         "lwz 3,48(11)\n\t" \
         "stw 3,20(1)\n\t" \
         /* arg11 */ \
         "lwz 3,44(11)\n\t" \
         "stw 3,16(1)\n\t" \
         /* arg10 */ \
         "lwz 3,40(11)\n\t" \
         "stw 3,12(1)\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#endif /* PLAT_ppc32_linux */

/* ------------------------ ppc64-linux ------------------------ */

#if defined(PLAT_ppc64be_linux)

/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */

/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS \
   "lr", "ctr", "xer", \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
   "r11", "r12", "r13"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save r1 into r28 (hence "r28" in the clobber lists below), then
   round r1 down to a 16-byte boundary. */
#define VALGRIND_ALIGN_STACK \
   "mr 28,1\n\t" \
   "rldicr 1,1,0,59\n\t"
#define VALGRIND_RESTORE_STACK \
   "mr 1,28\n\t"

/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
   long) == 8.
*/

/* In the ppc64be CALL_FN_* macros below, _argvec[1] carries the
   caller's TOC pointer (r2) and _argvec[2] the no-redirect target
   address; the asm saves r2 at -16(r11), installs the target's TOC
   from -8(r11) for the call, and restores r2 afterwards.  Args 1..8
   go in r3..r10; args beyond 8 are stored in the expanded stack
   frame. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+0]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+1]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+2]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+3]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+4]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+5]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */ \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+6]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */ \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */ \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                 arg7) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+7]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */ \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */ \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */ \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                 arg7,arg8) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+8]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */ \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */ \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */ \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */ \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                 arg7,arg8,arg9) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+9]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "addi 1,1,-128\n\t"  /* expand stack frame */ \
         /* arg9 */ \
         "ld 3,72(11)\n\t" \
         "std 3,112(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */ \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */ \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */ \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */ \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                  arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+10]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]    = (unsigned long)_orig.r2; \
      _argvec[2]    = (unsigned long)_orig.nraddr; \
      _argvec[2+1]  = (unsigned long)arg1; \
      _argvec[2+2]  = (unsigned long)arg2; \
      _argvec[2+3]  = (unsigned long)arg3; \
      _argvec[2+4]  = (unsigned long)arg4; \
      _argvec[2+5]  = (unsigned long)arg5; \
      _argvec[2+6]  = (unsigned long)arg6; \
      _argvec[2+7]  = (unsigned long)arg7; \
      _argvec[2+8]  = (unsigned long)arg8; \
      _argvec[2+9]  = (unsigned long)arg9; \
      _argvec[2+10] = (unsigned long)arg10; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "addi 1,1,-128\n\t"  /* expand stack frame */ \
         /* arg10 */ \
         "ld 3,80(11)\n\t" \
         "std 3,120(1)\n\t" \
         /* arg9 */ \
         "ld 3,72(11)\n\t" \
         "std 3,112(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */ \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */ \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */ \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */ \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                  arg7,arg8,arg9,arg10,arg11) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+11]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]    = (unsigned long)_orig.r2; \
      _argvec[2]    = (unsigned long)_orig.nraddr; \
      _argvec[2+1]  = (unsigned long)arg1; \
      _argvec[2+2]  = (unsigned long)arg2; \
      _argvec[2+3]  = (unsigned long)arg3; \
      _argvec[2+4]  = (unsigned long)arg4; \
      _argvec[2+5]  = (unsigned long)arg5; \
      _argvec[2+6]  = (unsigned long)arg6; \
      _argvec[2+7]  = (unsigned long)arg7; \
      _argvec[2+8]  = (unsigned long)arg8; \
      _argvec[2+9]  = (unsigned long)arg9; \
      _argvec[2+10] = (unsigned long)arg10; \
      _argvec[2+11] = (unsigned long)arg11; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "addi 1,1,-144\n\t"  /* expand stack frame */ \
         /* arg11 */ \
         "ld 3,88(11)\n\t" \
         "std 3,128(1)\n\t" \
         /* arg10 */ \
         "ld 3,80(11)\n\t" \
         "std 3,120(1)\n\t" \
         /* arg9 */ \
         "ld 3,72(11)\n\t" \
         "std 3,112(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */ \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */ \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */ \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */ \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+12]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]    = (unsigned long)_orig.r2; \
      _argvec[2]    = (unsigned long)_orig.nraddr; \
      _argvec[2+1]  = (unsigned long)arg1; \
      _argvec[2+2]  = (unsigned long)arg2; \
      _argvec[2+3]  = (unsigned long)arg3; \
      _argvec[2+4]  = (unsigned long)arg4; \
      _argvec[2+5]  = (unsigned long)arg5; \
      _argvec[2+6]  = (unsigned long)arg6; \
      _argvec[2+7]  = (unsigned long)arg7; \
      _argvec[2+8]  = (unsigned long)arg8; \
      _argvec[2+9]  = (unsigned long)arg9; \
      _argvec[2+10] = (unsigned long)arg10; \
      _argvec[2+11] = (unsigned long)arg11; \
      _argvec[2+12] = (unsigned long)arg12; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "addi 1,1,-144\n\t"  /* expand stack frame */ \
         /* arg12 */ \
         "ld 3,96(11)\n\t" \
         "std 3,136(1)\n\t" \
         /* arg11 */ \
         "ld 3,88(11)\n\t" \
         "std 3,128(1)\n\t" \
         /* arg10 */ \
         "ld 3,80(11)\n\t" \
         "std 3,120(1)\n\t" \
         /* arg9 */ \
         "ld 3,72(11)\n\t" \
         "std 3,112(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */ \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */ \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */ \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */ \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#endif /* PLAT_ppc64be_linux */

/* ------------------------- ppc64le-linux ----------------------- */
#if defined(PLAT_ppc64le_linux)

/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */

/* These regs are trashed by the hidden call.
*/
#define __CALLER_SAVED_REGS \
   "lr", "ctr", "xer", \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
   "r11", "r12", "r13"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save r1 into r28 (hence "r28" in the clobber lists below), then
   round r1 down to a 16-byte boundary. */
#define VALGRIND_ALIGN_STACK \
   "mr 28,1\n\t" \
   "rldicr 1,1,0,59\n\t"
#define VALGRIND_RESTORE_STACK \
   "mr 1,28\n\t"

/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
   long) == 8. */

/* Same scheme as the ppc64be macros above, except that the call
   target is placed in r12 rather than r11 (r12 is the function entry
   address register here) and the branch is done via
   VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12.  r2 (TOC pointer) is saved
   at -16(r12), replaced by the target's TOC from -8(r12), and
   restored after the call. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+0]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t"  /* save tocptr */ \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */ \
         "ld 12, 0(12)\n\t"   /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+1]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t"  /* save tocptr */ \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */ \
         "ld 12, 0(12)\n\t"   /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+2]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t"  /* save tocptr */ \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */ \
         "ld 12, 0(12)\n\t"   /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+3]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t"  /* save tocptr */ \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */ \
         "ld 12, 0(12)\n\t"   /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+4]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t"  /* save tocptr */ \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */ \
         "ld 12, 0(12)\n\t"   /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+5]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t"  /* save tocptr */ \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */ \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */ \
         "ld 12, 0(12)\n\t"   /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+6]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t"  /* save tocptr */ \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */ \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */ \
         "ld 8, 48(12)\n\t"   /* arg6->r8 */ \
         "ld 12, 0(12)\n\t"   /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                 arg7) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+7]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t"  /* save tocptr */ \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */ \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */ \
         "ld 8, 48(12)\n\t"   /* arg6->r8 */ \
         "ld 9, 56(12)\n\t"   /* arg7->r9 */ \
         "ld 12, 0(12)\n\t"   /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                 arg7,arg8) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+8]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t"  /* save tocptr */ \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */ \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */ \
         "ld 8, 48(12)\n\t"   /* arg6->r8 */ \
         "ld 9, 56(12)\n\t"   /* arg7->r9 */ \
         "ld 10, 64(12)\n\t"  /* arg8->r10 */ \
         "ld 12, 0(12)\n\t"   /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                 arg7,arg8,arg9) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+9]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t"  /* save tocptr */ \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */ \
         "addi 1,1,-128\n\t"  /* expand stack frame */ \
         /* arg9 */ \
         "ld 3,72(12)\n\t" \
         "std 3,96(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */ \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */ \
         "ld 8, 48(12)\n\t"   /* arg6->r8 */ \
         "ld 9, 56(12)\n\t"   /* arg7->r9 */ \
         "ld 10, 64(12)\n\t"  /* arg8->r10 */ \
         "ld 12, 0(12)\n\t"   /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                  arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn        _orig
= (orig); \ 3649 volatile unsigned long _argvec[3+10]; \ 3650 volatile unsigned long _res; \ 3651 /* _argvec[0] holds current r2 across the call */ \ 3652 _argvec[1] = (unsigned long)_orig.r2; \ 3653 _argvec[2] = (unsigned long)_orig.nraddr; \ 3654 _argvec[2+1] = (unsigned long)arg1; \ 3655 _argvec[2+2] = (unsigned long)arg2; \ 3656 _argvec[2+3] = (unsigned long)arg3; \ 3657 _argvec[2+4] = (unsigned long)arg4; \ 3658 _argvec[2+5] = (unsigned long)arg5; \ 3659 _argvec[2+6] = (unsigned long)arg6; \ 3660 _argvec[2+7] = (unsigned long)arg7; \ 3661 _argvec[2+8] = (unsigned long)arg8; \ 3662 _argvec[2+9] = (unsigned long)arg9; \ 3663 _argvec[2+10] = (unsigned long)arg10; \ 3664 __asm__ volatile( \ 3665 VALGRIND_ALIGN_STACK \ 3666 "mr 12,%1\n\t" \ 3667 "std 2,-16(12)\n\t" /* save tocptr */ \ 3668 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \ 3669 "addi 1,1,-128\n\t" /* expand stack frame */ \ 3670 /* arg10 */ \ 3671 "ld 3,80(12)\n\t" \ 3672 "std 3,104(1)\n\t" \ 3673 /* arg9 */ \ 3674 "ld 3,72(12)\n\t" \ 3675 "std 3,96(1)\n\t" \ 3676 /* args1-8 */ \ 3677 "ld 3, 8(12)\n\t" /* arg1->r3 */ \ 3678 "ld 4, 16(12)\n\t" /* arg2->r4 */ \ 3679 "ld 5, 24(12)\n\t" /* arg3->r5 */ \ 3680 "ld 6, 32(12)\n\t" /* arg4->r6 */ \ 3681 "ld 7, 40(12)\n\t" /* arg5->r7 */ \ 3682 "ld 8, 48(12)\n\t" /* arg6->r8 */ \ 3683 "ld 9, 56(12)\n\t" /* arg7->r9 */ \ 3684 "ld 10, 64(12)\n\t" /* arg8->r10 */ \ 3685 "ld 12, 0(12)\n\t" /* target->r12 */ \ 3686 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \ 3687 "mr 12,%1\n\t" \ 3688 "mr %0,3\n\t" \ 3689 "ld 2,-16(12)\n\t" /* restore tocptr */ \ 3690 VALGRIND_RESTORE_STACK \ 3691 : /*out*/ "=r" (_res) \ 3692 : /*in*/ "r" (&_argvec[2]) \ 3693 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 3694 ); \ 3695 lval = (__typeof__(lval)) _res; \ 3696 } while (0) 3697 3698 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 3699 arg7,arg8,arg9,arg10,arg11) \ 3700 do { \ 3701 volatile OrigFn _orig = (orig); \ 3702 volatile unsigned long _argvec[3+11]; \ 
3703 volatile unsigned long _res; \ 3704 /* _argvec[0] holds current r2 across the call */ \ 3705 _argvec[1] = (unsigned long)_orig.r2; \ 3706 _argvec[2] = (unsigned long)_orig.nraddr; \ 3707 _argvec[2+1] = (unsigned long)arg1; \ 3708 _argvec[2+2] = (unsigned long)arg2; \ 3709 _argvec[2+3] = (unsigned long)arg3; \ 3710 _argvec[2+4] = (unsigned long)arg4; \ 3711 _argvec[2+5] = (unsigned long)arg5; \ 3712 _argvec[2+6] = (unsigned long)arg6; \ 3713 _argvec[2+7] = (unsigned long)arg7; \ 3714 _argvec[2+8] = (unsigned long)arg8; \ 3715 _argvec[2+9] = (unsigned long)arg9; \ 3716 _argvec[2+10] = (unsigned long)arg10; \ 3717 _argvec[2+11] = (unsigned long)arg11; \ 3718 __asm__ volatile( \ 3719 VALGRIND_ALIGN_STACK \ 3720 "mr 12,%1\n\t" \ 3721 "std 2,-16(12)\n\t" /* save tocptr */ \ 3722 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \ 3723 "addi 1,1,-144\n\t" /* expand stack frame */ \ 3724 /* arg11 */ \ 3725 "ld 3,88(12)\n\t" \ 3726 "std 3,112(1)\n\t" \ 3727 /* arg10 */ \ 3728 "ld 3,80(12)\n\t" \ 3729 "std 3,104(1)\n\t" \ 3730 /* arg9 */ \ 3731 "ld 3,72(12)\n\t" \ 3732 "std 3,96(1)\n\t" \ 3733 /* args1-8 */ \ 3734 "ld 3, 8(12)\n\t" /* arg1->r3 */ \ 3735 "ld 4, 16(12)\n\t" /* arg2->r4 */ \ 3736 "ld 5, 24(12)\n\t" /* arg3->r5 */ \ 3737 "ld 6, 32(12)\n\t" /* arg4->r6 */ \ 3738 "ld 7, 40(12)\n\t" /* arg5->r7 */ \ 3739 "ld 8, 48(12)\n\t" /* arg6->r8 */ \ 3740 "ld 9, 56(12)\n\t" /* arg7->r9 */ \ 3741 "ld 10, 64(12)\n\t" /* arg8->r10 */ \ 3742 "ld 12, 0(12)\n\t" /* target->r12 */ \ 3743 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \ 3744 "mr 12,%1\n\t" \ 3745 "mr %0,3\n\t" \ 3746 "ld 2,-16(12)\n\t" /* restore tocptr */ \ 3747 VALGRIND_RESTORE_STACK \ 3748 : /*out*/ "=r" (_res) \ 3749 : /*in*/ "r" (&_argvec[2]) \ 3750 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 3751 ); \ 3752 lval = (__typeof__(lval)) _res; \ 3753 } while (0) 3754 3755 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 3756 arg7,arg8,arg9,arg10,arg11,arg12) \ 3757 do { \ 3758 volatile OrigFn 
_orig = (orig); \ 3759 volatile unsigned long _argvec[3+12]; \ 3760 volatile unsigned long _res; \ 3761 /* _argvec[0] holds current r2 across the call */ \ 3762 _argvec[1] = (unsigned long)_orig.r2; \ 3763 _argvec[2] = (unsigned long)_orig.nraddr; \ 3764 _argvec[2+1] = (unsigned long)arg1; \ 3765 _argvec[2+2] = (unsigned long)arg2; \ 3766 _argvec[2+3] = (unsigned long)arg3; \ 3767 _argvec[2+4] = (unsigned long)arg4; \ 3768 _argvec[2+5] = (unsigned long)arg5; \ 3769 _argvec[2+6] = (unsigned long)arg6; \ 3770 _argvec[2+7] = (unsigned long)arg7; \ 3771 _argvec[2+8] = (unsigned long)arg8; \ 3772 _argvec[2+9] = (unsigned long)arg9; \ 3773 _argvec[2+10] = (unsigned long)arg10; \ 3774 _argvec[2+11] = (unsigned long)arg11; \ 3775 _argvec[2+12] = (unsigned long)arg12; \ 3776 __asm__ volatile( \ 3777 VALGRIND_ALIGN_STACK \ 3778 "mr 12,%1\n\t" \ 3779 "std 2,-16(12)\n\t" /* save tocptr */ \ 3780 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \ 3781 "addi 1,1,-144\n\t" /* expand stack frame */ \ 3782 /* arg12 */ \ 3783 "ld 3,96(12)\n\t" \ 3784 "std 3,120(1)\n\t" \ 3785 /* arg11 */ \ 3786 "ld 3,88(12)\n\t" \ 3787 "std 3,112(1)\n\t" \ 3788 /* arg10 */ \ 3789 "ld 3,80(12)\n\t" \ 3790 "std 3,104(1)\n\t" \ 3791 /* arg9 */ \ 3792 "ld 3,72(12)\n\t" \ 3793 "std 3,96(1)\n\t" \ 3794 /* args1-8 */ \ 3795 "ld 3, 8(12)\n\t" /* arg1->r3 */ \ 3796 "ld 4, 16(12)\n\t" /* arg2->r4 */ \ 3797 "ld 5, 24(12)\n\t" /* arg3->r5 */ \ 3798 "ld 6, 32(12)\n\t" /* arg4->r6 */ \ 3799 "ld 7, 40(12)\n\t" /* arg5->r7 */ \ 3800 "ld 8, 48(12)\n\t" /* arg6->r8 */ \ 3801 "ld 9, 56(12)\n\t" /* arg7->r9 */ \ 3802 "ld 10, 64(12)\n\t" /* arg8->r10 */ \ 3803 "ld 12, 0(12)\n\t" /* target->r12 */ \ 3804 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \ 3805 "mr 12,%1\n\t" \ 3806 "mr %0,3\n\t" \ 3807 "ld 2,-16(12)\n\t" /* restore tocptr */ \ 3808 VALGRIND_RESTORE_STACK \ 3809 : /*out*/ "=r" (_res) \ 3810 : /*in*/ "r" (&_argvec[2]) \ 3811 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 3812 ); \ 3813 lval = (__typeof__(lval)) 
_res; \ 3814 } while (0) 3815 3816 #endif /* PLAT_ppc64le_linux */ 3817 3818 /* ------------------------- arm-linux ------------------------- */ 3819 3820 #if defined(PLAT_arm_linux) 3821 3822 /* These regs are trashed by the hidden call. */ 3823 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14" 3824 3825 /* Macros to save and align the stack before making a function 3826 call and restore it afterwards as gcc may not keep the stack 3827 pointer aligned if it doesn't realise calls are being made 3828 to other functions. */ 3829 3830 /* This is a bit tricky. We store the original stack pointer in r10 3831 as it is callee-saves. gcc doesn't allow the use of r11 for some 3832 reason. Also, we can't directly "bic" the stack pointer in thumb 3833 mode since r13 isn't an allowed register number in that context. 3834 So use r4 as a temporary, since that is about to get trashed 3835 anyway, just after each use of this macro. Side effect is we need 3836 to be very careful about any future changes, since 3837 VALGRIND_ALIGN_STACK simply assumes r4 is usable. */ 3838 #define VALGRIND_ALIGN_STACK \ 3839 "mov r10, sp\n\t" \ 3840 "mov r4, sp\n\t" \ 3841 "bic r4, r4, #7\n\t" \ 3842 "mov sp, r4\n\t" 3843 #define VALGRIND_RESTORE_STACK \ 3844 "mov sp, r10\n\t" 3845 3846 /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned 3847 long) == 4. 
*/

/* Call a 0-arg word-returning function at _orig.nraddr, bypassing
   redirection.  The target address is loaded into r4; the result is
   read back from r0. */
#define CALL_FN_W_v(lval, orig)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[1];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r4, [%1] \n\t"  /* target->r4 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0\n"                                          \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* One arg, passed in r0. */
#define CALL_FN_W_W(lval, orig, arg1)                            \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[2];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0\n"                                          \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Two args, in r0/r1. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                      \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[3];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0\n"                                          \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Three args, in r0..r2. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[4];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r2, [%1, #12] \n\t"                                \
         "ldr r4, [%1] \n\t"  /* target->r4 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0\n"                                          \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Four args, in r0..r3. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)          \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[5];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r2, [%1, #12] \n\t"                                \
         "ldr r3, [%1, #16] \n\t"                                \
         "ldr r4, [%1] \n\t"  /* target->r4 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Five args: r0..r3 plus arg5 pushed on the stack (with a 4-byte
   pad to keep sp 8-aligned at the call). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[6];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "sub sp, sp, #4 \n\t"                                   \
         "ldr r0, [%1, #20] \n\t"                                \
         "push {r0} \n\t"                                        \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r2, [%1, #12] \n\t"                                \
         "ldr r3, [%1, #16] \n\t"                                \
         "ldr r4, [%1] \n\t"  /* target->r4 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Six args: r0..r3 plus args 5-6 pushed on the stack. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[7];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #20] \n\t"                                \
         "ldr r1, [%1, #24] \n\t"                                \
         "push {r0, r1} \n\t"                                    \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r2, [%1, #12] \n\t"                                \
         "ldr r3, [%1, #16] \n\t"                                \
         "ldr r4, [%1] \n\t"  /* target->r4 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Seven args: r0..r3 plus args 5-7 pushed (4-byte pad first). */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7)                                       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[8];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "sub sp, sp, #4 \n\t"                                   \
         "ldr r0, [%1, #20] \n\t"                                \
         "ldr r1, [%1, #24] \n\t"                                \
         "ldr r2, [%1, #28] \n\t"                                \
         "push {r0, r1, r2} \n\t"                                \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r2, [%1, #12] \n\t"                                \
         "ldr r3, [%1, #16] \n\t"                                \
         "ldr r4, [%1] \n\t"  /* target->r4 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Eight args: r0..r3 plus args 5-8 pushed. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[9];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #20] \n\t"                                \
         "ldr r1, [%1, #24] \n\t"                                \
         "ldr r2, [%1, #28] \n\t"                                \
         "ldr r3, [%1, #32] \n\t"                                \
         "push {r0, r1, r2, r3} \n\t"                            \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r2, [%1, #12] \n\t"                                \
         "ldr r3, [%1, #16] \n\t"                                \
         "ldr r4, [%1] \n\t"  /* target->r4 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Nine args: r0..r3 plus args 5-9 pushed (4-byte pad first). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8,arg9)                             \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[10];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "sub sp, sp, #4 \n\t"                                   \
         "ldr r0, [%1, #20] \n\t"                                \
         "ldr r1, [%1, #24] \n\t"                                \
         "ldr r2, [%1, #28] \n\t"                                \
         "ldr r3, [%1, #32] \n\t"                                \
         "ldr r4, [%1, #36] \n\t"                                \
         "push {r0, r1, r2, r3, r4} \n\t"                        \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r2, [%1, #12] \n\t"                                \
         "ldr r3, [%1, #16] \n\t"                                \
         "ldr r4, [%1] \n\t"  /* target->r4 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Ten args: r0..r3 plus args 5-10 pushed (arg10 first, then 5-9). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10)                      \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[11];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #40] \n\t"                                \
         "push {r0} \n\t"                                        \
         "ldr r0, [%1, #20] \n\t"                                \
         "ldr r1, [%1, #24] \n\t"                                \
         "ldr r2, [%1, #28] \n\t"                                \
         "ldr r3, [%1, #32] \n\t"                                \
         "ldr r4, [%1, #36] \n\t"                                \
         "push {r0, r1, r2, r3, r4} \n\t"                        \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r2, [%1, #12] \n\t"                                \
         "ldr r3, [%1, #16] \n\t"                                \
         "ldr r4, [%1] \n\t"  /* target->r4 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Eleven args: r0..r3 plus args 5-11 pushed (pad, 10-11, then 5-9). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,      \
                      arg6,arg7,arg8,arg9,arg10,                 \
                      arg11)                                     \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[12];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                      \
      _argvec[11] = (unsigned long)(arg11);                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "sub sp, sp, #4 \n\t"                                   \
         "ldr r0, [%1, #40] \n\t"                                \
         "ldr r1, [%1, #44] \n\t"                                \
         "push {r0, r1} \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                \
         "ldr r1, [%1, #24] \n\t"                                \
         "ldr r2, [%1, #28] \n\t"                                \
         "ldr r3, [%1, #32] \n\t"                                \
         "ldr r4, [%1, #36] \n\t"                                \
         "push {r0, r1, r2, r3, r4} \n\t"                        \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r2, [%1, #12] \n\t"                                \
         "ldr r3, [%1, #16] \n\t"                                \
         "ldr r4, [%1] \n\t"  /* target->r4 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Twelve args: r0..r3 plus args 5-12 pushed (10-12, then 5-9). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,      \
                      arg6,arg7,arg8,arg9,arg10,                 \
                      arg11,arg12)                               \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[13];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                      \
      _argvec[11] = (unsigned long)(arg11);                      \
      _argvec[12] = (unsigned long)(arg12);                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #40] \n\t"                                \
         "ldr r1, [%1, #44] \n\t"                                \
         "ldr r2, [%1, #48] \n\t"                                \
         "push {r0, r1, r2} \n\t"                                \
         "ldr r0, [%1, #20] \n\t"                                \
         "ldr r1, [%1, #24] \n\t"                                \
         "ldr r2, [%1, #28] \n\t"                                \
         "ldr r3, [%1, #32] \n\t"                                \
         "ldr r4, [%1, #36] \n\t"                                \
         "push {r0, r1, r2, r3, r4} \n\t"                        \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r2, [%1, #12] \n\t"                                \
         "ldr r3, [%1, #16] \n\t"                                \
         "ldr r4, [%1] \n\t"  /* target->r4 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

#endif /* PLAT_arm_linux */

/* ------------------------ arm64-linux ------------------------ */

#if defined(PLAT_arm64_linux)

/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS \
     "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9",   \
     "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17",      \
     "x18", "x19", "x20", "x30",                                  \
     "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9",  \
     "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17",      \
     "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25",      \
     "v26", "v27", "v28", "v29", "v30", "v31"

/* x21 is callee-saved, so we can use it to save and restore SP around
   the hidden call. */
#define VALGRIND_ALIGN_STACK               \
   "mov x21, sp\n\t"                       \
   "bic sp, x21, #15\n\t"
#define VALGRIND_RESTORE_STACK             \
   "mov sp,  x21\n\t"

/* These CALL_FN_ macros assume that on arm64-linux,
   sizeof(unsigned long) == 8.
*/

/* Call a 0-arg word-returning function at _orig.nraddr, bypassing
   redirection.  The target address goes in x8; the result is read
   back from x0. */
#define CALL_FN_W_v(lval, orig)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[1];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x8, [%1] \n\t"  /* target->x8 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0\n"                                          \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* One arg, passed in x0. */
#define CALL_FN_W_W(lval, orig, arg1)                            \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[2];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0\n"                                          \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Two args, in x0/x1. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                      \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[3];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x1, [%1, #16] \n\t"                                \
         "ldr x8, [%1] \n\t"  /* target->x8 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0\n"                                          \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Three args, in x0..x2. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[4];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x1, [%1, #16] \n\t"                                \
         "ldr x2, [%1, #24] \n\t"                                \
         "ldr x8, [%1] \n\t"  /* target->x8 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0\n"                                          \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Four args, in x0..x3. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)          \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[5];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x1, [%1, #16] \n\t"                                \
         "ldr x2, [%1, #24] \n\t"                                \
         "ldr x3, [%1, #32] \n\t"                                \
         "ldr x8, [%1] \n\t"  /* target->x8 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Five args, in x0..x4. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[6];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x1, [%1, #16] \n\t"                                \
         "ldr x2, [%1, #24] \n\t"                                \
         "ldr x3, [%1, #32] \n\t"                                \
         "ldr x4, [%1, #40] \n\t"                                \
         "ldr x8, [%1] \n\t"  /* target->x8 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Six args, in x0..x5. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[7];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x1, [%1, #16] \n\t"                                \
         "ldr x2, [%1, #24] \n\t"                                \
         "ldr x3, [%1, #32] \n\t"                                \
         "ldr x4, [%1, #40] \n\t"                                \
         "ldr x5, [%1, #48] \n\t"                                \
         "ldr x8, [%1] \n\t"  /* target->x8 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Seven args, in x0..x6. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7)                                       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[8];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x1, [%1, #16] \n\t"                                \
         "ldr x2, [%1, #24] \n\t"                                \
         "ldr x3, [%1, #32] \n\t"                                \
         "ldr x4, [%1, #40] \n\t"                                \
         "ldr x5, [%1, #48] \n\t"                                \
         "ldr x6, [%1, #56] \n\t"                                \
         "ldr x8, [%1] \n\t"  /* target->x8 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Eight args, in x0..x7. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[9];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x1, [%1, #16] \n\t"                                \
         "ldr x2, [%1, #24] \n\t"                                \
         "ldr x3, [%1, #32] \n\t"                                \
         "ldr x4, [%1, #40] \n\t"                                \
         "ldr x5, [%1, #48] \n\t"                                \
         "ldr x6, [%1, #56] \n\t"                                \
         "ldr x7, [%1, #64] \n\t"                                \
         "ldr x8, [%1] \n\t"  /* target->x8 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Nine args: x0..x7 plus arg9 stored at [sp, #0] in a 0x20-byte
   scratch area below the aligned stack pointer. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8,arg9)                             \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[10];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "sub sp, sp, #0x20 \n\t"                                \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x1, [%1, #16] \n\t"                                \
         "ldr x2, [%1, #24] \n\t"                                \
         "ldr x3, [%1, #32] \n\t"                                \
         "ldr x4, [%1, #40] \n\t"                                \
         "ldr x5, [%1, #48] \n\t"                                \
         "ldr x6, [%1, #56] \n\t"                                \
         "ldr x7, [%1, #64] \n\t"                                \
         "ldr x8, [%1, #72] \n\t"                                \
         "str x8, [sp, #0]  \n\t"                                \
         "ldr x8, [%1] \n\t"  /* target->x8 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Ten args: x0..x7 plus arg9 at [sp, #0] and arg10 at [sp, #8]. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10)                      \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[11];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "sub sp, sp, #0x20 \n\t"                                \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x1, [%1, #16] \n\t"                                \
         "ldr x2, [%1, #24] \n\t"                                \
         "ldr x3, [%1, #32] \n\t"                                \
         "ldr x4, [%1, #40] \n\t"                                \
         "ldr x5, [%1, #48] \n\t"                                \
         "ldr x6, [%1, #56] \n\t"                                \
         "ldr x7, [%1, #64] \n\t"                                \
         "ldr x8, [%1, #72] \n\t"                                \
         "str x8, [sp, #0]  \n\t"                                \
         "ldr x8, [%1, #80] \n\t"                                \
         "str x8, [sp, #8]  \n\t"                                \
         "ldr x8, [%1] \n\t"  /* target->x8 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Eleven args (definition continues past this chunk). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11)                \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[12];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
_argvec[10] = (unsigned long)(arg10); \ 4647 _argvec[11] = (unsigned long)(arg11); \ 4648 __asm__ volatile( \ 4649 VALGRIND_ALIGN_STACK \ 4650 "sub sp, sp, #0x30 \n\t" \ 4651 "ldr x0, [%1, #8] \n\t" \ 4652 "ldr x1, [%1, #16] \n\t" \ 4653 "ldr x2, [%1, #24] \n\t" \ 4654 "ldr x3, [%1, #32] \n\t" \ 4655 "ldr x4, [%1, #40] \n\t" \ 4656 "ldr x5, [%1, #48] \n\t" \ 4657 "ldr x6, [%1, #56] \n\t" \ 4658 "ldr x7, [%1, #64] \n\t" \ 4659 "ldr x8, [%1, #72] \n\t" \ 4660 "str x8, [sp, #0] \n\t" \ 4661 "ldr x8, [%1, #80] \n\t" \ 4662 "str x8, [sp, #8] \n\t" \ 4663 "ldr x8, [%1, #88] \n\t" \ 4664 "str x8, [sp, #16] \n\t" \ 4665 "ldr x8, [%1] \n\t" /* target->x8 */ \ 4666 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \ 4667 VALGRIND_RESTORE_STACK \ 4668 "mov %0, x0" \ 4669 : /*out*/ "=r" (_res) \ 4670 : /*in*/ "0" (&_argvec[0]) \ 4671 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \ 4672 ); \ 4673 lval = (__typeof__(lval)) _res; \ 4674 } while (0) 4675 4676 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 4677 arg7,arg8,arg9,arg10,arg11, \ 4678 arg12) \ 4679 do { \ 4680 volatile OrigFn _orig = (orig); \ 4681 volatile unsigned long _argvec[13]; \ 4682 volatile unsigned long _res; \ 4683 _argvec[0] = (unsigned long)_orig.nraddr; \ 4684 _argvec[1] = (unsigned long)(arg1); \ 4685 _argvec[2] = (unsigned long)(arg2); \ 4686 _argvec[3] = (unsigned long)(arg3); \ 4687 _argvec[4] = (unsigned long)(arg4); \ 4688 _argvec[5] = (unsigned long)(arg5); \ 4689 _argvec[6] = (unsigned long)(arg6); \ 4690 _argvec[7] = (unsigned long)(arg7); \ 4691 _argvec[8] = (unsigned long)(arg8); \ 4692 _argvec[9] = (unsigned long)(arg9); \ 4693 _argvec[10] = (unsigned long)(arg10); \ 4694 _argvec[11] = (unsigned long)(arg11); \ 4695 _argvec[12] = (unsigned long)(arg12); \ 4696 __asm__ volatile( \ 4697 VALGRIND_ALIGN_STACK \ 4698 "sub sp, sp, #0x30 \n\t" \ 4699 "ldr x0, [%1, #8] \n\t" \ 4700 "ldr x1, [%1, #16] \n\t" \ 4701 "ldr x2, [%1, #24] \n\t" \ 4702 "ldr x3, [%1, #32] \n\t" \ 4703 "ldr x4, 
[%1, #40] \n\t" \ 4704 "ldr x5, [%1, #48] \n\t" \ 4705 "ldr x6, [%1, #56] \n\t" \ 4706 "ldr x7, [%1, #64] \n\t" \ 4707 "ldr x8, [%1, #72] \n\t" \ 4708 "str x8, [sp, #0] \n\t" \ 4709 "ldr x8, [%1, #80] \n\t" \ 4710 "str x8, [sp, #8] \n\t" \ 4711 "ldr x8, [%1, #88] \n\t" \ 4712 "str x8, [sp, #16] \n\t" \ 4713 "ldr x8, [%1, #96] \n\t" \ 4714 "str x8, [sp, #24] \n\t" \ 4715 "ldr x8, [%1] \n\t" /* target->x8 */ \ 4716 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \ 4717 VALGRIND_RESTORE_STACK \ 4718 "mov %0, x0" \ 4719 : /*out*/ "=r" (_res) \ 4720 : /*in*/ "0" (&_argvec[0]) \ 4721 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \ 4722 ); \ 4723 lval = (__typeof__(lval)) _res; \ 4724 } while (0) 4725 4726 #endif /* PLAT_arm64_linux */ 4727 4728 /* ------------------------- s390x-linux ------------------------- */ 4729 4730 #if defined(PLAT_s390x_linux) 4731 4732 /* Similar workaround as amd64 (see above), but we use r11 as frame 4733 pointer and save the old r11 in r7. r11 might be used for 4734 argvec, therefore we copy argvec in r1 since r1 is clobbered 4735 after the call anyway. */ 4736 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM) 4737 # define __FRAME_POINTER \ 4738 ,"d"(__builtin_dwarf_cfa()) 4739 # define VALGRIND_CFI_PROLOGUE \ 4740 ".cfi_remember_state\n\t" \ 4741 "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \ 4742 "lgr 7,11\n\t" \ 4743 "lgr 11,%2\n\t" \ 4744 ".cfi_def_cfa r11, 0\n\t" 4745 # define VALGRIND_CFI_EPILOGUE \ 4746 "lgr 11, 7\n\t" \ 4747 ".cfi_restore_state\n\t" 4748 #else 4749 # define __FRAME_POINTER 4750 # define VALGRIND_CFI_PROLOGUE \ 4751 "lgr 1,%1\n\t" 4752 # define VALGRIND_CFI_EPILOGUE 4753 #endif 4754 4755 /* Nb: On s390 the stack pointer is properly aligned *at all times* 4756 according to the s390 GCC maintainer. (The ABI specification is not 4757 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and 4758 VALGRIND_RESTORE_STACK are not defined here. */ 4759 4760 /* These regs are trashed by the hidden call. 
Note that we overwrite 4761 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the 4762 function a proper return address. All others are ABI defined call 4763 clobbers. */ 4764 #if defined(__VX__) || defined(__S390_VX__) 4765 #define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \ 4766 "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", \ 4767 "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15", \ 4768 "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23", \ 4769 "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31" 4770 #else 4771 #define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \ 4772 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7" 4773 #endif 4774 4775 /* Nb: Although r11 is modified in the asm snippets below (inside 4776 VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for 4777 two reasons: 4778 (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not 4779 modified 4780 (2) GCC will complain that r11 cannot appear inside a clobber section, 4781 when compiled with -O -fno-omit-frame-pointer 4782 */ 4783 4784 #define CALL_FN_W_v(lval, orig) \ 4785 do { \ 4786 volatile OrigFn _orig = (orig); \ 4787 volatile unsigned long _argvec[1]; \ 4788 volatile unsigned long _res; \ 4789 _argvec[0] = (unsigned long)_orig.nraddr; \ 4790 __asm__ volatile( \ 4791 VALGRIND_CFI_PROLOGUE \ 4792 "aghi 15,-160\n\t" \ 4793 "lg 1, 0(1)\n\t" /* target->r1 */ \ 4794 VALGRIND_CALL_NOREDIR_R1 \ 4795 "aghi 15,160\n\t" \ 4796 VALGRIND_CFI_EPILOGUE \ 4797 "lgr %0, 2\n\t" \ 4798 : /*out*/ "=d" (_res) \ 4799 : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \ 4800 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \ 4801 ); \ 4802 lval = (__typeof__(lval)) _res; \ 4803 } while (0) 4804 4805 /* The call abi has the arguments in r2-r6 and stack */ 4806 #define CALL_FN_W_W(lval, orig, arg1) \ 4807 do { \ 4808 volatile OrigFn _orig = (orig); \ 4809 volatile unsigned long _argvec[2]; \ 4810 volatile unsigned long _res; \ 4811 _argvec[0] = 
(unsigned long)_orig.nraddr; \ 4812 _argvec[1] = (unsigned long)arg1; \ 4813 __asm__ volatile( \ 4814 VALGRIND_CFI_PROLOGUE \ 4815 "aghi 15,-160\n\t" \ 4816 "lg 2, 8(1)\n\t" \ 4817 "lg 1, 0(1)\n\t" \ 4818 VALGRIND_CALL_NOREDIR_R1 \ 4819 "aghi 15,160\n\t" \ 4820 VALGRIND_CFI_EPILOGUE \ 4821 "lgr %0, 2\n\t" \ 4822 : /*out*/ "=d" (_res) \ 4823 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4824 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \ 4825 ); \ 4826 lval = (__typeof__(lval)) _res; \ 4827 } while (0) 4828 4829 #define CALL_FN_W_WW(lval, orig, arg1, arg2) \ 4830 do { \ 4831 volatile OrigFn _orig = (orig); \ 4832 volatile unsigned long _argvec[3]; \ 4833 volatile unsigned long _res; \ 4834 _argvec[0] = (unsigned long)_orig.nraddr; \ 4835 _argvec[1] = (unsigned long)arg1; \ 4836 _argvec[2] = (unsigned long)arg2; \ 4837 __asm__ volatile( \ 4838 VALGRIND_CFI_PROLOGUE \ 4839 "aghi 15,-160\n\t" \ 4840 "lg 2, 8(1)\n\t" \ 4841 "lg 3,16(1)\n\t" \ 4842 "lg 1, 0(1)\n\t" \ 4843 VALGRIND_CALL_NOREDIR_R1 \ 4844 "aghi 15,160\n\t" \ 4845 VALGRIND_CFI_EPILOGUE \ 4846 "lgr %0, 2\n\t" \ 4847 : /*out*/ "=d" (_res) \ 4848 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4849 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \ 4850 ); \ 4851 lval = (__typeof__(lval)) _res; \ 4852 } while (0) 4853 4854 #define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \ 4855 do { \ 4856 volatile OrigFn _orig = (orig); \ 4857 volatile unsigned long _argvec[4]; \ 4858 volatile unsigned long _res; \ 4859 _argvec[0] = (unsigned long)_orig.nraddr; \ 4860 _argvec[1] = (unsigned long)arg1; \ 4861 _argvec[2] = (unsigned long)arg2; \ 4862 _argvec[3] = (unsigned long)arg3; \ 4863 __asm__ volatile( \ 4864 VALGRIND_CFI_PROLOGUE \ 4865 "aghi 15,-160\n\t" \ 4866 "lg 2, 8(1)\n\t" \ 4867 "lg 3,16(1)\n\t" \ 4868 "lg 4,24(1)\n\t" \ 4869 "lg 1, 0(1)\n\t" \ 4870 VALGRIND_CALL_NOREDIR_R1 \ 4871 "aghi 15,160\n\t" \ 4872 VALGRIND_CFI_EPILOGUE \ 4873 "lgr %0, 2\n\t" \ 4874 : /*out*/ "=d" (_res) \ 4875 : /*in*/ "a" 
(&_argvec[0]) __FRAME_POINTER \ 4876 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \ 4877 ); \ 4878 lval = (__typeof__(lval)) _res; \ 4879 } while (0) 4880 4881 #define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \ 4882 do { \ 4883 volatile OrigFn _orig = (orig); \ 4884 volatile unsigned long _argvec[5]; \ 4885 volatile unsigned long _res; \ 4886 _argvec[0] = (unsigned long)_orig.nraddr; \ 4887 _argvec[1] = (unsigned long)arg1; \ 4888 _argvec[2] = (unsigned long)arg2; \ 4889 _argvec[3] = (unsigned long)arg3; \ 4890 _argvec[4] = (unsigned long)arg4; \ 4891 __asm__ volatile( \ 4892 VALGRIND_CFI_PROLOGUE \ 4893 "aghi 15,-160\n\t" \ 4894 "lg 2, 8(1)\n\t" \ 4895 "lg 3,16(1)\n\t" \ 4896 "lg 4,24(1)\n\t" \ 4897 "lg 5,32(1)\n\t" \ 4898 "lg 1, 0(1)\n\t" \ 4899 VALGRIND_CALL_NOREDIR_R1 \ 4900 "aghi 15,160\n\t" \ 4901 VALGRIND_CFI_EPILOGUE \ 4902 "lgr %0, 2\n\t" \ 4903 : /*out*/ "=d" (_res) \ 4904 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4905 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \ 4906 ); \ 4907 lval = (__typeof__(lval)) _res; \ 4908 } while (0) 4909 4910 #define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \ 4911 do { \ 4912 volatile OrigFn _orig = (orig); \ 4913 volatile unsigned long _argvec[6]; \ 4914 volatile unsigned long _res; \ 4915 _argvec[0] = (unsigned long)_orig.nraddr; \ 4916 _argvec[1] = (unsigned long)arg1; \ 4917 _argvec[2] = (unsigned long)arg2; \ 4918 _argvec[3] = (unsigned long)arg3; \ 4919 _argvec[4] = (unsigned long)arg4; \ 4920 _argvec[5] = (unsigned long)arg5; \ 4921 __asm__ volatile( \ 4922 VALGRIND_CFI_PROLOGUE \ 4923 "aghi 15,-160\n\t" \ 4924 "lg 2, 8(1)\n\t" \ 4925 "lg 3,16(1)\n\t" \ 4926 "lg 4,24(1)\n\t" \ 4927 "lg 5,32(1)\n\t" \ 4928 "lg 6,40(1)\n\t" \ 4929 "lg 1, 0(1)\n\t" \ 4930 VALGRIND_CALL_NOREDIR_R1 \ 4931 "aghi 15,160\n\t" \ 4932 VALGRIND_CFI_EPILOGUE \ 4933 "lgr %0, 2\n\t" \ 4934 : /*out*/ "=d" (_res) \ 4935 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4936 : /*trash*/ "cc", "memory", 
__CALLER_SAVED_REGS,"6","7" \ 4937 ); \ 4938 lval = (__typeof__(lval)) _res; \ 4939 } while (0) 4940 4941 #define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \ 4942 arg6) \ 4943 do { \ 4944 volatile OrigFn _orig = (orig); \ 4945 volatile unsigned long _argvec[7]; \ 4946 volatile unsigned long _res; \ 4947 _argvec[0] = (unsigned long)_orig.nraddr; \ 4948 _argvec[1] = (unsigned long)arg1; \ 4949 _argvec[2] = (unsigned long)arg2; \ 4950 _argvec[3] = (unsigned long)arg3; \ 4951 _argvec[4] = (unsigned long)arg4; \ 4952 _argvec[5] = (unsigned long)arg5; \ 4953 _argvec[6] = (unsigned long)arg6; \ 4954 __asm__ volatile( \ 4955 VALGRIND_CFI_PROLOGUE \ 4956 "aghi 15,-168\n\t" \ 4957 "lg 2, 8(1)\n\t" \ 4958 "lg 3,16(1)\n\t" \ 4959 "lg 4,24(1)\n\t" \ 4960 "lg 5,32(1)\n\t" \ 4961 "lg 6,40(1)\n\t" \ 4962 "mvc 160(8,15), 48(1)\n\t" \ 4963 "lg 1, 0(1)\n\t" \ 4964 VALGRIND_CALL_NOREDIR_R1 \ 4965 "aghi 15,168\n\t" \ 4966 VALGRIND_CFI_EPILOGUE \ 4967 "lgr %0, 2\n\t" \ 4968 : /*out*/ "=d" (_res) \ 4969 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4970 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 4971 ); \ 4972 lval = (__typeof__(lval)) _res; \ 4973 } while (0) 4974 4975 #define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \ 4976 arg6, arg7) \ 4977 do { \ 4978 volatile OrigFn _orig = (orig); \ 4979 volatile unsigned long _argvec[8]; \ 4980 volatile unsigned long _res; \ 4981 _argvec[0] = (unsigned long)_orig.nraddr; \ 4982 _argvec[1] = (unsigned long)arg1; \ 4983 _argvec[2] = (unsigned long)arg2; \ 4984 _argvec[3] = (unsigned long)arg3; \ 4985 _argvec[4] = (unsigned long)arg4; \ 4986 _argvec[5] = (unsigned long)arg5; \ 4987 _argvec[6] = (unsigned long)arg6; \ 4988 _argvec[7] = (unsigned long)arg7; \ 4989 __asm__ volatile( \ 4990 VALGRIND_CFI_PROLOGUE \ 4991 "aghi 15,-176\n\t" \ 4992 "lg 2, 8(1)\n\t" \ 4993 "lg 3,16(1)\n\t" \ 4994 "lg 4,24(1)\n\t" \ 4995 "lg 5,32(1)\n\t" \ 4996 "lg 6,40(1)\n\t" \ 4997 "mvc 160(8,15), 48(1)\n\t" \ 4998 "mvc 168(8,15), 
56(1)\n\t" \ 4999 "lg 1, 0(1)\n\t" \ 5000 VALGRIND_CALL_NOREDIR_R1 \ 5001 "aghi 15,176\n\t" \ 5002 VALGRIND_CFI_EPILOGUE \ 5003 "lgr %0, 2\n\t" \ 5004 : /*out*/ "=d" (_res) \ 5005 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 5006 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 5007 ); \ 5008 lval = (__typeof__(lval)) _res; \ 5009 } while (0) 5010 5011 #define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \ 5012 arg6, arg7 ,arg8) \ 5013 do { \ 5014 volatile OrigFn _orig = (orig); \ 5015 volatile unsigned long _argvec[9]; \ 5016 volatile unsigned long _res; \ 5017 _argvec[0] = (unsigned long)_orig.nraddr; \ 5018 _argvec[1] = (unsigned long)arg1; \ 5019 _argvec[2] = (unsigned long)arg2; \ 5020 _argvec[3] = (unsigned long)arg3; \ 5021 _argvec[4] = (unsigned long)arg4; \ 5022 _argvec[5] = (unsigned long)arg5; \ 5023 _argvec[6] = (unsigned long)arg6; \ 5024 _argvec[7] = (unsigned long)arg7; \ 5025 _argvec[8] = (unsigned long)arg8; \ 5026 __asm__ volatile( \ 5027 VALGRIND_CFI_PROLOGUE \ 5028 "aghi 15,-184\n\t" \ 5029 "lg 2, 8(1)\n\t" \ 5030 "lg 3,16(1)\n\t" \ 5031 "lg 4,24(1)\n\t" \ 5032 "lg 5,32(1)\n\t" \ 5033 "lg 6,40(1)\n\t" \ 5034 "mvc 160(8,15), 48(1)\n\t" \ 5035 "mvc 168(8,15), 56(1)\n\t" \ 5036 "mvc 176(8,15), 64(1)\n\t" \ 5037 "lg 1, 0(1)\n\t" \ 5038 VALGRIND_CALL_NOREDIR_R1 \ 5039 "aghi 15,184\n\t" \ 5040 VALGRIND_CFI_EPILOGUE \ 5041 "lgr %0, 2\n\t" \ 5042 : /*out*/ "=d" (_res) \ 5043 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 5044 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 5045 ); \ 5046 lval = (__typeof__(lval)) _res; \ 5047 } while (0) 5048 5049 #define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \ 5050 arg6, arg7 ,arg8, arg9) \ 5051 do { \ 5052 volatile OrigFn _orig = (orig); \ 5053 volatile unsigned long _argvec[10]; \ 5054 volatile unsigned long _res; \ 5055 _argvec[0] = (unsigned long)_orig.nraddr; \ 5056 _argvec[1] = (unsigned long)arg1; \ 5057 _argvec[2] = (unsigned long)arg2; \ 5058 _argvec[3] = (unsigned 
long)arg3; \ 5059 _argvec[4] = (unsigned long)arg4; \ 5060 _argvec[5] = (unsigned long)arg5; \ 5061 _argvec[6] = (unsigned long)arg6; \ 5062 _argvec[7] = (unsigned long)arg7; \ 5063 _argvec[8] = (unsigned long)arg8; \ 5064 _argvec[9] = (unsigned long)arg9; \ 5065 __asm__ volatile( \ 5066 VALGRIND_CFI_PROLOGUE \ 5067 "aghi 15,-192\n\t" \ 5068 "lg 2, 8(1)\n\t" \ 5069 "lg 3,16(1)\n\t" \ 5070 "lg 4,24(1)\n\t" \ 5071 "lg 5,32(1)\n\t" \ 5072 "lg 6,40(1)\n\t" \ 5073 "mvc 160(8,15), 48(1)\n\t" \ 5074 "mvc 168(8,15), 56(1)\n\t" \ 5075 "mvc 176(8,15), 64(1)\n\t" \ 5076 "mvc 184(8,15), 72(1)\n\t" \ 5077 "lg 1, 0(1)\n\t" \ 5078 VALGRIND_CALL_NOREDIR_R1 \ 5079 "aghi 15,192\n\t" \ 5080 VALGRIND_CFI_EPILOGUE \ 5081 "lgr %0, 2\n\t" \ 5082 : /*out*/ "=d" (_res) \ 5083 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 5084 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 5085 ); \ 5086 lval = (__typeof__(lval)) _res; \ 5087 } while (0) 5088 5089 #define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \ 5090 arg6, arg7 ,arg8, arg9, arg10) \ 5091 do { \ 5092 volatile OrigFn _orig = (orig); \ 5093 volatile unsigned long _argvec[11]; \ 5094 volatile unsigned long _res; \ 5095 _argvec[0] = (unsigned long)_orig.nraddr; \ 5096 _argvec[1] = (unsigned long)arg1; \ 5097 _argvec[2] = (unsigned long)arg2; \ 5098 _argvec[3] = (unsigned long)arg3; \ 5099 _argvec[4] = (unsigned long)arg4; \ 5100 _argvec[5] = (unsigned long)arg5; \ 5101 _argvec[6] = (unsigned long)arg6; \ 5102 _argvec[7] = (unsigned long)arg7; \ 5103 _argvec[8] = (unsigned long)arg8; \ 5104 _argvec[9] = (unsigned long)arg9; \ 5105 _argvec[10] = (unsigned long)arg10; \ 5106 __asm__ volatile( \ 5107 VALGRIND_CFI_PROLOGUE \ 5108 "aghi 15,-200\n\t" \ 5109 "lg 2, 8(1)\n\t" \ 5110 "lg 3,16(1)\n\t" \ 5111 "lg 4,24(1)\n\t" \ 5112 "lg 5,32(1)\n\t" \ 5113 "lg 6,40(1)\n\t" \ 5114 "mvc 160(8,15), 48(1)\n\t" \ 5115 "mvc 168(8,15), 56(1)\n\t" \ 5116 "mvc 176(8,15), 64(1)\n\t" \ 5117 "mvc 184(8,15), 72(1)\n\t" \ 5118 "mvc 192(8,15), 
80(1)\n\t" \ 5119 "lg 1, 0(1)\n\t" \ 5120 VALGRIND_CALL_NOREDIR_R1 \ 5121 "aghi 15,200\n\t" \ 5122 VALGRIND_CFI_EPILOGUE \ 5123 "lgr %0, 2\n\t" \ 5124 : /*out*/ "=d" (_res) \ 5125 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 5126 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 5127 ); \ 5128 lval = (__typeof__(lval)) _res; \ 5129 } while (0) 5130 5131 #define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \ 5132 arg6, arg7 ,arg8, arg9, arg10, arg11) \ 5133 do { \ 5134 volatile OrigFn _orig = (orig); \ 5135 volatile unsigned long _argvec[12]; \ 5136 volatile unsigned long _res; \ 5137 _argvec[0] = (unsigned long)_orig.nraddr; \ 5138 _argvec[1] = (unsigned long)arg1; \ 5139 _argvec[2] = (unsigned long)arg2; \ 5140 _argvec[3] = (unsigned long)arg3; \ 5141 _argvec[4] = (unsigned long)arg4; \ 5142 _argvec[5] = (unsigned long)arg5; \ 5143 _argvec[6] = (unsigned long)arg6; \ 5144 _argvec[7] = (unsigned long)arg7; \ 5145 _argvec[8] = (unsigned long)arg8; \ 5146 _argvec[9] = (unsigned long)arg9; \ 5147 _argvec[10] = (unsigned long)arg10; \ 5148 _argvec[11] = (unsigned long)arg11; \ 5149 __asm__ volatile( \ 5150 VALGRIND_CFI_PROLOGUE \ 5151 "aghi 15,-208\n\t" \ 5152 "lg 2, 8(1)\n\t" \ 5153 "lg 3,16(1)\n\t" \ 5154 "lg 4,24(1)\n\t" \ 5155 "lg 5,32(1)\n\t" \ 5156 "lg 6,40(1)\n\t" \ 5157 "mvc 160(8,15), 48(1)\n\t" \ 5158 "mvc 168(8,15), 56(1)\n\t" \ 5159 "mvc 176(8,15), 64(1)\n\t" \ 5160 "mvc 184(8,15), 72(1)\n\t" \ 5161 "mvc 192(8,15), 80(1)\n\t" \ 5162 "mvc 200(8,15), 88(1)\n\t" \ 5163 "lg 1, 0(1)\n\t" \ 5164 VALGRIND_CALL_NOREDIR_R1 \ 5165 "aghi 15,208\n\t" \ 5166 VALGRIND_CFI_EPILOGUE \ 5167 "lgr %0, 2\n\t" \ 5168 : /*out*/ "=d" (_res) \ 5169 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 5170 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 5171 ); \ 5172 lval = (__typeof__(lval)) _res; \ 5173 } while (0) 5174 5175 #define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \ 5176 arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\ 5177 do { \ 5178 
volatile OrigFn _orig = (orig); \ 5179 volatile unsigned long _argvec[13]; \ 5180 volatile unsigned long _res; \ 5181 _argvec[0] = (unsigned long)_orig.nraddr; \ 5182 _argvec[1] = (unsigned long)arg1; \ 5183 _argvec[2] = (unsigned long)arg2; \ 5184 _argvec[3] = (unsigned long)arg3; \ 5185 _argvec[4] = (unsigned long)arg4; \ 5186 _argvec[5] = (unsigned long)arg5; \ 5187 _argvec[6] = (unsigned long)arg6; \ 5188 _argvec[7] = (unsigned long)arg7; \ 5189 _argvec[8] = (unsigned long)arg8; \ 5190 _argvec[9] = (unsigned long)arg9; \ 5191 _argvec[10] = (unsigned long)arg10; \ 5192 _argvec[11] = (unsigned long)arg11; \ 5193 _argvec[12] = (unsigned long)arg12; \ 5194 __asm__ volatile( \ 5195 VALGRIND_CFI_PROLOGUE \ 5196 "aghi 15,-216\n\t" \ 5197 "lg 2, 8(1)\n\t" \ 5198 "lg 3,16(1)\n\t" \ 5199 "lg 4,24(1)\n\t" \ 5200 "lg 5,32(1)\n\t" \ 5201 "lg 6,40(1)\n\t" \ 5202 "mvc 160(8,15), 48(1)\n\t" \ 5203 "mvc 168(8,15), 56(1)\n\t" \ 5204 "mvc 176(8,15), 64(1)\n\t" \ 5205 "mvc 184(8,15), 72(1)\n\t" \ 5206 "mvc 192(8,15), 80(1)\n\t" \ 5207 "mvc 200(8,15), 88(1)\n\t" \ 5208 "mvc 208(8,15), 96(1)\n\t" \ 5209 "lg 1, 0(1)\n\t" \ 5210 VALGRIND_CALL_NOREDIR_R1 \ 5211 "aghi 15,216\n\t" \ 5212 VALGRIND_CFI_EPILOGUE \ 5213 "lgr %0, 2\n\t" \ 5214 : /*out*/ "=d" (_res) \ 5215 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 5216 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 5217 ); \ 5218 lval = (__typeof__(lval)) _res; \ 5219 } while (0) 5220 5221 5222 #endif /* PLAT_s390x_linux */ 5223 5224 /* ------------------------- mips32-linux ----------------------- */ 5225 5226 #if defined(PLAT_mips32_linux) 5227 5228 /* These regs are trashed by the hidden call. */ 5229 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \ 5230 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \ 5231 "$25", "$31" 5232 5233 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned 5234 long) == 4. 
*/ 5235 5236 #define CALL_FN_W_v(lval, orig) \ 5237 do { \ 5238 volatile OrigFn _orig = (orig); \ 5239 volatile unsigned long _argvec[1]; \ 5240 volatile unsigned long _res; \ 5241 _argvec[0] = (unsigned long)_orig.nraddr; \ 5242 __asm__ volatile( \ 5243 "subu $29, $29, 8 \n\t" \ 5244 "sw $28, 0($29) \n\t" \ 5245 "sw $31, 4($29) \n\t" \ 5246 "subu $29, $29, 16 \n\t" \ 5247 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 5248 VALGRIND_CALL_NOREDIR_T9 \ 5249 "addu $29, $29, 16\n\t" \ 5250 "lw $28, 0($29) \n\t" \ 5251 "lw $31, 4($29) \n\t" \ 5252 "addu $29, $29, 8 \n\t" \ 5253 "move %0, $2\n" \ 5254 : /*out*/ "=r" (_res) \ 5255 : /*in*/ "0" (&_argvec[0]) \ 5256 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 5257 ); \ 5258 lval = (__typeof__(lval)) _res; \ 5259 } while (0) 5260 5261 #define CALL_FN_W_W(lval, orig, arg1) \ 5262 do { \ 5263 volatile OrigFn _orig = (orig); \ 5264 volatile unsigned long _argvec[2]; \ 5265 volatile unsigned long _res; \ 5266 _argvec[0] = (unsigned long)_orig.nraddr; \ 5267 _argvec[1] = (unsigned long)(arg1); \ 5268 __asm__ volatile( \ 5269 "subu $29, $29, 8 \n\t" \ 5270 "sw $28, 0($29) \n\t" \ 5271 "sw $31, 4($29) \n\t" \ 5272 "subu $29, $29, 16 \n\t" \ 5273 "lw $4, 4(%1) \n\t" /* arg1*/ \ 5274 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 5275 VALGRIND_CALL_NOREDIR_T9 \ 5276 "addu $29, $29, 16 \n\t" \ 5277 "lw $28, 0($29) \n\t" \ 5278 "lw $31, 4($29) \n\t" \ 5279 "addu $29, $29, 8 \n\t" \ 5280 "move %0, $2\n" \ 5281 : /*out*/ "=r" (_res) \ 5282 : /*in*/ "0" (&_argvec[0]) \ 5283 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 5284 ); \ 5285 lval = (__typeof__(lval)) _res; \ 5286 } while (0) 5287 5288 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \ 5289 do { \ 5290 volatile OrigFn _orig = (orig); \ 5291 volatile unsigned long _argvec[3]; \ 5292 volatile unsigned long _res; \ 5293 _argvec[0] = (unsigned long)_orig.nraddr; \ 5294 _argvec[1] = (unsigned long)(arg1); \ 5295 _argvec[2] = (unsigned long)(arg2); \ 5296 __asm__ volatile( \ 5297 "subu $29, $29, 8 \n\t" \ 
5298 "sw $28, 0($29) \n\t" \ 5299 "sw $31, 4($29) \n\t" \ 5300 "subu $29, $29, 16 \n\t" \ 5301 "lw $4, 4(%1) \n\t" \ 5302 "lw $5, 8(%1) \n\t" \ 5303 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 5304 VALGRIND_CALL_NOREDIR_T9 \ 5305 "addu $29, $29, 16 \n\t" \ 5306 "lw $28, 0($29) \n\t" \ 5307 "lw $31, 4($29) \n\t" \ 5308 "addu $29, $29, 8 \n\t" \ 5309 "move %0, $2\n" \ 5310 : /*out*/ "=r" (_res) \ 5311 : /*in*/ "0" (&_argvec[0]) \ 5312 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 5313 ); \ 5314 lval = (__typeof__(lval)) _res; \ 5315 } while (0) 5316 5317 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \ 5318 do { \ 5319 volatile OrigFn _orig = (orig); \ 5320 volatile unsigned long _argvec[4]; \ 5321 volatile unsigned long _res; \ 5322 _argvec[0] = (unsigned long)_orig.nraddr; \ 5323 _argvec[1] = (unsigned long)(arg1); \ 5324 _argvec[2] = (unsigned long)(arg2); \ 5325 _argvec[3] = (unsigned long)(arg3); \ 5326 __asm__ volatile( \ 5327 "subu $29, $29, 8 \n\t" \ 5328 "sw $28, 0($29) \n\t" \ 5329 "sw $31, 4($29) \n\t" \ 5330 "subu $29, $29, 16 \n\t" \ 5331 "lw $4, 4(%1) \n\t" \ 5332 "lw $5, 8(%1) \n\t" \ 5333 "lw $6, 12(%1) \n\t" \ 5334 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 5335 VALGRIND_CALL_NOREDIR_T9 \ 5336 "addu $29, $29, 16 \n\t" \ 5337 "lw $28, 0($29) \n\t" \ 5338 "lw $31, 4($29) \n\t" \ 5339 "addu $29, $29, 8 \n\t" \ 5340 "move %0, $2\n" \ 5341 : /*out*/ "=r" (_res) \ 5342 : /*in*/ "0" (&_argvec[0]) \ 5343 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 5344 ); \ 5345 lval = (__typeof__(lval)) _res; \ 5346 } while (0) 5347 5348 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \ 5349 do { \ 5350 volatile OrigFn _orig = (orig); \ 5351 volatile unsigned long _argvec[5]; \ 5352 volatile unsigned long _res; \ 5353 _argvec[0] = (unsigned long)_orig.nraddr; \ 5354 _argvec[1] = (unsigned long)(arg1); \ 5355 _argvec[2] = (unsigned long)(arg2); \ 5356 _argvec[3] = (unsigned long)(arg3); \ 5357 _argvec[4] = (unsigned long)(arg4); \ 5358 __asm__ volatile( \ 5359 "subu $29, 
$29, 8 \n\t" \ 5360 "sw $28, 0($29) \n\t" \ 5361 "sw $31, 4($29) \n\t" \ 5362 "subu $29, $29, 16 \n\t" \ 5363 "lw $4, 4(%1) \n\t" \ 5364 "lw $5, 8(%1) \n\t" \ 5365 "lw $6, 12(%1) \n\t" \ 5366 "lw $7, 16(%1) \n\t" \ 5367 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 5368 VALGRIND_CALL_NOREDIR_T9 \ 5369 "addu $29, $29, 16 \n\t" \ 5370 "lw $28, 0($29) \n\t" \ 5371 "lw $31, 4($29) \n\t" \ 5372 "addu $29, $29, 8 \n\t" \ 5373 "move %0, $2\n" \ 5374 : /*out*/ "=r" (_res) \ 5375 : /*in*/ "0" (&_argvec[0]) \ 5376 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 5377 ); \ 5378 lval = (__typeof__(lval)) _res; \ 5379 } while (0) 5380 5381 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \ 5382 do { \ 5383 volatile OrigFn _orig = (orig); \ 5384 volatile unsigned long _argvec[6]; \ 5385 volatile unsigned long _res; \ 5386 _argvec[0] = (unsigned long)_orig.nraddr; \ 5387 _argvec[1] = (unsigned long)(arg1); \ 5388 _argvec[2] = (unsigned long)(arg2); \ 5389 _argvec[3] = (unsigned long)(arg3); \ 5390 _argvec[4] = (unsigned long)(arg4); \ 5391 _argvec[5] = (unsigned long)(arg5); \ 5392 __asm__ volatile( \ 5393 "subu $29, $29, 8 \n\t" \ 5394 "sw $28, 0($29) \n\t" \ 5395 "sw $31, 4($29) \n\t" \ 5396 "lw $4, 20(%1) \n\t" \ 5397 "subu $29, $29, 24\n\t" \ 5398 "sw $4, 16($29) \n\t" \ 5399 "lw $4, 4(%1) \n\t" \ 5400 "lw $5, 8(%1) \n\t" \ 5401 "lw $6, 12(%1) \n\t" \ 5402 "lw $7, 16(%1) \n\t" \ 5403 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 5404 VALGRIND_CALL_NOREDIR_T9 \ 5405 "addu $29, $29, 24 \n\t" \ 5406 "lw $28, 0($29) \n\t" \ 5407 "lw $31, 4($29) \n\t" \ 5408 "addu $29, $29, 8 \n\t" \ 5409 "move %0, $2\n" \ 5410 : /*out*/ "=r" (_res) \ 5411 : /*in*/ "0" (&_argvec[0]) \ 5412 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 5413 ); \ 5414 lval = (__typeof__(lval)) _res; \ 5415 } while (0) 5416 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \ 5417 do { \ 5418 volatile OrigFn _orig = (orig); \ 5419 volatile unsigned long _argvec[7]; \ 5420 volatile unsigned long _res; \ 5421 
_argvec[0] = (unsigned long)_orig.nraddr; \ 5422 _argvec[1] = (unsigned long)(arg1); \ 5423 _argvec[2] = (unsigned long)(arg2); \ 5424 _argvec[3] = (unsigned long)(arg3); \ 5425 _argvec[4] = (unsigned long)(arg4); \ 5426 _argvec[5] = (unsigned long)(arg5); \ 5427 _argvec[6] = (unsigned long)(arg6); \ 5428 __asm__ volatile( \ 5429 "subu $29, $29, 8 \n\t" \ 5430 "sw $28, 0($29) \n\t" \ 5431 "sw $31, 4($29) \n\t" \ 5432 "lw $4, 20(%1) \n\t" \ 5433 "subu $29, $29, 32\n\t" \ 5434 "sw $4, 16($29) \n\t" \ 5435 "lw $4, 24(%1) \n\t" \ 5436 "nop\n\t" \ 5437 "sw $4, 20($29) \n\t" \ 5438 "lw $4, 4(%1) \n\t" \ 5439 "lw $5, 8(%1) \n\t" \ 5440 "lw $6, 12(%1) \n\t" \ 5441 "lw $7, 16(%1) \n\t" \ 5442 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 5443 VALGRIND_CALL_NOREDIR_T9 \ 5444 "addu $29, $29, 32 \n\t" \ 5445 "lw $28, 0($29) \n\t" \ 5446 "lw $31, 4($29) \n\t" \ 5447 "addu $29, $29, 8 \n\t" \ 5448 "move %0, $2\n" \ 5449 : /*out*/ "=r" (_res) \ 5450 : /*in*/ "0" (&_argvec[0]) \ 5451 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 5452 ); \ 5453 lval = (__typeof__(lval)) _res; \ 5454 } while (0) 5455 5456 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 5457 arg7) \ 5458 do { \ 5459 volatile OrigFn _orig = (orig); \ 5460 volatile unsigned long _argvec[8]; \ 5461 volatile unsigned long _res; \ 5462 _argvec[0] = (unsigned long)_orig.nraddr; \ 5463 _argvec[1] = (unsigned long)(arg1); \ 5464 _argvec[2] = (unsigned long)(arg2); \ 5465 _argvec[3] = (unsigned long)(arg3); \ 5466 _argvec[4] = (unsigned long)(arg4); \ 5467 _argvec[5] = (unsigned long)(arg5); \ 5468 _argvec[6] = (unsigned long)(arg6); \ 5469 _argvec[7] = (unsigned long)(arg7); \ 5470 __asm__ volatile( \ 5471 "subu $29, $29, 8 \n\t" \ 5472 "sw $28, 0($29) \n\t" \ 5473 "sw $31, 4($29) \n\t" \ 5474 "lw $4, 20(%1) \n\t" \ 5475 "subu $29, $29, 32\n\t" \ 5476 "sw $4, 16($29) \n\t" \ 5477 "lw $4, 24(%1) \n\t" \ 5478 "sw $4, 20($29) \n\t" \ 5479 "lw $4, 28(%1) \n\t" \ 5480 "sw $4, 24($29) \n\t" \ 5481 "lw $4, 4(%1) \n\t" \ 
5482 "lw $5, 8(%1) \n\t" \ 5483 "lw $6, 12(%1) \n\t" \ 5484 "lw $7, 16(%1) \n\t" \ 5485 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 5486 VALGRIND_CALL_NOREDIR_T9 \ 5487 "addu $29, $29, 32 \n\t" \ 5488 "lw $28, 0($29) \n\t" \ 5489 "lw $31, 4($29) \n\t" \ 5490 "addu $29, $29, 8 \n\t" \ 5491 "move %0, $2\n" \ 5492 : /*out*/ "=r" (_res) \ 5493 : /*in*/ "0" (&_argvec[0]) \ 5494 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 5495 ); \ 5496 lval = (__typeof__(lval)) _res; \ 5497 } while (0) 5498 5499 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 5500 arg7,arg8) \ 5501 do { \ 5502 volatile OrigFn _orig = (orig); \ 5503 volatile unsigned long _argvec[9]; \ 5504 volatile unsigned long _res; \ 5505 _argvec[0] = (unsigned long)_orig.nraddr; \ 5506 _argvec[1] = (unsigned long)(arg1); \ 5507 _argvec[2] = (unsigned long)(arg2); \ 5508 _argvec[3] = (unsigned long)(arg3); \ 5509 _argvec[4] = (unsigned long)(arg4); \ 5510 _argvec[5] = (unsigned long)(arg5); \ 5511 _argvec[6] = (unsigned long)(arg6); \ 5512 _argvec[7] = (unsigned long)(arg7); \ 5513 _argvec[8] = (unsigned long)(arg8); \ 5514 __asm__ volatile( \ 5515 "subu $29, $29, 8 \n\t" \ 5516 "sw $28, 0($29) \n\t" \ 5517 "sw $31, 4($29) \n\t" \ 5518 "lw $4, 20(%1) \n\t" \ 5519 "subu $29, $29, 40\n\t" \ 5520 "sw $4, 16($29) \n\t" \ 5521 "lw $4, 24(%1) \n\t" \ 5522 "sw $4, 20($29) \n\t" \ 5523 "lw $4, 28(%1) \n\t" \ 5524 "sw $4, 24($29) \n\t" \ 5525 "lw $4, 32(%1) \n\t" \ 5526 "sw $4, 28($29) \n\t" \ 5527 "lw $4, 4(%1) \n\t" \ 5528 "lw $5, 8(%1) \n\t" \ 5529 "lw $6, 12(%1) \n\t" \ 5530 "lw $7, 16(%1) \n\t" \ 5531 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 5532 VALGRIND_CALL_NOREDIR_T9 \ 5533 "addu $29, $29, 40 \n\t" \ 5534 "lw $28, 0($29) \n\t" \ 5535 "lw $31, 4($29) \n\t" \ 5536 "addu $29, $29, 8 \n\t" \ 5537 "move %0, $2\n" \ 5538 : /*out*/ "=r" (_res) \ 5539 : /*in*/ "0" (&_argvec[0]) \ 5540 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 5541 ); \ 5542 lval = (__typeof__(lval)) _res; \ 5543 } while (0) 5544 5545 #define 
CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 5546 arg7,arg8,arg9) \ 5547 do { \ 5548 volatile OrigFn _orig = (orig); \ 5549 volatile unsigned long _argvec[10]; \ 5550 volatile unsigned long _res; \ 5551 _argvec[0] = (unsigned long)_orig.nraddr; \ 5552 _argvec[1] = (unsigned long)(arg1); \ 5553 _argvec[2] = (unsigned long)(arg2); \ 5554 _argvec[3] = (unsigned long)(arg3); \ 5555 _argvec[4] = (unsigned long)(arg4); \ 5556 _argvec[5] = (unsigned long)(arg5); \ 5557 _argvec[6] = (unsigned long)(arg6); \ 5558 _argvec[7] = (unsigned long)(arg7); \ 5559 _argvec[8] = (unsigned long)(arg8); \ 5560 _argvec[9] = (unsigned long)(arg9); \ 5561 __asm__ volatile( \ 5562 "subu $29, $29, 8 \n\t" \ 5563 "sw $28, 0($29) \n\t" \ 5564 "sw $31, 4($29) \n\t" \ 5565 "lw $4, 20(%1) \n\t" \ 5566 "subu $29, $29, 40\n\t" \ 5567 "sw $4, 16($29) \n\t" \ 5568 "lw $4, 24(%1) \n\t" \ 5569 "sw $4, 20($29) \n\t" \ 5570 "lw $4, 28(%1) \n\t" \ 5571 "sw $4, 24($29) \n\t" \ 5572 "lw $4, 32(%1) \n\t" \ 5573 "sw $4, 28($29) \n\t" \ 5574 "lw $4, 36(%1) \n\t" \ 5575 "sw $4, 32($29) \n\t" \ 5576 "lw $4, 4(%1) \n\t" \ 5577 "lw $5, 8(%1) \n\t" \ 5578 "lw $6, 12(%1) \n\t" \ 5579 "lw $7, 16(%1) \n\t" \ 5580 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 5581 VALGRIND_CALL_NOREDIR_T9 \ 5582 "addu $29, $29, 40 \n\t" \ 5583 "lw $28, 0($29) \n\t" \ 5584 "lw $31, 4($29) \n\t" \ 5585 "addu $29, $29, 8 \n\t" \ 5586 "move %0, $2\n" \ 5587 : /*out*/ "=r" (_res) \ 5588 : /*in*/ "0" (&_argvec[0]) \ 5589 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 5590 ); \ 5591 lval = (__typeof__(lval)) _res; \ 5592 } while (0) 5593 5594 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 5595 arg7,arg8,arg9,arg10) \ 5596 do { \ 5597 volatile OrigFn _orig = (orig); \ 5598 volatile unsigned long _argvec[11]; \ 5599 volatile unsigned long _res; \ 5600 _argvec[0] = (unsigned long)_orig.nraddr; \ 5601 _argvec[1] = (unsigned long)(arg1); \ 5602 _argvec[2] = (unsigned long)(arg2); \ 5603 _argvec[3] = (unsigned long)(arg3); \ 
5604 _argvec[4] = (unsigned long)(arg4); \ 5605 _argvec[5] = (unsigned long)(arg5); \ 5606 _argvec[6] = (unsigned long)(arg6); \ 5607 _argvec[7] = (unsigned long)(arg7); \ 5608 _argvec[8] = (unsigned long)(arg8); \ 5609 _argvec[9] = (unsigned long)(arg9); \ 5610 _argvec[10] = (unsigned long)(arg10); \ 5611 __asm__ volatile( \ 5612 "subu $29, $29, 8 \n\t" \ 5613 "sw $28, 0($29) \n\t" \ 5614 "sw $31, 4($29) \n\t" \ 5615 "lw $4, 20(%1) \n\t" \ 5616 "subu $29, $29, 48\n\t" \ 5617 "sw $4, 16($29) \n\t" \ 5618 "lw $4, 24(%1) \n\t" \ 5619 "sw $4, 20($29) \n\t" \ 5620 "lw $4, 28(%1) \n\t" \ 5621 "sw $4, 24($29) \n\t" \ 5622 "lw $4, 32(%1) \n\t" \ 5623 "sw $4, 28($29) \n\t" \ 5624 "lw $4, 36(%1) \n\t" \ 5625 "sw $4, 32($29) \n\t" \ 5626 "lw $4, 40(%1) \n\t" \ 5627 "sw $4, 36($29) \n\t" \ 5628 "lw $4, 4(%1) \n\t" \ 5629 "lw $5, 8(%1) \n\t" \ 5630 "lw $6, 12(%1) \n\t" \ 5631 "lw $7, 16(%1) \n\t" \ 5632 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 5633 VALGRIND_CALL_NOREDIR_T9 \ 5634 "addu $29, $29, 48 \n\t" \ 5635 "lw $28, 0($29) \n\t" \ 5636 "lw $31, 4($29) \n\t" \ 5637 "addu $29, $29, 8 \n\t" \ 5638 "move %0, $2\n" \ 5639 : /*out*/ "=r" (_res) \ 5640 : /*in*/ "0" (&_argvec[0]) \ 5641 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 5642 ); \ 5643 lval = (__typeof__(lval)) _res; \ 5644 } while (0) 5645 5646 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \ 5647 arg6,arg7,arg8,arg9,arg10, \ 5648 arg11) \ 5649 do { \ 5650 volatile OrigFn _orig = (orig); \ 5651 volatile unsigned long _argvec[12]; \ 5652 volatile unsigned long _res; \ 5653 _argvec[0] = (unsigned long)_orig.nraddr; \ 5654 _argvec[1] = (unsigned long)(arg1); \ 5655 _argvec[2] = (unsigned long)(arg2); \ 5656 _argvec[3] = (unsigned long)(arg3); \ 5657 _argvec[4] = (unsigned long)(arg4); \ 5658 _argvec[5] = (unsigned long)(arg5); \ 5659 _argvec[6] = (unsigned long)(arg6); \ 5660 _argvec[7] = (unsigned long)(arg7); \ 5661 _argvec[8] = (unsigned long)(arg8); \ 5662 _argvec[9] = (unsigned long)(arg9); \ 5663 
_argvec[10] = (unsigned long)(arg10); \ 5664 _argvec[11] = (unsigned long)(arg11); \ 5665 __asm__ volatile( \ 5666 "subu $29, $29, 8 \n\t" \ 5667 "sw $28, 0($29) \n\t" \ 5668 "sw $31, 4($29) \n\t" \ 5669 "lw $4, 20(%1) \n\t" \ 5670 "subu $29, $29, 48\n\t" \ 5671 "sw $4, 16($29) \n\t" \ 5672 "lw $4, 24(%1) \n\t" \ 5673 "sw $4, 20($29) \n\t" \ 5674 "lw $4, 28(%1) \n\t" \ 5675 "sw $4, 24($29) \n\t" \ 5676 "lw $4, 32(%1) \n\t" \ 5677 "sw $4, 28($29) \n\t" \ 5678 "lw $4, 36(%1) \n\t" \ 5679 "sw $4, 32($29) \n\t" \ 5680 "lw $4, 40(%1) \n\t" \ 5681 "sw $4, 36($29) \n\t" \ 5682 "lw $4, 44(%1) \n\t" \ 5683 "sw $4, 40($29) \n\t" \ 5684 "lw $4, 4(%1) \n\t" \ 5685 "lw $5, 8(%1) \n\t" \ 5686 "lw $6, 12(%1) \n\t" \ 5687 "lw $7, 16(%1) \n\t" \ 5688 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 5689 VALGRIND_CALL_NOREDIR_T9 \ 5690 "addu $29, $29, 48 \n\t" \ 5691 "lw $28, 0($29) \n\t" \ 5692 "lw $31, 4($29) \n\t" \ 5693 "addu $29, $29, 8 \n\t" \ 5694 "move %0, $2\n" \ 5695 : /*out*/ "=r" (_res) \ 5696 : /*in*/ "0" (&_argvec[0]) \ 5697 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 5698 ); \ 5699 lval = (__typeof__(lval)) _res; \ 5700 } while (0) 5701 5702 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \ 5703 arg6,arg7,arg8,arg9,arg10, \ 5704 arg11,arg12) \ 5705 do { \ 5706 volatile OrigFn _orig = (orig); \ 5707 volatile unsigned long _argvec[13]; \ 5708 volatile unsigned long _res; \ 5709 _argvec[0] = (unsigned long)_orig.nraddr; \ 5710 _argvec[1] = (unsigned long)(arg1); \ 5711 _argvec[2] = (unsigned long)(arg2); \ 5712 _argvec[3] = (unsigned long)(arg3); \ 5713 _argvec[4] = (unsigned long)(arg4); \ 5714 _argvec[5] = (unsigned long)(arg5); \ 5715 _argvec[6] = (unsigned long)(arg6); \ 5716 _argvec[7] = (unsigned long)(arg7); \ 5717 _argvec[8] = (unsigned long)(arg8); \ 5718 _argvec[9] = (unsigned long)(arg9); \ 5719 _argvec[10] = (unsigned long)(arg10); \ 5720 _argvec[11] = (unsigned long)(arg11); \ 5721 _argvec[12] = (unsigned long)(arg12); \ 5722 __asm__ volatile( \ 5723 
         /* continuation of mips32 CALL_FN_W_12W: save $gp/$ra,   \
            open a 56-byte frame, copy stack args 5..12 to        \
            16($29)..44($29), load register args 1..4 into        \
            $4..$7, and call through $25 (t9).  */                \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 56\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 40(%1) \n\t"                                     \
         "sw $4, 36($29) \n\t"                                    \
         "lw $4, 44(%1) \n\t"                                     \
         "sw $4, 40($29) \n\t"                                    \
         "lw $4, 48(%1) \n\t"                                     \
         "sw $4, 44($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 56 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"   /* result from $v0 */                  \
         : /*out*/ "=r" (_res)                                    \
         /* NOTE(review): sibling macros in this section use the  \
            "0" matching constraint here; "r" is also correct     \
            but inconsistent -- confirm which was intended.  */   \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#endif /* PLAT_mips32_linux */

/* ------------------------- nanomips-linux -------------------- */

#if defined(PLAT_nanomips_linux)

/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS "$t4", "$t5", "$a0", "$a1", "$a2", \
"$a3", "$a4", "$a5", "$a6", "$a7", "$t0", "$t1", "$t2", "$t3", \
"$t8","$t9", "$at"

/* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
   long) == 4.
*/

/* Call a 0-argument function through the no-redirect gate.  The
   target address is the only payload in _argvec[]; it is loaded into
   $t9 and the result is taken from $a0 after the call.  */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"   /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"      /* result comes back in $a0 */     \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call a 1-argument function: arg1 is loaded into $a0.  */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call a 2-argument function: args in $a0/$a1.  */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call a 3-argument function: args in $a0..$a2.  (Continues past
   this span.)  */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
_argvec[1] = (unsigned long)(arg1); \ 5839 _argvec[2] = (unsigned long)(arg2); \ 5840 _argvec[3] = (unsigned long)(arg3); \ 5841 __asm__ volatile( \ 5842 "lw $t9, 0(%1)\n\t" \ 5843 "lw $a0, 4(%1)\n\t" \ 5844 "lw $a1, 8(%1)\n\t" \ 5845 "lw $a2,12(%1)\n\t" \ 5846 VALGRIND_CALL_NOREDIR_T9 \ 5847 "move %0, $a0\n" \ 5848 : /*out*/ "=r" (_res) \ 5849 : /*in*/ "r" (&_argvec[0]) \ 5850 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 5851 ); \ 5852 lval = (__typeof__(lval)) _res; \ 5853 } while (0) 5854 5855 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \ 5856 do { \ 5857 volatile OrigFn _orig = (orig); \ 5858 volatile unsigned long _argvec[5]; \ 5859 volatile unsigned long _res; \ 5860 _argvec[0] = (unsigned long)_orig.nraddr; \ 5861 _argvec[1] = (unsigned long)(arg1); \ 5862 _argvec[2] = (unsigned long)(arg2); \ 5863 _argvec[3] = (unsigned long)(arg3); \ 5864 _argvec[4] = (unsigned long)(arg4); \ 5865 __asm__ volatile( \ 5866 "lw $t9, 0(%1)\n\t" \ 5867 "lw $a0, 4(%1)\n\t" \ 5868 "lw $a1, 8(%1)\n\t" \ 5869 "lw $a2,12(%1)\n\t" \ 5870 "lw $a3,16(%1)\n\t" \ 5871 VALGRIND_CALL_NOREDIR_T9 \ 5872 "move %0, $a0\n" \ 5873 : /*out*/ "=r" (_res) \ 5874 : /*in*/ "r" (&_argvec[0]) \ 5875 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 5876 ); \ 5877 lval = (__typeof__(lval)) _res; \ 5878 } while (0) 5879 5880 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \ 5881 do { \ 5882 volatile OrigFn _orig = (orig); \ 5883 volatile unsigned long _argvec[6]; \ 5884 volatile unsigned long _res; \ 5885 _argvec[0] = (unsigned long)_orig.nraddr; \ 5886 _argvec[1] = (unsigned long)(arg1); \ 5887 _argvec[2] = (unsigned long)(arg2); \ 5888 _argvec[3] = (unsigned long)(arg3); \ 5889 _argvec[4] = (unsigned long)(arg4); \ 5890 _argvec[5] = (unsigned long)(arg5); \ 5891 __asm__ volatile( \ 5892 "lw $t9, 0(%1)\n\t" \ 5893 "lw $a0, 4(%1)\n\t" \ 5894 "lw $a1, 8(%1)\n\t" \ 5895 "lw $a2,12(%1)\n\t" \ 5896 "lw $a3,16(%1)\n\t" \ 5897 "lw $a4,20(%1)\n\t" \ 5898 VALGRIND_CALL_NOREDIR_T9 \ 5899 "move 
%0, $a0\n" \ 5900 : /*out*/ "=r" (_res) \ 5901 : /*in*/ "r" (&_argvec[0]) \ 5902 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 5903 ); \ 5904 lval = (__typeof__(lval)) _res; \ 5905 } while (0) 5906 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \ 5907 do { \ 5908 volatile OrigFn _orig = (orig); \ 5909 volatile unsigned long _argvec[7]; \ 5910 volatile unsigned long _res; \ 5911 _argvec[0] = (unsigned long)_orig.nraddr; \ 5912 _argvec[1] = (unsigned long)(arg1); \ 5913 _argvec[2] = (unsigned long)(arg2); \ 5914 _argvec[3] = (unsigned long)(arg3); \ 5915 _argvec[4] = (unsigned long)(arg4); \ 5916 _argvec[5] = (unsigned long)(arg5); \ 5917 _argvec[6] = (unsigned long)(arg6); \ 5918 __asm__ volatile( \ 5919 "lw $t9, 0(%1)\n\t" \ 5920 "lw $a0, 4(%1)\n\t" \ 5921 "lw $a1, 8(%1)\n\t" \ 5922 "lw $a2,12(%1)\n\t" \ 5923 "lw $a3,16(%1)\n\t" \ 5924 "lw $a4,20(%1)\n\t" \ 5925 "lw $a5,24(%1)\n\t" \ 5926 VALGRIND_CALL_NOREDIR_T9 \ 5927 "move %0, $a0\n" \ 5928 : /*out*/ "=r" (_res) \ 5929 : /*in*/ "r" (&_argvec[0]) \ 5930 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 5931 ); \ 5932 lval = (__typeof__(lval)) _res; \ 5933 } while (0) 5934 5935 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 5936 arg7) \ 5937 do { \ 5938 volatile OrigFn _orig = (orig); \ 5939 volatile unsigned long _argvec[8]; \ 5940 volatile unsigned long _res; \ 5941 _argvec[0] = (unsigned long)_orig.nraddr; \ 5942 _argvec[1] = (unsigned long)(arg1); \ 5943 _argvec[2] = (unsigned long)(arg2); \ 5944 _argvec[3] = (unsigned long)(arg3); \ 5945 _argvec[4] = (unsigned long)(arg4); \ 5946 _argvec[5] = (unsigned long)(arg5); \ 5947 _argvec[6] = (unsigned long)(arg6); \ 5948 _argvec[7] = (unsigned long)(arg7); \ 5949 __asm__ volatile( \ 5950 "lw $t9, 0(%1)\n\t" \ 5951 "lw $a0, 4(%1)\n\t" \ 5952 "lw $a1, 8(%1)\n\t" \ 5953 "lw $a2,12(%1)\n\t" \ 5954 "lw $a3,16(%1)\n\t" \ 5955 "lw $a4,20(%1)\n\t" \ 5956 "lw $a5,24(%1)\n\t" \ 5957 "lw $a6,28(%1)\n\t" \ 5958 VALGRIND_CALL_NOREDIR_T9 \ 5959 "move 
%0, $a0\n" \ 5960 : /*out*/ "=r" (_res) \ 5961 : /*in*/ "r" (&_argvec[0]) \ 5962 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 5963 ); \ 5964 lval = (__typeof__(lval)) _res; \ 5965 } while (0) 5966 5967 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 5968 arg7,arg8) \ 5969 do { \ 5970 volatile OrigFn _orig = (orig); \ 5971 volatile unsigned long _argvec[9]; \ 5972 volatile unsigned long _res; \ 5973 _argvec[0] = (unsigned long)_orig.nraddr; \ 5974 _argvec[1] = (unsigned long)(arg1); \ 5975 _argvec[2] = (unsigned long)(arg2); \ 5976 _argvec[3] = (unsigned long)(arg3); \ 5977 _argvec[4] = (unsigned long)(arg4); \ 5978 _argvec[5] = (unsigned long)(arg5); \ 5979 _argvec[6] = (unsigned long)(arg6); \ 5980 _argvec[7] = (unsigned long)(arg7); \ 5981 _argvec[8] = (unsigned long)(arg8); \ 5982 __asm__ volatile( \ 5983 "lw $t9, 0(%1)\n\t" \ 5984 "lw $a0, 4(%1)\n\t" \ 5985 "lw $a1, 8(%1)\n\t" \ 5986 "lw $a2,12(%1)\n\t" \ 5987 "lw $a3,16(%1)\n\t" \ 5988 "lw $a4,20(%1)\n\t" \ 5989 "lw $a5,24(%1)\n\t" \ 5990 "lw $a6,28(%1)\n\t" \ 5991 "lw $a7,32(%1)\n\t" \ 5992 VALGRIND_CALL_NOREDIR_T9 \ 5993 "move %0, $a0\n" \ 5994 : /*out*/ "=r" (_res) \ 5995 : /*in*/ "r" (&_argvec[0]) \ 5996 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 5997 ); \ 5998 lval = (__typeof__(lval)) _res; \ 5999 } while (0) 6000 6001 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 6002 arg7,arg8,arg9) \ 6003 do { \ 6004 volatile OrigFn _orig = (orig); \ 6005 volatile unsigned long _argvec[10]; \ 6006 volatile unsigned long _res; \ 6007 _argvec[0] = (unsigned long)_orig.nraddr; \ 6008 _argvec[1] = (unsigned long)(arg1); \ 6009 _argvec[2] = (unsigned long)(arg2); \ 6010 _argvec[3] = (unsigned long)(arg3); \ 6011 _argvec[4] = (unsigned long)(arg4); \ 6012 _argvec[5] = (unsigned long)(arg5); \ 6013 _argvec[6] = (unsigned long)(arg6); \ 6014 _argvec[7] = (unsigned long)(arg7); \ 6015 _argvec[8] = (unsigned long)(arg8); \ 6016 _argvec[9] = (unsigned long)(arg9); \ 6017 __asm__ volatile( \ 
6018 "addiu $sp, $sp, -16 \n\t" \ 6019 "lw $t9,36(%1) \n\t" \ 6020 "sw $t9, 0($sp) \n\t" \ 6021 "lw $t9, 0(%1) \n\t" \ 6022 "lw $a0, 4(%1) \n\t" \ 6023 "lw $a1, 8(%1) \n\t" \ 6024 "lw $a2,12(%1) \n\t" \ 6025 "lw $a3,16(%1) \n\t" \ 6026 "lw $a4,20(%1) \n\t" \ 6027 "lw $a5,24(%1) \n\t" \ 6028 "lw $a6,28(%1) \n\t" \ 6029 "lw $a7,32(%1) \n\t" \ 6030 VALGRIND_CALL_NOREDIR_T9 \ 6031 "move %0, $a0 \n\t" \ 6032 "addiu $sp, $sp, 16 \n\t" \ 6033 : /*out*/ "=r" (_res) \ 6034 : /*in*/ "r" (&_argvec[0]) \ 6035 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 6036 ); \ 6037 lval = (__typeof__(lval)) _res; \ 6038 } while (0) 6039 6040 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 6041 arg7,arg8,arg9,arg10) \ 6042 do { \ 6043 volatile OrigFn _orig = (orig); \ 6044 volatile unsigned long _argvec[11]; \ 6045 volatile unsigned long _res; \ 6046 _argvec[0] = (unsigned long)_orig.nraddr; \ 6047 _argvec[1] = (unsigned long)(arg1); \ 6048 _argvec[2] = (unsigned long)(arg2); \ 6049 _argvec[3] = (unsigned long)(arg3); \ 6050 _argvec[4] = (unsigned long)(arg4); \ 6051 _argvec[5] = (unsigned long)(arg5); \ 6052 _argvec[6] = (unsigned long)(arg6); \ 6053 _argvec[7] = (unsigned long)(arg7); \ 6054 _argvec[8] = (unsigned long)(arg8); \ 6055 _argvec[9] = (unsigned long)(arg9); \ 6056 _argvec[10] = (unsigned long)(arg10); \ 6057 __asm__ volatile( \ 6058 "addiu $sp, $sp, -16 \n\t" \ 6059 "lw $t9,36(%1) \n\t" \ 6060 "sw $t9, 0($sp) \n\t" \ 6061 "lw $t9,40(%1) \n\t" \ 6062 "sw $t9, 4($sp) \n\t" \ 6063 "lw $t9, 0(%1) \n\t" \ 6064 "lw $a0, 4(%1) \n\t" \ 6065 "lw $a1, 8(%1) \n\t" \ 6066 "lw $a2,12(%1) \n\t" \ 6067 "lw $a3,16(%1) \n\t" \ 6068 "lw $a4,20(%1) \n\t" \ 6069 "lw $a5,24(%1) \n\t" \ 6070 "lw $a6,28(%1) \n\t" \ 6071 "lw $a7,32(%1) \n\t" \ 6072 VALGRIND_CALL_NOREDIR_T9 \ 6073 "move %0, $a0 \n\t" \ 6074 "addiu $sp, $sp, 16 \n\t" \ 6075 : /*out*/ "=r" (_res) \ 6076 : /*in*/ "r" (&_argvec[0]) \ 6077 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 6078 ); \ 6079 lval = (__typeof__(lval)) 
_res; \ 6080 } while (0) 6081 6082 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \ 6083 arg6,arg7,arg8,arg9,arg10, \ 6084 arg11) \ 6085 do { \ 6086 volatile OrigFn _orig = (orig); \ 6087 volatile unsigned long _argvec[12]; \ 6088 volatile unsigned long _res; \ 6089 _argvec[0] = (unsigned long)_orig.nraddr; \ 6090 _argvec[1] = (unsigned long)(arg1); \ 6091 _argvec[2] = (unsigned long)(arg2); \ 6092 _argvec[3] = (unsigned long)(arg3); \ 6093 _argvec[4] = (unsigned long)(arg4); \ 6094 _argvec[5] = (unsigned long)(arg5); \ 6095 _argvec[6] = (unsigned long)(arg6); \ 6096 _argvec[7] = (unsigned long)(arg7); \ 6097 _argvec[8] = (unsigned long)(arg8); \ 6098 _argvec[9] = (unsigned long)(arg9); \ 6099 _argvec[10] = (unsigned long)(arg10); \ 6100 _argvec[11] = (unsigned long)(arg11); \ 6101 __asm__ volatile( \ 6102 "addiu $sp, $sp, -16 \n\t" \ 6103 "lw $t9,36(%1) \n\t" \ 6104 "sw $t9, 0($sp) \n\t" \ 6105 "lw $t9,40(%1) \n\t" \ 6106 "sw $t9, 4($sp) \n\t" \ 6107 "lw $t9,44(%1) \n\t" \ 6108 "sw $t9, 8($sp) \n\t" \ 6109 "lw $t9, 0(%1) \n\t" \ 6110 "lw $a0, 4(%1) \n\t" \ 6111 "lw $a1, 8(%1) \n\t" \ 6112 "lw $a2,12(%1) \n\t" \ 6113 "lw $a3,16(%1) \n\t" \ 6114 "lw $a4,20(%1) \n\t" \ 6115 "lw $a5,24(%1) \n\t" \ 6116 "lw $a6,28(%1) \n\t" \ 6117 "lw $a7,32(%1) \n\t" \ 6118 VALGRIND_CALL_NOREDIR_T9 \ 6119 "move %0, $a0 \n\t" \ 6120 "addiu $sp, $sp, 16 \n\t" \ 6121 : /*out*/ "=r" (_res) \ 6122 : /*in*/ "r" (&_argvec[0]) \ 6123 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 6124 ); \ 6125 lval = (__typeof__(lval)) _res; \ 6126 } while (0) 6127 6128 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \ 6129 arg6,arg7,arg8,arg9,arg10, \ 6130 arg11,arg12) \ 6131 do { \ 6132 volatile OrigFn _orig = (orig); \ 6133 volatile unsigned long _argvec[13]; \ 6134 volatile unsigned long _res; \ 6135 _argvec[0] = (unsigned long)_orig.nraddr; \ 6136 _argvec[1] = (unsigned long)(arg1); \ 6137 _argvec[2] = (unsigned long)(arg2); \ 6138 _argvec[3] = (unsigned long)(arg3); \ 6139 
      /* continuation of nanomips CALL_FN_W_12W: marshal the      \
         remaining args, then call via $t9.  Args 1-8 travel in   \
         $a0..$a7; args 9-12 go in a 16-byte outgoing stack       \
         area.  */                                                \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         "addiu $sp, $sp, -16 \n\t"                               \
         "lw $t9,36(%1) \n\t"  /* arg9  -> 0($sp)  */             \
         "sw $t9, 0($sp) \n\t"                                    \
         "lw $t9,40(%1) \n\t"  /* arg10 -> 4($sp)  */             \
         "sw $t9, 4($sp) \n\t"                                    \
         "lw $t9,44(%1) \n\t"  /* arg11 -> 8($sp)  */             \
         "sw $t9, 8($sp) \n\t"                                    \
         "lw $t9,48(%1) \n\t"  /* arg12 -> 12($sp) */             \
         "sw $t9,12($sp) \n\t"                                    \
         "lw $t9, 0(%1) \n\t"  /* target->t9 */                   \
         "lw $a0, 4(%1) \n\t"                                     \
         "lw $a1, 8(%1) \n\t"                                     \
         "lw $a2,12(%1) \n\t"                                     \
         "lw $a3,16(%1) \n\t"                                     \
         "lw $a4,20(%1) \n\t"                                     \
         "lw $a5,24(%1) \n\t"                                     \
         "lw $a6,28(%1) \n\t"                                     \
         "lw $a7,32(%1) \n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0 \n\t"   /* result comes back in $a0 */     \
         "addiu $sp, $sp, 16 \n\t"                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#endif /* PLAT_nanomips_linux */

/* ------------------------- mips64-linux ------------------------- */

#if defined(PLAT_mips64_linux)

/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6",       \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"

/* These CALL_FN_ macros assume that on mips64-linux,
   sizeof(long long) == 8.
*/ 6190 6191 #define MIPS64_LONG2REG_CAST(x) ((long long)(long)x) 6192 6193 #define CALL_FN_W_v(lval, orig) \ 6194 do { \ 6195 volatile OrigFn _orig = (orig); \ 6196 volatile unsigned long long _argvec[1]; \ 6197 volatile unsigned long long _res; \ 6198 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \ 6199 __asm__ volatile( \ 6200 "ld $25, 0(%1)\n\t" /* target->t9 */ \ 6201 VALGRIND_CALL_NOREDIR_T9 \ 6202 "move %0, $2\n" \ 6203 : /*out*/ "=r" (_res) \ 6204 : /*in*/ "0" (&_argvec[0]) \ 6205 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 6206 ); \ 6207 lval = (__typeof__(lval)) (long)_res; \ 6208 } while (0) 6209 6210 #define CALL_FN_W_W(lval, orig, arg1) \ 6211 do { \ 6212 volatile OrigFn _orig = (orig); \ 6213 volatile unsigned long long _argvec[2]; \ 6214 volatile unsigned long long _res; \ 6215 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \ 6216 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \ 6217 __asm__ volatile( \ 6218 "ld $4, 8(%1)\n\t" /* arg1*/ \ 6219 "ld $25, 0(%1)\n\t" /* target->t9 */ \ 6220 VALGRIND_CALL_NOREDIR_T9 \ 6221 "move %0, $2\n" \ 6222 : /*out*/ "=r" (_res) \ 6223 : /*in*/ "r" (&_argvec[0]) \ 6224 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 6225 ); \ 6226 lval = (__typeof__(lval)) (long)_res; \ 6227 } while (0) 6228 6229 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \ 6230 do { \ 6231 volatile OrigFn _orig = (orig); \ 6232 volatile unsigned long long _argvec[3]; \ 6233 volatile unsigned long long _res; \ 6234 _argvec[0] = _orig.nraddr; \ 6235 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \ 6236 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \ 6237 __asm__ volatile( \ 6238 "ld $4, 8(%1)\n\t" \ 6239 "ld $5, 16(%1)\n\t" \ 6240 "ld $25, 0(%1)\n\t" /* target->t9 */ \ 6241 VALGRIND_CALL_NOREDIR_T9 \ 6242 "move %0, $2\n" \ 6243 : /*out*/ "=r" (_res) \ 6244 : /*in*/ "r" (&_argvec[0]) \ 6245 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 6246 ); \ 6247 lval = (__typeof__(lval)) (long)_res; \ 6248 } while (0) 6249 6250 6251 #define CALL_FN_W_WWW(lval, orig, 
arg1,arg2,arg3) \ 6252 do { \ 6253 volatile OrigFn _orig = (orig); \ 6254 volatile unsigned long long _argvec[4]; \ 6255 volatile unsigned long long _res; \ 6256 _argvec[0] = _orig.nraddr; \ 6257 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \ 6258 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \ 6259 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \ 6260 __asm__ volatile( \ 6261 "ld $4, 8(%1)\n\t" \ 6262 "ld $5, 16(%1)\n\t" \ 6263 "ld $6, 24(%1)\n\t" \ 6264 "ld $25, 0(%1)\n\t" /* target->t9 */ \ 6265 VALGRIND_CALL_NOREDIR_T9 \ 6266 "move %0, $2\n" \ 6267 : /*out*/ "=r" (_res) \ 6268 : /*in*/ "r" (&_argvec[0]) \ 6269 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 6270 ); \ 6271 lval = (__typeof__(lval)) (long)_res; \ 6272 } while (0) 6273 6274 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \ 6275 do { \ 6276 volatile OrigFn _orig = (orig); \ 6277 volatile unsigned long long _argvec[5]; \ 6278 volatile unsigned long long _res; \ 6279 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \ 6280 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \ 6281 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \ 6282 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \ 6283 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \ 6284 __asm__ volatile( \ 6285 "ld $4, 8(%1)\n\t" \ 6286 "ld $5, 16(%1)\n\t" \ 6287 "ld $6, 24(%1)\n\t" \ 6288 "ld $7, 32(%1)\n\t" \ 6289 "ld $25, 0(%1)\n\t" /* target->t9 */ \ 6290 VALGRIND_CALL_NOREDIR_T9 \ 6291 "move %0, $2\n" \ 6292 : /*out*/ "=r" (_res) \ 6293 : /*in*/ "r" (&_argvec[0]) \ 6294 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 6295 ); \ 6296 lval = (__typeof__(lval)) (long)_res; \ 6297 } while (0) 6298 6299 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \ 6300 do { \ 6301 volatile OrigFn _orig = (orig); \ 6302 volatile unsigned long long _argvec[6]; \ 6303 volatile unsigned long long _res; \ 6304 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \ 6305 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \ 6306 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \ 6307 _argvec[3] = 
MIPS64_LONG2REG_CAST(arg3); \ 6308 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \ 6309 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \ 6310 __asm__ volatile( \ 6311 "ld $4, 8(%1)\n\t" \ 6312 "ld $5, 16(%1)\n\t" \ 6313 "ld $6, 24(%1)\n\t" \ 6314 "ld $7, 32(%1)\n\t" \ 6315 "ld $8, 40(%1)\n\t" \ 6316 "ld $25, 0(%1)\n\t" /* target->t9 */ \ 6317 VALGRIND_CALL_NOREDIR_T9 \ 6318 "move %0, $2\n" \ 6319 : /*out*/ "=r" (_res) \ 6320 : /*in*/ "r" (&_argvec[0]) \ 6321 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 6322 ); \ 6323 lval = (__typeof__(lval)) (long)_res; \ 6324 } while (0) 6325 6326 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \ 6327 do { \ 6328 volatile OrigFn _orig = (orig); \ 6329 volatile unsigned long long _argvec[7]; \ 6330 volatile unsigned long long _res; \ 6331 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \ 6332 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \ 6333 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \ 6334 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \ 6335 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \ 6336 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \ 6337 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \ 6338 __asm__ volatile( \ 6339 "ld $4, 8(%1)\n\t" \ 6340 "ld $5, 16(%1)\n\t" \ 6341 "ld $6, 24(%1)\n\t" \ 6342 "ld $7, 32(%1)\n\t" \ 6343 "ld $8, 40(%1)\n\t" \ 6344 "ld $9, 48(%1)\n\t" \ 6345 "ld $25, 0(%1)\n\t" /* target->t9 */ \ 6346 VALGRIND_CALL_NOREDIR_T9 \ 6347 "move %0, $2\n" \ 6348 : /*out*/ "=r" (_res) \ 6349 : /*in*/ "r" (&_argvec[0]) \ 6350 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 6351 ); \ 6352 lval = (__typeof__(lval)) (long)_res; \ 6353 } while (0) 6354 6355 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 6356 arg7) \ 6357 do { \ 6358 volatile OrigFn _orig = (orig); \ 6359 volatile unsigned long long _argvec[8]; \ 6360 volatile unsigned long long _res; \ 6361 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \ 6362 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \ 6363 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \ 6364 _argvec[3] = 
MIPS64_LONG2REG_CAST(arg3); \ 6365 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \ 6366 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \ 6367 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \ 6368 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \ 6369 __asm__ volatile( \ 6370 "ld $4, 8(%1)\n\t" \ 6371 "ld $5, 16(%1)\n\t" \ 6372 "ld $6, 24(%1)\n\t" \ 6373 "ld $7, 32(%1)\n\t" \ 6374 "ld $8, 40(%1)\n\t" \ 6375 "ld $9, 48(%1)\n\t" \ 6376 "ld $10, 56(%1)\n\t" \ 6377 "ld $25, 0(%1) \n\t" /* target->t9 */ \ 6378 VALGRIND_CALL_NOREDIR_T9 \ 6379 "move %0, $2\n" \ 6380 : /*out*/ "=r" (_res) \ 6381 : /*in*/ "r" (&_argvec[0]) \ 6382 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 6383 ); \ 6384 lval = (__typeof__(lval)) (long)_res; \ 6385 } while (0) 6386 6387 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 6388 arg7,arg8) \ 6389 do { \ 6390 volatile OrigFn _orig = (orig); \ 6391 volatile unsigned long long _argvec[9]; \ 6392 volatile unsigned long long _res; \ 6393 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \ 6394 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \ 6395 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \ 6396 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \ 6397 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \ 6398 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \ 6399 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \ 6400 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \ 6401 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \ 6402 __asm__ volatile( \ 6403 "ld $4, 8(%1)\n\t" \ 6404 "ld $5, 16(%1)\n\t" \ 6405 "ld $6, 24(%1)\n\t" \ 6406 "ld $7, 32(%1)\n\t" \ 6407 "ld $8, 40(%1)\n\t" \ 6408 "ld $9, 48(%1)\n\t" \ 6409 "ld $10, 56(%1)\n\t" \ 6410 "ld $11, 64(%1)\n\t" \ 6411 "ld $25, 0(%1) \n\t" /* target->t9 */ \ 6412 VALGRIND_CALL_NOREDIR_T9 \ 6413 "move %0, $2\n" \ 6414 : /*out*/ "=r" (_res) \ 6415 : /*in*/ "r" (&_argvec[0]) \ 6416 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 6417 ); \ 6418 lval = (__typeof__(lval)) (long)_res; \ 6419 } while (0) 6420 6421 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 6422 
arg7,arg8,arg9) \ 6423 do { \ 6424 volatile OrigFn _orig = (orig); \ 6425 volatile unsigned long long _argvec[10]; \ 6426 volatile unsigned long long _res; \ 6427 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \ 6428 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \ 6429 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \ 6430 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \ 6431 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \ 6432 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \ 6433 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \ 6434 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \ 6435 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \ 6436 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \ 6437 __asm__ volatile( \ 6438 "dsubu $29, $29, 8\n\t" \ 6439 "ld $4, 72(%1)\n\t" \ 6440 "sd $4, 0($29)\n\t" \ 6441 "ld $4, 8(%1)\n\t" \ 6442 "ld $5, 16(%1)\n\t" \ 6443 "ld $6, 24(%1)\n\t" \ 6444 "ld $7, 32(%1)\n\t" \ 6445 "ld $8, 40(%1)\n\t" \ 6446 "ld $9, 48(%1)\n\t" \ 6447 "ld $10, 56(%1)\n\t" \ 6448 "ld $11, 64(%1)\n\t" \ 6449 "ld $25, 0(%1)\n\t" /* target->t9 */ \ 6450 VALGRIND_CALL_NOREDIR_T9 \ 6451 "daddu $29, $29, 8\n\t" \ 6452 "move %0, $2\n" \ 6453 : /*out*/ "=r" (_res) \ 6454 : /*in*/ "r" (&_argvec[0]) \ 6455 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 6456 ); \ 6457 lval = (__typeof__(lval)) (long)_res; \ 6458 } while (0) 6459 6460 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 6461 arg7,arg8,arg9,arg10) \ 6462 do { \ 6463 volatile OrigFn _orig = (orig); \ 6464 volatile unsigned long long _argvec[11]; \ 6465 volatile unsigned long long _res; \ 6466 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \ 6467 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \ 6468 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \ 6469 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \ 6470 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \ 6471 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \ 6472 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \ 6473 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \ 6474 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \ 6475 _argvec[9] = 
MIPS64_LONG2REG_CAST(arg9); \ 6476 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \ 6477 __asm__ volatile( \ 6478 "dsubu $29, $29, 16\n\t" \ 6479 "ld $4, 72(%1)\n\t" \ 6480 "sd $4, 0($29)\n\t" \ 6481 "ld $4, 80(%1)\n\t" \ 6482 "sd $4, 8($29)\n\t" \ 6483 "ld $4, 8(%1)\n\t" \ 6484 "ld $5, 16(%1)\n\t" \ 6485 "ld $6, 24(%1)\n\t" \ 6486 "ld $7, 32(%1)\n\t" \ 6487 "ld $8, 40(%1)\n\t" \ 6488 "ld $9, 48(%1)\n\t" \ 6489 "ld $10, 56(%1)\n\t" \ 6490 "ld $11, 64(%1)\n\t" \ 6491 "ld $25, 0(%1)\n\t" /* target->t9 */ \ 6492 VALGRIND_CALL_NOREDIR_T9 \ 6493 "daddu $29, $29, 16\n\t" \ 6494 "move %0, $2\n" \ 6495 : /*out*/ "=r" (_res) \ 6496 : /*in*/ "r" (&_argvec[0]) \ 6497 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 6498 ); \ 6499 lval = (__typeof__(lval)) (long)_res; \ 6500 } while (0) 6501 6502 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \ 6503 arg6,arg7,arg8,arg9,arg10, \ 6504 arg11) \ 6505 do { \ 6506 volatile OrigFn _orig = (orig); \ 6507 volatile unsigned long long _argvec[12]; \ 6508 volatile unsigned long long _res; \ 6509 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \ 6510 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \ 6511 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \ 6512 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \ 6513 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \ 6514 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \ 6515 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \ 6516 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \ 6517 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \ 6518 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \ 6519 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \ 6520 _argvec[11] = MIPS64_LONG2REG_CAST(arg11); \ 6521 __asm__ volatile( \ 6522 "dsubu $29, $29, 24\n\t" \ 6523 "ld $4, 72(%1)\n\t" \ 6524 "sd $4, 0($29)\n\t" \ 6525 "ld $4, 80(%1)\n\t" \ 6526 "sd $4, 8($29)\n\t" \ 6527 "ld $4, 88(%1)\n\t" \ 6528 "sd $4, 16($29)\n\t" \ 6529 "ld $4, 8(%1)\n\t" \ 6530 "ld $5, 16(%1)\n\t" \ 6531 "ld $6, 24(%1)\n\t" \ 6532 "ld $7, 32(%1)\n\t" \ 6533 "ld $8, 40(%1)\n\t" \ 6534 "ld $9, 
48(%1)\n\t" \ 6535 "ld $10, 56(%1)\n\t" \ 6536 "ld $11, 64(%1)\n\t" \ 6537 "ld $25, 0(%1)\n\t" /* target->t9 */ \ 6538 VALGRIND_CALL_NOREDIR_T9 \ 6539 "daddu $29, $29, 24\n\t" \ 6540 "move %0, $2\n" \ 6541 : /*out*/ "=r" (_res) \ 6542 : /*in*/ "r" (&_argvec[0]) \ 6543 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 6544 ); \ 6545 lval = (__typeof__(lval)) (long)_res; \ 6546 } while (0) 6547 6548 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \ 6549 arg6,arg7,arg8,arg9,arg10, \ 6550 arg11,arg12) \ 6551 do { \ 6552 volatile OrigFn _orig = (orig); \ 6553 volatile unsigned long long _argvec[13]; \ 6554 volatile unsigned long long _res; \ 6555 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \ 6556 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \ 6557 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \ 6558 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \ 6559 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \ 6560 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \ 6561 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \ 6562 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \ 6563 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \ 6564 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \ 6565 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \ 6566 _argvec[11] = MIPS64_LONG2REG_CAST(arg11); \ 6567 _argvec[12] = MIPS64_LONG2REG_CAST(arg12); \ 6568 __asm__ volatile( \ 6569 "dsubu $29, $29, 32\n\t" \ 6570 "ld $4, 72(%1)\n\t" \ 6571 "sd $4, 0($29)\n\t" \ 6572 "ld $4, 80(%1)\n\t" \ 6573 "sd $4, 8($29)\n\t" \ 6574 "ld $4, 88(%1)\n\t" \ 6575 "sd $4, 16($29)\n\t" \ 6576 "ld $4, 96(%1)\n\t" \ 6577 "sd $4, 24($29)\n\t" \ 6578 "ld $4, 8(%1)\n\t" \ 6579 "ld $5, 16(%1)\n\t" \ 6580 "ld $6, 24(%1)\n\t" \ 6581 "ld $7, 32(%1)\n\t" \ 6582 "ld $8, 40(%1)\n\t" \ 6583 "ld $9, 48(%1)\n\t" \ 6584 "ld $10, 56(%1)\n\t" \ 6585 "ld $11, 64(%1)\n\t" \ 6586 "ld $25, 0(%1)\n\t" /* target->t9 */ \ 6587 VALGRIND_CALL_NOREDIR_T9 \ 6588 "daddu $29, $29, 32\n\t" \ 6589 "move %0, $2\n" \ 6590 : /*out*/ "=r" (_res) \ 6591 : /*in*/ "r" (&_argvec[0]) \ 6592 : /*trash*/ 
"memory", __CALLER_SAVED_REGS \ 6593 ); \ 6594 lval = (__typeof__(lval)) (long)_res; \ 6595 } while (0) 6596 6597 #endif /* PLAT_mips64_linux */ 6598 6599 /* ------------------------------------------------------------------ */ 6600 /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */ 6601 /* */ 6602 /* ------------------------------------------------------------------ */ 6603 6604 /* Some request codes. There are many more of these, but most are not 6605 exposed to end-user view. These are the public ones, all of the 6606 form 0x1000 + small_number. 6607 6608 Core ones are in the range 0x00000000--0x0000ffff. The non-public 6609 ones start at 0x2000. 6610 */ 6611 6612 /* These macros are used by tools -- they must be public, but don't 6613 embed them into other programs. */ 6614 #define VG_USERREQ_TOOL_BASE(a,b) \ 6615 ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16)) 6616 #define VG_IS_TOOL_USERREQ(a, b, v) \ 6617 (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000)) 6618 6619 /* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !! 6620 This enum comprises an ABI exported by Valgrind to programs 6621 which use client requests. DO NOT CHANGE THE NUMERIC VALUES OF THESE 6622 ENTRIES, NOR DELETE ANY -- add new ones at the end of the most 6623 relevant group. */ 6624 typedef 6625 enum { VG_USERREQ__RUNNING_ON_VALGRIND = 0x1001, 6626 VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002, 6627 6628 /* These allow any function to be called from the simulated 6629 CPU but run on the real CPU. Nb: the first arg passed to 6630 the function is always the ThreadId of the running 6631 thread! So CLIENT_CALL0 actually requires a 1 arg 6632 function, etc. */ 6633 VG_USERREQ__CLIENT_CALL0 = 0x1101, 6634 VG_USERREQ__CLIENT_CALL1 = 0x1102, 6635 VG_USERREQ__CLIENT_CALL2 = 0x1103, 6636 VG_USERREQ__CLIENT_CALL3 = 0x1104, 6637 6638 /* Can be useful in regression testing suites -- eg. can 6639 send Valgrind's output to /dev/null and still count 6640 errors. 
*/ 6641 VG_USERREQ__COUNT_ERRORS = 0x1201, 6642 6643 /* Allows the client program and/or gdbserver to execute a monitor 6644 command. */ 6645 VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202, 6646 6647 /* Allows the client program to change a dynamic command line 6648 option. */ 6649 VG_USERREQ__CLO_CHANGE = 0x1203, 6650 6651 /* These are useful and can be interpreted by any tool that 6652 tracks malloc() et al, by using vg_replace_malloc.c. */ 6653 VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301, 6654 VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b, 6655 VG_USERREQ__FREELIKE_BLOCK = 0x1302, 6656 /* Memory pool support. */ 6657 VG_USERREQ__CREATE_MEMPOOL = 0x1303, 6658 VG_USERREQ__DESTROY_MEMPOOL = 0x1304, 6659 VG_USERREQ__MEMPOOL_ALLOC = 0x1305, 6660 VG_USERREQ__MEMPOOL_FREE = 0x1306, 6661 VG_USERREQ__MEMPOOL_TRIM = 0x1307, 6662 VG_USERREQ__MOVE_MEMPOOL = 0x1308, 6663 VG_USERREQ__MEMPOOL_CHANGE = 0x1309, 6664 VG_USERREQ__MEMPOOL_EXISTS = 0x130a, 6665 6666 /* Allow printfs to valgrind log. */ 6667 /* The first two pass the va_list argument by value, which 6668 assumes it is the same size as or smaller than a UWord, 6669 which generally isn't the case. Hence are deprecated. 6670 The second two pass the vargs by reference and so are 6671 immune to this problem. */ 6672 /* both :: char* fmt, va_list vargs (DEPRECATED) */ 6673 VG_USERREQ__PRINTF = 0x1401, 6674 VG_USERREQ__PRINTF_BACKTRACE = 0x1402, 6675 /* both :: char* fmt, va_list* vargs */ 6676 VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403, 6677 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404, 6678 6679 /* Stack support. */ 6680 VG_USERREQ__STACK_REGISTER = 0x1501, 6681 VG_USERREQ__STACK_DEREGISTER = 0x1502, 6682 VG_USERREQ__STACK_CHANGE = 0x1503, 6683 6684 /* Wine support */ 6685 VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601, 6686 6687 /* Querying of debug info. */ 6688 VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701, 6689 6690 /* Disable/enable error reporting level. 
Takes a single 6691 Word arg which is the delta to this thread's error 6692 disablement indicator. Hence 1 disables or further 6693 disables errors, and -1 moves back towards enablement. 6694 Other values are not allowed. */ 6695 VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801, 6696 6697 /* Some requests used for Valgrind internal, such as 6698 self-test or self-hosting. */ 6699 /* Initialise IR injection */ 6700 VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901, 6701 /* Used by Inner Valgrind to inform Outer Valgrind where to 6702 find the list of inner guest threads */ 6703 VG_USERREQ__INNER_THREADS = 0x1902 6704 } Vg_ClientRequest; 6705 6706 #if !defined(__GNUC__) 6707 # define __extension__ /* */ 6708 #endif 6709 6710 6711 /* Returns the number of Valgrinds this code is running under. That 6712 is, 0 if running natively, 1 if running under Valgrind, 2 if 6713 running under Valgrind which is running under another Valgrind, 6714 etc. */ 6715 #define RUNNING_ON_VALGRIND \ 6716 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */, \ 6717 VG_USERREQ__RUNNING_ON_VALGRIND, \ 6718 0, 0, 0, 0, 0) \ 6719 6720 6721 /* Discard translation of code in the range [_qzz_addr .. _qzz_addr + 6722 _qzz_len - 1]. Useful if you are debugging a JITter or some such, 6723 since it provides a way to make sure valgrind will retranslate the 6724 invalidated area. Returns no value. */ 6725 #define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len) \ 6726 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \ 6727 _qzz_addr, _qzz_len, 0, 0, 0) 6728 6729 #define VALGRIND_INNER_THREADS(_qzz_addr) \ 6730 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__INNER_THREADS, \ 6731 _qzz_addr, 0, 0, 0, 0) 6732 6733 6734 /* These requests are for getting Valgrind itself to print something. 6735 Possibly with a backtrace. This is a really ugly hack. The return value 6736 is the number of characters printed, excluding the "**<pid>** " part at the 6737 start and the backtrace (if present). 
*/

/* NOTE(review): '&&' binds tighter than '||', so this guard reads as
   __GNUC__ || (__INTEL_COMPILER && !_MSC_VER): the prototype is emitted for
   every __GNUC__ compiler even when _MSC_VER is also defined.  Presumably
   intentional (only Intel-on-Windows defines both and lacks the GNU
   attribute syntax) -- confirm before "fixing" the parenthesisation. */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
/* Modern GCC will optimize the static routine out if unused,
   and unused attribute will shut down warnings about it.  */
static int VALGRIND_PRINTF(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
static int
#if defined(_MSC_VER)
__inline   /* MSVC has no __attribute__((unused)); __inline silences
              unreferenced-static warnings for header-defined functions. */
#endif
VALGRIND_PRINTF(const char *format, ...)
{
#if defined(NVALGRIND)
   /* Client requests compiled out: swallow the format string, print
      nothing, report zero characters printed. */
   (void)format;
   return 0;
#else /* NVALGRIND */
#if defined(_MSC_VER) || defined(__MINGW64__)
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
   /* The va_list is passed BY REFERENCE (&vargs): a va_list is not
      guaranteed to fit in a UWord, which is why the by-value
      VG_USERREQ__PRINTF request is deprecated (see the enum above). */
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   /* Number of characters printed (0 when not running under Valgrind). */
   return (int)_qzz_res;
#endif /* NVALGRIND */
}

/* Same contract as VALGRIND_PRINTF, but Valgrind also emits a stack
   backtrace after the message. */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
{
#if defined(NVALGRIND)
   (void)format;
   return 0;
#else /* NVALGRIND */
#if defined(_MSC_VER) || defined(__MINGW64__)
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
   /* va_list passed by reference, as in VALGRIND_PRINTF above. */
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* NVALGRIND */
}


/* These requests allow control to move from the simulated CPU to the
   real CPU, calling an arbitrary function.

   Note that the current ThreadId is inserted as the first argument.
   So this call:

     VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)

   requires f to have this signature:

     Word f(Word tid, Word arg1, Word arg2)

   where "Word" is a word-sized type.

   Note that these client requests are not entirely reliable.  For example,
   if you call a function with them that subsequently calls printf(),
   there's a high chance Valgrind will crash.  Generally, your prospects of
   these working are made higher if the called function does not refer to
   any global variables, and does not refer to any libc or other functions
   (printf et al).  Any kind of entanglement with libc or dynamic linking is
   likely to have a bad outcome, for tricky reasons which we've grappled
   with a lot in the past.
*/
/* Call _qyy_fn(tid) on the real CPU (the ThreadId is inserted as the
   implicit first argument -- see the comment above). */
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL0,     \
                                    _qyy_fn,                      \
                                    0, 0, 0, 0)

/* Call _qyy_fn(tid, _qyy_arg1) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL1,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, 0, 0, 0)

/* Call _qyy_fn(tid, _qyy_arg1, _qyy_arg2) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)    \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL2,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2, 0, 0)

/* Call _qyy_fn(tid, _qyy_arg1, _qyy_arg2, _qyy_arg3) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,             \
                                    VG_USERREQ__CLIENT_CALL3,           \
                                    _qyy_fn,                            \
                                    _qyy_arg1, _qyy_arg2,               \
                                    _qyy_arg3, 0)


/* Counts the number of errors that have been recorded by a tool.  Nb:
   the tool must record the errors with VG_(maybe_record_error)() or
   VG_(unique_error)() for them to be counted.  Evaluates to 0 when not
   running under Valgrind. */
#define VALGRIND_COUNT_ERRORS                                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(                    \
                               0 /* default return */,            \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0)

/* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
   when heap blocks are allocated in order to give accurate results.  This
   happens automatically for the standard allocator functions such as
   malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
   delete[], etc.

   But if your program uses a custom allocator, this doesn't automatically
   happen, and Valgrind will not do as well.  For example, if you allocate
   superblocks with mmap() and then allocate chunks of the superblocks, all
   Valgrind's observations will be at the mmap() level and it won't know that
   the chunks should be considered separate entities.
In Memcheck's case, 6889 that means you probably won't get heap block overrun detection (because 6890 there won't be redzones marked as unaddressable) and you definitely won't 6891 get any leak detection. 6892 6893 The following client requests allow a custom allocator to be annotated so 6894 that it can be handled accurately by Valgrind. 6895 6896 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated 6897 by a malloc()-like function. For Memcheck (an illustrative case), this 6898 does two things: 6899 6900 - It records that the block has been allocated. This means any addresses 6901 within the block mentioned in error messages will be 6902 identified as belonging to the block. It also means that if the block 6903 isn't freed it will be detected by the leak checker. 6904 6905 - It marks the block as being addressable and undefined (if 'is_zeroed' is 6906 not set), or addressable and defined (if 'is_zeroed' is set). This 6907 controls how accesses to the block by the program are handled. 6908 6909 'addr' is the start of the usable block (ie. after any 6910 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator 6911 can apply redzones -- these are blocks of padding at the start and end of 6912 each block. Adding redzones is recommended as it makes it much more likely 6913 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is 6914 zeroed (or filled with another predictable value), as is the case for 6915 calloc(). 6916 6917 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a 6918 heap block -- that will be used by the client program -- is allocated. 
6919 It's best to put it at the outermost level of the allocator if possible; 6920 for example, if you have a function my_alloc() which calls 6921 internal_alloc(), and the client request is put inside internal_alloc(), 6922 stack traces relating to the heap block will contain entries for both 6923 my_alloc() and internal_alloc(), which is probably not what you want. 6924 6925 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out 6926 custom blocks from within a heap block, B, that has been allocated with 6927 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking 6928 -- the custom blocks will take precedence. 6929 6930 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For 6931 Memcheck, it does two things: 6932 6933 - It records that the block has been deallocated. This assumes that the 6934 block was annotated as having been allocated via 6935 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued. 6936 6937 - It marks the block as being unaddressable. 6938 6939 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a 6940 heap block is deallocated. 6941 6942 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For 6943 Memcheck, it does four things: 6944 6945 - It records that the size of a block has been changed. This assumes that 6946 the block was annotated as having been allocated via 6947 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued. 6948 6949 - If the block shrunk, it marks the freed memory as being unaddressable. 6950 6951 - If the block grew, it marks the new area as undefined and defines a red 6952 zone past the end of the new block. 6953 6954 - The V-bits of the overlap between the old and the new block are preserved. 6955 6956 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block 6957 and before deallocation of the old block. 

   In many cases, these three client requests will not be enough to get your
   allocator working well with Memcheck.  More specifically, if your allocator
   writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
   will be necessary to mark the memory as addressable just before the zeroing
   occurs, otherwise you'll get a lot of invalid write errors.  For example,
   you'll need to do this if your allocator recycles freed blocks, but it
   zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
   Alternatively, if your allocator reuses freed blocks for allocator-internal
   data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.

   Really, what's happening is a blurring of the lines between the client
   program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
   memory should be considered unaddressable to the client program, but the
   allocator knows more than the rest of the client program and so may be able
   to safely access it.  Extra client requests are necessary for Valgrind to
   understand the distinction between the allocator and the rest of the
   program.

   Ignored if addr == 0.
*/
/* addr = start of usable block (after any redzone), sizeB = usable size,
   rzB = redzone size, is_zeroed = nonzero if the memory starts defined
   (calloc-style).  Full contract in the comment above. */
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)    \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
                                    addr, sizeB, rzB, is_zeroed, 0)

/* In-place resize of a block previously announced with
   VALGRIND_MALLOCLIKE_BLOCK.  See the comment for
   VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB)  \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK, \
                                    addr, oldSizeB, newSizeB, rzB, 0)

/* Partner to VALGRIND_MALLOCLIKE_BLOCK: announce deallocation.  See the
   comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                        \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,   \
                                    addr, rzB, 0, 0, 0)

/* Create a memory pool.
*/
/* pool = anchor address identifying the pool in later mempool requests;
   rzB / is_zeroed apply to pieces later associated via
   VALGRIND_MEMPOOL_ALLOC. */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,   \
                                    pool, rzB, is_zeroed, 0, 0)

/* Create a memory pool with some flags specifying extended behaviour.
   When flags is zero, the behaviour is identical to VALGRIND_CREATE_MEMPOOL.

   The flag VALGRIND_MEMPOOL_METAPOOL specifies that the pieces of memory
   associated with the pool using VALGRIND_MEMPOOL_ALLOC will be used
   by the application as superblocks to dole out MALLOC_LIKE blocks using
   VALGRIND_MALLOCLIKE_BLOCK.  In other words, a meta pool is a "2 levels"
   pool: the first level is the blocks described by VALGRIND_MEMPOOL_ALLOC.
   The second level blocks are described using VALGRIND_MALLOCLIKE_BLOCK.
   Note that the association between the pool and the second level blocks
   is implicit: second level blocks will be located inside first level
   blocks.  It is necessary to use the VALGRIND_MEMPOOL_METAPOOL flag
   for such 2 levels pools, as otherwise valgrind will detect overlapping
   memory blocks, and will abort execution (e.g. during leak search).

   Such a meta pool can also be marked as an 'auto free' pool using the flag
   VALGRIND_MEMPOOL_AUTO_FREE, which must be OR-ed together with the
   VALGRIND_MEMPOOL_METAPOOL.  For an 'auto free' pool, VALGRIND_MEMPOOL_FREE
   will automatically free the second level blocks that are contained
   inside the first level block freed with VALGRIND_MEMPOOL_FREE.
   In other words, calling VALGRIND_MEMPOOL_FREE will cause implicit calls
   to VALGRIND_FREELIKE_BLOCK for all the second level blocks included
   in the first level block.
   Note: it is an error to use the VALGRIND_MEMPOOL_AUTO_FREE flag
   without the VALGRIND_MEMPOOL_METAPOOL flag.
*/
#define VALGRIND_MEMPOOL_AUTO_FREE  1
#define VALGRIND_MEMPOOL_METAPOOL   2
/* Same request code as VALGRIND_CREATE_MEMPOOL; the flags travel in the
   fourth argument slot, which plain VALGRIND_CREATE_MEMPOOL leaves zero. */
#define VALGRIND_CREATE_MEMPOOL_EXT(pool, rzB, is_zeroed, flags)  \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,    \
                                   pool, rzB, is_zeroed, flags, 0)

/* Destroy a memory pool. */
#define VALGRIND_DESTROY_MEMPOOL(pool)                            \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,  \
                                    pool, 0, 0, 0, 0)

/* Associate a piece of memory with a memory pool. */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                  \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,    \
                                    pool, addr, size, 0, 0)

/* Disassociate a piece of memory from a memory pool. */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,     \
                                    pool, addr, 0, 0, 0)

/* Disassociate any pieces outside a particular range. */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,     \
                                    pool, addr, size, 0, 0)

/* Tell the tool that the pool previously anchored at poolA has moved to
   poolB.  (The old comment here, "Resize and/or move a piece associated
   with a memory pool", was a copy-paste of VALGRIND_MEMPOOL_CHANGE's;
   see the Valgrind manual's description of mempool client requests.) */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                       \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,     \
                                    poolA, poolB, 0, 0, 0)

/* Resize and/or move a piece associated with a memory pool. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,   \
                                    pool, addrA, addrB, size, 0)

/* Return 1 if a mempool exists, else 0. */
#define VALGRIND_MEMPOOL_EXISTS(pool)                             \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MEMPOOL_EXISTS,        \
                               pool, 0, 0, 0, 0)

/* Mark a piece of memory as being a stack.  Returns a stack id.
   start is the lowest addressable stack byte, end is the highest
   addressable stack byte.
*/
#define VALGRIND_STACK_REGISTER(start, end)                       \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                                    VG_USERREQ__STACK_REGISTER,   \
                                    start, end, 0, 0, 0)

/* Unmark the piece of memory associated with a stack id as being a
   stack.  (id is the value previously returned by
   VALGRIND_STACK_REGISTER.) */
#define VALGRIND_STACK_DEREGISTER(id)                             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
                                    id, 0, 0, 0, 0)

/* Change the start and end address of the stack id.
   start is the new lowest addressable stack byte, end is the new highest
   addressable stack byte. */
#define VALGRIND_STACK_CHANGE(id, start, end)                     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,     \
                                    id, start, end, 0, 0)

/* Load PDB debug info for Wine PE image_map. */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
                                    fd, ptr, total_size, delta, 0)

/* Map a code address to a source file name and line number.  buf64
   must point to a 64-byte buffer in the caller's address space.  The
   result will be dumped in there and is guaranteed to be zero
   terminated.  If no info is found, the first byte is set to zero. */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                    \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                                    VG_USERREQ__MAP_IP_TO_SRCLOC, \
                                    addr, buf64, 0, 0, 0)

/* Disable error reporting for this thread.  Behaves in a stack like
   way, so you can safely call this multiple times provided that
   VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
   to re-enable reporting.  The first call of this macro disables
   reporting.  Subsequent calls have no effect except to increase the
   number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
   reporting.  Child threads do not inherit this setting from their
   parents -- they are always created with reporting enabled.
*/
/* Delta of +1 to this thread's error-disablement counter (see
   VG_USERREQ__CHANGE_ERR_DISABLEMENT in the enum above: only +1/-1 are
   allowed). */
#define VALGRIND_DISABLE_ERROR_REPORTING                                \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    1, 0, 0, 0, 0)

/* Re-enable error reporting, as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING. */
#define VALGRIND_ENABLE_ERROR_REPORTING                                 \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    -1, 0, 0, 0, 0)

/* Execute a monitor command from the client program.
   If a connection is opened with GDB, the output will be sent
   according to the output mode set for vgdb.
   If no connection is opened, output will go to the log output.
   Returns 1 if command not recognised, 0 otherwise. */
#define VALGRIND_MONITOR_COMMAND(command)                              \
   VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
                                   command, 0, 0, 0, 0)


/* Change the value of a dynamic command line option.
   Note that unknown or not dynamically changeable options
   will cause a warning message to be output. */
#define VALGRIND_CLO_CHANGE(option)                           \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CLO_CHANGE,    \
                                   option, 0, 0, 0, 0)


/* Scrub the internal PLAT_* selection symbols so they do not leak into
   client code that includes this header. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_nanomips_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris

#endif   /* __VALGRIND_H */