/* File indexing completed on 2024-05-12 05:25:55 */
/* -*- c -*-
   ----------------------------------------------------------------

   Notice that the following BSD-style license applies to this one
   file (valgrind.h) only.  The rest of Valgrind is licensed under the
   terms of the GNU General Public License, version 2, unless
   otherwise indicated.  See the COPYING file in the source
   distribution for details.

   ----------------------------------------------------------------

   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright (C) 2000-2013 Julian Seward.  All rights reserved.

   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions
   are met:

   1. Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.

   2. The origin of this software must not be misrepresented; you must
      not claim that you wrote the original software.  If you use this
      software in a product, an acknowledgment in the product
      documentation would be appreciated but is not required.

   3. Altered source versions must be plainly marked as such, and must
      not be misrepresented as being the original software.

   4. The name of the author may not be used to endorse or promote
      products derived from this software without specific prior
      written permission.

   THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
   OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
   WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
   DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
   DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
   GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
   WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

   ----------------------------------------------------------------

   Notice that the above BSD-style license applies to this one file
   (valgrind.h) only.  The entire rest of Valgrind is licensed under
   the terms of the GNU General Public License, version 2.  See the
   COPYING file in the source distribution for details.

   ----------------------------------------------------------------
*/


/* This file is for inclusion into client (your!) code.

   You can use these macros to manipulate and query Valgrind's
   execution inside your own programs.

   The resulting executables will still run without Valgrind, just a
   little bit more slowly than they otherwise would, but otherwise
   unchanged.  When not running on valgrind, each client request
   consumes very few (eg. 7) instructions, so the resulting performance
   loss is negligible unless you plan to execute client requests
   millions of times per second.  Nevertheless, if that is still a
   problem, you can compile with the NVALGRIND symbol defined (gcc
   -DNVALGRIND) so that client requests are not even compiled in.
*/

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wreserved-id-macro"

#ifndef __VALGRIND_H
#define __VALGRIND_H


/* ------------------------------------------------------------------ */
/* VERSION NUMBER OF VALGRIND                                         */
/* ------------------------------------------------------------------ */

/* Specify Valgrind's version number, so that user code can
   conditionally compile based on our version number.  Note that these
   were introduced at version 3.6 and so do not exist in version 3.5
   or earlier.  The recommended way to use them to check for "version
   X.Y or later" is (eg)

   #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__)   \
       && (__VALGRIND_MAJOR__ > 3                                   \
           || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
*/
#define __VALGRIND_MAJOR__    3
#define __VALGRIND_MINOR__    10


#include <stdarg.h>
#ifdef HAVE_STDINT_H
#if HAVE_STDINT_H
#include <stdint.h>
#endif
#endif


/* Nb: this file might be included in a file compiled with -ansi.  So
   we can't use C++ style "//" comments nor the "asm" keyword (instead
   use "__asm__"). */

/* Derive some tags indicating what the target platform is.  Note
   that in this file we're using the compiler's CPP symbols for
   identifying architectures, which are different to the ones we use
   within the rest of Valgrind.  Note, __powerpc__ is active for both
   32 and 64-bit PPC, whereas __powerpc64__ is only active for the
   latter (on Linux, that is).

   Misc note: how to find out what's predefined in gcc by default:
   gcc -Wp,-dM somefile.c
*/
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64_linux
#undef PLAT_arm_linux
#undef PLAT_arm64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux


#if defined(__APPLE__) && defined(__i386__)
#  define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
#  define PLAT_amd64_darwin 1
#elif (defined(__MINGW32__) && !defined(__MINGW64__)) \
      || defined(__CYGWIN32__) \
      || (defined(_WIN32) && defined(_M_IX86))
#  define PLAT_x86_win32 1
#elif defined(__MINGW64__) \
      || (defined(_WIN64) && defined(_M_X64))
#  define PLAT_amd64_win64 1
#elif defined(__linux__) && defined(__i386__)
#  define PLAT_x86_linux 1
#elif defined(__linux__) && defined(__x86_64__)
#  define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
#  define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__)
#  define PLAT_ppc64_linux 1
#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
#  define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
#  define PLAT_arm64_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
#  define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==64)
#  define PLAT_mips64_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips!=64)
#  define PLAT_mips32_linux 1
#else
/* If we're not compiling for our target platform, don't generate
   any inline asms.  */
#  if !defined(NVALGRIND)
#    define NVALGRIND 1
#  endif
#endif

/* XXX: Unfortunately x64 Visual C++ does not suport inline asms,
 * so disable the use of valgrind's inline asm's for x64 Visual C++
 * builds, so that x64 Visual C++ builds of GLib can be maintained
 */
#if defined (PLAT_amd64_win64) && defined (_MSC_VER)
#  if !defined(NVALGRIND)
#    define NVALGRIND 1
#  endif
#endif


/* ------------------------------------------------------------------ */
/* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS.  There is nothing */
/* in here of use to end-users -- skip to the next section.           */
/* ------------------------------------------------------------------ */

/*
 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
 * request. Accepts both pointers and integers as arguments.
 *
 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
 * client request that does not return a value.
 *
 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
 * client request and whose value equals the client request result.  Accepts
 * both pointers and integers as arguments.  Note that such calls are not
 * necessarily pure functions -- they may have side effects.
 */

#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                          _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)   \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

#if defined(NVALGRIND)

/* Define NVALGRIND to completely remove the Valgrind magic sequence
   from the compiled code (analogous to NDEBUG's effects on
   assert()) */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
      (_zzq_default)

#else  /* ! NVALGRIND */

/* The following defines the magic code sequences which the JITter
   spots and handles magically.  Don't look too closely at them as
   they will rot your brain.

   The assembly code sequences for all architectures is in this one
   file.  This is because this file must be stand-alone, and we don't
   want to have multiple files.

   For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
   value gets put in the return slot, so that everything works when
   this is executed not under Valgrind.  Args are passed in a memory
   block, and so there's no intrinsic limit to the number that could
   be passed, but it's currently five.

   The macro args are:
      _zzq_rlval    result lvalue
      _zzq_default  default value (result returned when running on real CPU)
      _zzq_request  request code
      _zzq_arg1..5  request params

   The other two macros are used to support function wrapping, and are
   a lot simpler.  VALGRIND_GET_NR_CONTEXT returns the value of the
   guest's NRADDR pseudo-register and whatever other information is
   needed to safely run the call original from the wrapper: on
   ppc64-linux, the R2 value at the divert point is also needed.  This
   information is abstracted into a user-visible type, OrigFn.

   VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
   guest, but guarantees that the branch instruction will not be
   redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
   branch-and-link-to-r11.  VALGRIND_CALL_NOREDIR is just text, not a
   complete inline asm, since it needs to be combined with more magic
   inline asm stuff to be useful.
*/

/* ------------------------- x86-{linux,darwin} ---------------- */

#if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
    || (defined(PLAT_x86_win32) && defined(__GNUC__))

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* NOTE: the four rotates sum to 64 bits, i.e. two full 32-bit
   rotations, so the sequence leaves %edi unchanged on a real CPU;
   it exists only as the marker pattern the JITter recognises. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3, %%edi ; roll $13, %%edi\n\t"       \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EDX = client_request ( %EAX ) */         \
                     "xchgl %%ebx,%%ebx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EAX = guest_NRADDR */                    \
                     "xchgl %%ecx,%%ecx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgl %%edi,%%edi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__) */

/* ------------------------- x86-Win32 ------------------------- */

#if defined(PLAT_x86_win32) && !defined(__GNUC__)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#if defined(_MSC_VER)

/* MSVC inline-assembler spelling of the same marker rotate sequence. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
   __asm rol edi, 3  __asm rol edi, 13                            \
   __asm rol edi, 29 __asm rol edi, 19

/* MSVC cannot express the GCC statement-expression form, so the
   request is routed through a real (inline) function instead. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    valgrind_do_client_request_expr((uintptr_t)(_zzq_default),    \
        (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1),        \
        (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3),           \
        (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))

static __inline uintptr_t
valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
                                uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
                                uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
                                uintptr_t _zzq_arg5)
{
    volatile uintptr_t _zzq_args[6];
    volatile unsigned int _zzq_result;
    _zzq_args[0] = (uintptr_t)(_zzq_request);
    _zzq_args[1] = (uintptr_t)(_zzq_arg1);
    _zzq_args[2] = (uintptr_t)(_zzq_arg2);
    _zzq_args[3] = (uintptr_t)(_zzq_arg3);
    _zzq_args[4] = (uintptr_t)(_zzq_arg4);
    _zzq_args[5] = (uintptr_t)(_zzq_arg5);
    __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
            __SPECIAL_INSTRUCTION_PREAMBLE
            /* %EDX = client_request ( %EAX ) */
            __asm xchg ebx,ebx
            __asm mov _zzq_result, edx
    }
    return _zzq_result;
}

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm { __SPECIAL_INSTRUCTION_PREAMBLE                        \
            /* %EAX = guest_NRADDR */                             \
            __asm xchg ecx,ecx                                    \
            __asm mov __addr, eax                                 \
    }                                                             \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Deliberately undefined: expanding this under MSVC is an error. */
#define VALGRIND_CALL_NOREDIR_EAX ERROR

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm { __SPECIAL_INSTRUCTION_PREAMBLE                        \
            __asm xchg edi,edi                                    \
    }                                                             \
 } while (0)

#else
#error Unsupported compiler.
#endif

#endif /* PLAT_x86_win32 */

/* ------------------------ amd64-{linux,darwin} --------------- */

#if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
    || (defined(PLAT_amd64_win64) && defined(__GNUC__))

typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
   }
   OrigFn;

/* NOTE: the four rotates sum to 128 bits, i.e. two full 64-bit
   rotations, so the sequence leaves %rdi unchanged on a real CPU. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3, %%rdi ; rolq $13, %%rdi\n\t"       \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    __extension__                                                 \
    ({ volatile unsigned long long int _zzq_args[6];              \
       volatile unsigned long long int _zzq_result;               \
       _zzq_args[0] = (unsigned long long int)(_zzq_request);     \
       _zzq_args[1] = (unsigned long long int)(_zzq_arg1);        \
       _zzq_args[2] = (unsigned long long int)(_zzq_arg2);        \
       _zzq_args[3] = (unsigned long long int)(_zzq_arg3);        \
       _zzq_args[4] = (unsigned long long int)(_zzq_arg4);        \
       _zzq_args[5] = (unsigned long long int)(_zzq_arg5);        \
       __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE            \
                        /* %RDX = client_request ( %RAX ) */      \
                        "xchgq %%rbx,%%rbx"                       \
                        : "=d" (_zzq_result)                      \
                        : "a" (&_zzq_args[0]), "0" (_zzq_default) \
                        : "cc", "memory"                          \
                       );                                         \
       _zzq_result;                                               \
    })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long long int __addr;                       \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RAX = guest_NRADDR */                    \
                     "xchgq %%rcx,%%rcx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgq %%rdi,%%rdi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_amd64_linux || PLAT_amd64_darwin
          || (PLAT_amd64_win64 && __GNUC__) */

/* ------------------------- amd64-Win64 ------------------------- */

#if defined(PLAT_amd64_win64) && !defined(__GNUC__)

#error Unsupported compiler.

#endif /* PLAT_amd64_win64 */

/* ------------------------ ppc32-linux ------------------------ */

#if defined(PLAT_ppc32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Marker sequence: rotations of r0 summing to 64 bits (a no-op
   on a real CPU) that the JITter pattern-matches. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                    "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t"  \
                    "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({ unsigned int _zzq_args[6];                                   \
     unsigned int _zzq_result;                                    \
     unsigned int* _zzq_ptr;                                      \
     _zzq_args[0] = (unsigned int)(_zzq_request);                 \
     _zzq_args[1] = (unsigned int)(_zzq_arg1);                    \
     _zzq_args[2] = (unsigned int)(_zzq_arg2);                    \
     _zzq_args[3] = (unsigned int)(_zzq_arg3);                    \
     _zzq_args[4] = (unsigned int)(_zzq_arg4);                    \
     _zzq_args[5] = (unsigned int)(_zzq_arg5);                    \
     _zzq_ptr = _zzq_args;                                        \
     __asm__ volatile("mr 3,%1\n\t" /*default*/                   \
                      "mr 4,%2\n\t" /*ptr*/                       \
                      __SPECIAL_INSTRUCTION_PREAMBLE              \
                      /* %R3 = client_request ( %R4 ) */          \
                      "or 1,1,1\n\t"                              \
                      "mr %0,3"     /*result*/                    \
                      : "=b" (_zzq_result)                        \
                      : "b" (_zzq_default), "b" (_zzq_ptr)        \
                      : "cc", "memory", "r3", "r4");              \
     _zzq_result;                                                 \
     })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc32_linux */

/* ------------------------ ppc64-linux ------------------------ */

#if defined(PLAT_ppc64_linux)

typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
      unsigned long long int r2;     /* what tocptr do we need? */
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"           \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({ unsigned long long int _zzq_args[6];                         \
     unsigned long long int _zzq_result;                          \
     unsigned long long int* _zzq_ptr;                            \
     _zzq_args[0] = (unsigned long long int)(_zzq_request);       \
     _zzq_args[1] = (unsigned long long int)(_zzq_arg1);          \
     _zzq_args[2] = (unsigned long long int)(_zzq_arg2);          \
     _zzq_args[3] = (unsigned long long int)(_zzq_arg3);          \
     _zzq_args[4] = (unsigned long long int)(_zzq_arg4);          \
     _zzq_args[5] = (unsigned long long int)(_zzq_arg5);          \
     _zzq_ptr = _zzq_args;                                        \
     __asm__ volatile("mr 3,%1\n\t" /*default*/                   \
                      "mr 4,%2\n\t" /*ptr*/                       \
                      __SPECIAL_INSTRUCTION_PREAMBLE              \
                      /* %R3 = client_request ( %R4 ) */          \
                      "or 1,1,1\n\t"                              \
                      "mr %0,3"     /*result*/                    \
                      : "=b" (_zzq_result)                        \
                      : "b" (_zzq_default), "b" (_zzq_ptr)        \
                      : "cc", "memory", "r3", "r4");              \
     _zzq_result;                                                 \
     })

/* On ppc64 both the NRADDR and the R2 (TOC pointer) value at the
   divert point are collected, since both are needed to call the
   original function from a wrapper. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long long int __addr;                                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64_linux */

/* ------------------------- arm-linux ------------------------- */

#if defined(PLAT_arm_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Marker sequence: rotations of r12 summing to 64 bits (a no-op
   on a real CPU) that the JITter pattern-matches. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t"   \
            "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("mov r3, %1\n\t" /*default*/                 \
                     "mov r4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = client_request ( R4 ) */             \
                     "orr r10, r10, r10\n\t"                      \
                     "mov %0, r3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "cc","memory", "r3", "r4");                \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = guest_NRADDR */                      \
                     "orr r11, r11, r11\n\t"                      \
                     "mov %0, r3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R4 */        \
                     "orr r12, r12, r12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr r9, r9, r9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm_linux */

/* ------------------------ arm64-linux ------------------------- */

0689 #if defined(PLAT_arm64_linux) 0690 0691 typedef 0692 struct { 0693 unsigned long long int nraddr; /* where's the code? */ 0694 } 0695 OrigFn; 0696 0697 #define __SPECIAL_INSTRUCTION_PREAMBLE \ 0698 "ror x12, x12, #3 ; ror x12, x12, #13 \n\t" \ 0699 "ror x12, x12, #51 ; ror x12, x12, #61 \n\t" 0700 0701 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \ 0702 _zzq_default, _zzq_request, \ 0703 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \ 0704 \ 0705 __extension__ \ 0706 ({volatile unsigned long long int _zzq_args[6]; \ 0707 volatile unsigned long long int _zzq_result; \ 0708 _zzq_args[0] = (unsigned long long int)(_zzq_request); \ 0709 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \ 0710 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \ 0711 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \ 0712 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \ 0713 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \ 0714 __asm__ volatile("mov x3, %1\n\t" /*default*/ \ 0715 "mov x4, %2\n\t" /*ptr*/ \ 0716 __SPECIAL_INSTRUCTION_PREAMBLE \ 0717 /* X3 = client_request ( X4 ) */ \ 0718 "orr x10, x10, x10\n\t" \ 0719 "mov %0, x3" /*result*/ \ 0720 : "=r" (_zzq_result) \ 0721 : "r" (_zzq_default), "r" (&_zzq_args[0]) \ 0722 : "cc","memory", "x3", "x4"); \ 0723 _zzq_result; \ 0724 }) 0725 0726 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \ 0727 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \ 0728 unsigned long long int __addr; \ 0729 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \ 0730 /* X3 = guest_NRADDR */ \ 0731 "orr x11, x11, x11\n\t" \ 0732 "mov %0, x3" \ 0733 : "=r" (__addr) \ 0734 : \ 0735 : "cc", "memory", "x3" \ 0736 ); \ 0737 _zzq_orig->nraddr = __addr; \ 0738 } 0739 0740 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \ 0741 __SPECIAL_INSTRUCTION_PREAMBLE \ 0742 /* branch-and-link-to-noredir X8 */ \ 0743 "orr x12, x12, x12\n\t" 0744 0745 #define VALGRIND_VEX_INJECT_IR() \ 0746 do { \ 0747 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \ 0748 "orr 
x9, x9, x9\n\t" \ 0749 : : : "cc", "memory" \ 0750 ); \ 0751 } while (0) 0752 0753 #endif /* PLAT_arm64_linux */ 0754 0755 /* ------------------------ s390x-linux ------------------------ */ 0756 0757 #if defined(PLAT_s390x_linux) 0758 0759 typedef 0760 struct { 0761 unsigned long long int nraddr; /* where's the code? */ 0762 } 0763 OrigFn; 0764 0765 /* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific 0766 * code. This detection is implemented in platform specific toIR.c 0767 * (e.g. VEX/priv/guest_s390_decoder.c). 0768 */ 0769 #define __SPECIAL_INSTRUCTION_PREAMBLE \ 0770 "lr 15,15\n\t" \ 0771 "lr 1,1\n\t" \ 0772 "lr 2,2\n\t" \ 0773 "lr 3,3\n\t" 0774 0775 #define __CLIENT_REQUEST_CODE "lr 2,2\n\t" 0776 #define __GET_NR_CONTEXT_CODE "lr 3,3\n\t" 0777 #define __CALL_NO_REDIR_CODE "lr 4,4\n\t" 0778 #define __VEX_INJECT_IR_CODE "lr 5,5\n\t" 0779 0780 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \ 0781 _zzq_default, _zzq_request, \ 0782 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \ 0783 __extension__ \ 0784 ({volatile unsigned long long int _zzq_args[6]; \ 0785 volatile unsigned long long int _zzq_result; \ 0786 _zzq_args[0] = (unsigned long long int)(_zzq_request); \ 0787 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \ 0788 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \ 0789 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \ 0790 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \ 0791 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \ 0792 __asm__ volatile(/* r2 = args */ \ 0793 "lgr 2,%1\n\t" \ 0794 /* r3 = default */ \ 0795 "lgr 3,%2\n\t" \ 0796 __SPECIAL_INSTRUCTION_PREAMBLE \ 0797 __CLIENT_REQUEST_CODE \ 0798 /* results = r3 */ \ 0799 "lgr %0, 3\n\t" \ 0800 : "=d" (_zzq_result) \ 0801 : "a" (&_zzq_args[0]), "0" (_zzq_default) \ 0802 : "cc", "2", "3", "memory" \ 0803 ); \ 0804 _zzq_result; \ 0805 }) 0806 0807 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \ 0808 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \ 0809 
volatile unsigned long long int __addr;                          \
      __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                       __GET_NR_CONTEXT_CODE                         \
                       "lgr %0, 3\n\t"                               \
                       : "=a" (__addr)                               \
                       :                                             \
                       : "cc", "3", "memory"                         \
                      );                                             \
      _zzq_orig->nraddr = __addr;                                    \
    }

/* Emits the magic preamble followed by the no-redirect-call opcode;
   used when making a call that the Valgrind JIT must not redirect. */
#define VALGRIND_CALL_NOREDIR_R1                                 \
                    __SPECIAL_INSTRUCTION_PREAMBLE               \
                    __CALL_NO_REDIR_CODE

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     __VEX_INJECT_IR_CODE);                      \
 } while (0)

#endif /* PLAT_s390x_linux */

/* ------------------------- mips32-linux ---------------- */

#if defined(PLAT_mips32_linux)

/* Context of an original (non-redirected) function: just its address
   on this 32-bit platform. */
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* .word 0x342
 * .word 0x742
 * .word 0xC2
 * .word 0x4C2*/
/* Four shifts of $0 by $0 -- architecturally no-ops, since $0 is the
   hardwired zero register -- that the Valgrind JIT recognises as the
   start of a client request. */
#define __SPECIAL_INSTRUCTION_PREAMBLE          \
     "srl $0, $0, 13\n\t"                       \
     "srl $0, $0, 29\n\t"                       \
     "srl $0, $0, 3\n\t"                        \
     "srl $0, $0, 19\n\t"

/* Evaluates to _zzq_default when not running under Valgrind (the
   preamble is a no-op and $11 still holds the default moved in from
   %1).  Under Valgrind, the preamble plus "or $13,$13,$13" is
   intercepted: the 6-word request block pointed to by $12 is
   serviced and the result is left in $11. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
  ({ volatile unsigned int _zzq_args[6];                          \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("move $11, %1\n\t" /*default*/               \
                     "move $12, %2\n\t" /*ptr*/                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* T3 = client_request ( T4 ) */             \
                     "or $13, $13, $13\n\t"                       \
                     "move %0, $11\n\t" /*result*/                \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "$11", "$12");                             \
    _zzq_result;                                                  \
  })

/* Stores the non-redirected address of the original function into
   _zzq_rlval.nraddr (zero when not running under Valgrind). */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %t9 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11" /*result*/                    \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11"                                      \
                     );                                           \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_CALL_NOREDIR_T9                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%t9 */                      \
                     "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or $11, $11, $11\n\t"                       \
                    );                                            \
 } while (0)


#endif /* PLAT_mips32_linux */

/* ------------------------- mips64-linux ---------------- */

#if defined(PLAT_mips64_linux)

/* 64-bit variant of the OrigFn context. */
typedef
   struct {
      unsigned long long nraddr; /* where's the code? */
   }
   OrigFn;

/* dsll $0,$0, 3
 * dsll $0,$0, 13
 * dsll $0,$0, 29
 * dsll $0,$0, 19*/
#define __SPECIAL_INSTRUCTION_PREAMBLE                              \
        "dsll $0,$0, 3 ; dsll $0,$0,13\n\t"                         \
        "dsll $0,$0,29 ; dsll $0,$0,19\n\t"

/* Same protocol as the mips32 version above, widened to 64-bit
   words: default in via $11, pointer to the request block in $12,
   result back in $11. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                            \
       _zzq_default, _zzq_request,                                  \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)       \
   __extension__                                                    \
   ({ volatile unsigned long long int _zzq_args[6];                 \
    volatile unsigned long long int _zzq_result;                    \
    _zzq_args[0] = (unsigned long long int)(_zzq_request);          \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1);             \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2);             \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3);             \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4);             \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5);             \
        __asm__ volatile("move $11, %1\n\t" /*default*/             \
                         "move $12, %2\n\t" /*ptr*/                 \
                         __SPECIAL_INSTRUCTION_PREAMBLE             \
                         /* $11 = client_request ( $12 ) */         \
                         "or $13, $13, $13\n\t"                     \
                         "move %0, $11\n\t" /*result*/              \
                         : "=r" (_zzq_result)                       \
                         : "r" (_zzq_default), "r" (&_zzq_args[0])  \
                         : "$11", "$12");                           \
    _zzq_result;                                                    \
   })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                         \
   { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                    \
     volatile unsigned long long int __addr;                        \
     __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                      /* $11 = guest_NRADDR */                      \
                      "or $14, $14, $14\n\t"                        \
                      "move %0, $11" /*result*/                     \
                      : "=r" (__addr)                               \
                      :                                             \
                      : "$11");                                     \
     _zzq_orig->nraddr = __addr;                                    \
   }

#define VALGRIND_CALL_NOREDIR_T9                                    \
   __SPECIAL_INSTRUCTION_PREAMBLE                                   \
   /* call-noredir $25 */                                           \
   "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR()                                    \
 do {                                                               \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                     "or $11, $11, $11\n\t"                         \
                    );                                              \
 } while (0)

#endif /* PLAT_mips64_linux */

/* Insert assembly code for other platforms here... */

#endif /* NVALGRIND */


/* ------------------------------------------------------------------ */
/* PLATFORM SPECIFICS for FUNCTION WRAPPING.  This is all very      */
/* ugly.  It's the least-worst tradeoff I can think of.             */
/* ------------------------------------------------------------------ */

/* This section defines magic (a.k.a appalling-hack) macros for doing
   guaranteed-no-redirection macros, so as to get from function
   wrappers to the functions they are wrapping.  The whole point is to
   construct standard call sequences, but to do the call itself with a
   special no-redirect call pseudo-instruction that the JIT
   understands and handles specially.  This section is long and
   repetitious, and I can't see a way to make it shorter.

   The naming scheme is as follows:

      CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}

   'W' stands for "word" and 'v' for "void".
Hence there are
   different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
   and for each, the possibility of returning a word-typed result, or
   no result.
*/

/* Use these to write the name of your wrapper.  NOTE: duplicates
   VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h.  NOTE also: inserts
   the default behaviour equivalance class tag "0000" into the name.
   See pub_tool_redir.h for details -- normally you don't need to
   think about this, though. */

/* Use an extra level of macroisation so as to ensure the soname/fnname
   args are fully macro-expanded before pasting them together. */
#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd

/* NOTE(review): the ZU/ZZ suffixes select the Z-encoding convention
   applied to the two name components -- see pub_tool_redir.h. */
#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)

/* Use this macro from within a wrapper function to collect the
   context (address and possibly other info) of the original function.
   Once you have that you can then use it in one of the CALL_FN_
   macros.  The type of the argument _lval is OrigFn. */
#define VALGRIND_GET_ORIG_FN(_lval)  VALGRIND_GET_NR_CONTEXT(_lval)

/* Also provide end-user facilities for function replacement, rather
   than wrapping.  A replacement function differs from a wrapper in
   that it has no way to get hold of the original function being
   called, and hence no way to call onwards to it.  In a replacement
   function, VALGRIND_GET_ORIG_FN always returns zero. */

#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)

#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)

/* Derivatives of the main macros below, for calling functions
   returning void.  Each simply calls the word-returning variant and
   discards the result in a volatile local. */

#define CALL_FN_v_v(fnptr)                                        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_v(_junk,fnptr); } while (0)

#define CALL_FN_v_W(fnptr, arg1)                                  \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_W(_junk,fnptr,arg1); } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2)                            \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                      \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)             \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)   \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)

/* ------------------------- x86-{linux,darwin} ---------------- */

#if defined(PLAT_x86_linux)  ||  defined(PLAT_x86_darwin)

/* These regs are trashed by the hidden call.  No need to mention eax
   as gcc can already see that, plus causes gcc to bomb. */
#define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions.
*/

/* Saves the incoming %esp in %edi, then 16-aligns %esp; hence every
   CALL_FN_ macro below lists "edi" in its clobbers. */
#define VALGRIND_ALIGN_STACK \
      "movl %%esp,%%edi\n\t" \
      "andl $0xfffffff0,%%esp\n\t"
#define VALGRIND_RESTORE_STACK \
      "movl %%edi,%%esp\n\t"

/* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
   long) == 4. */

/* Common scheme for all of the following: _argvec[0] holds the
   target address and _argvec[1..n] the n word-sized arguments.
   Arguments are pushed right-to-left; the leading "subl $k, %%esp"
   (k = 12, 8, 4 or absent) pads the frame so that the pad plus the
   n 4-byte pushes keep %esp 16-aligned at the call. */

#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 48(%%eax)\n\t"                                    \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin */

/* ------------------------ amd64-{linux,darwin} --------------- */

#if defined(PLAT_amd64_linux)  ||  defined(PLAT_amd64_darwin)

/* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */

/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi",       \
                            "rdi", "r8", "r9", "r10", "r11"

/* This is all pretty complex.  It's so as to make stack unwinding
   work reliably.  See bug 243270.  The basic problem is the sub and
   add of 128 of %rsp in all of the following macros.  If gcc believes
   the CFA is in %rsp, then unwinding may fail, because what's at the
   CFA is not what gcc "expected" when it constructs the CFIs for the
   places where the macros are instantiated.

   But we can't just add a CFI annotation to increase the CFA offset
   by 128, to match the sub of 128 from %rsp, because we don't know
   whether gcc has chosen %rsp as the CFA at that point, or whether it
   has chosen some other register (eg, %rbp).  In the latter case,
   adding a CFI annotation to change the CFA offset is simply wrong.

   So the solution is to get hold of the CFA using
   __builtin_dwarf_cfa(), put it in a known register, and add a
   CFI annotation to say what the register is.  We choose %rbp for
   this (perhaps perversely), because:

   (1) %rbp is already subject to unwinding.  If a new register was
       chosen then the unwinder would have to unwind it in all stack
       traces, which is expensive, and

   (2) %rbp is already subject to precise exception updates in the
       JIT.
If a new register was chosen, we'd have to have precise
       exceptions for it too, which reduces performance of the
       generated code.

   However .. one extra complication.  We can't just whack the result
   of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
   list of trashed registers at the end of the inline assembly
   fragments; gcc won't allow %rbp to appear in that list.  Hence
   instead we need to stash %rbp in %r15 for the duration of the asm,
   and say that %r15 is trashed instead.  gcc seems happy to go with
   that.

   Oh .. and this all needs to be conditionalised so that it is
   unchanged from before this commit, when compiled with older gccs
   that don't support __builtin_dwarf_cfa.  Furthermore, since
   this header file is freestanding, it has to be independent of
   config.h, and so the following conditionalisation cannot depend on
   configure time checks.

   Although it's not clear from
   'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
   this expression excludes Darwin.
   .cfi directives in Darwin assembly appear to be completely
   different and I haven't investigated how they work.

   For even more entertainment value, note we have to use the
   completely undocumented __builtin_dwarf_cfa(), which appears to
   really compute the CFA, whereas __builtin_frame_address(0) claims
   to but actually doesn't.  See
   https://bugs.kde.org/show_bug.cgi?id=243270#c47
*/
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
#  define __FRAME_POINTER                                         \
      ,"r"(__builtin_dwarf_cfa())
#  define VALGRIND_CFI_PROLOGUE                                   \
      "movq %%rbp, %%r15\n\t"                                     \
      "movq %2, %%rbp\n\t"                                        \
      ".cfi_remember_state\n\t"                                   \
      ".cfi_def_cfa rbp, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "movq %%r15, %%rbp\n\t"                                     \
      ".cfi_restore_state\n\t"
#else
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE
#  define VALGRIND_CFI_EPILOGUE
#endif

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Saves the incoming %rsp in %r14, then 16-aligns %rsp; hence "r14"
   appears in the clobber lists below. */
#define VALGRIND_ALIGN_STACK \
      "movq %%rsp,%%r14\n\t" \
      "andq $0xfffffffffffffff0,%%rsp\n\t"
#define VALGRIND_RESTORE_STACK \
      "movq %%r14,%%rsp\n\t"

/* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
   long) == 8. */

/* NB 9 Sept 07.  There is a nasty kludge here in all these CALL_FN_
   macros.  In order not to trash the stack redzone, we need to drop
   %rsp by 128 before the hidden call, and restore afterwards.  The
   nastyness is that it is only by luck that the stack still appears
   to be unwindable during the hidden call - since then the behaviour
   of any routine using this macro does not match what the CFI data
   says.  Sigh.

   Why is this important?  Imagine that a wrapper has a stack
   allocated local, and passes to the hidden call, a pointer to it.
   Because gcc does not know about the hidden call, it may allocate
   that local in the redzone.  Unfortunately the hidden call may then
   trash it before it comes to use it.  So we must step clear of the
   redzone, for the duration of the hidden call, to make it safe.

   Probably the same problem afflicts the other redzone-style ABIs too
   (ppc64-linux); but for those, the stack is
   self describing (none of this CFI nonsense) so at least messing
   with the stack pointer doesn't give a danger of non-unwindable
   stack. */

/* Common scheme: _argvec[0] holds the target address, _argvec[1..n]
   the n word arguments.  The first six go in rdi/rsi/rdx/rcx/r8/r9
   per the comment above; the rest are pushed right-to-left.  The
   "subq $128" (or $136 when an odd number of pushes follows, to keep
   16-alignment) steps over the redzone as described above. */

#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq
24(%%rax), %%rdx\n\t" \ 1950 "movq 16(%%rax), %%rsi\n\t" \ 1951 "movq 8(%%rax), %%rdi\n\t" \ 1952 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1953 VALGRIND_CALL_NOREDIR_RAX \ 1954 VALGRIND_RESTORE_STACK \ 1955 VALGRIND_CFI_EPILOGUE \ 1956 : /*out*/ "=a" (_res) \ 1957 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 1958 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 1959 ); \ 1960 lval = (__typeof__(lval)) _res; \ 1961 } while (0) 1962 1963 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 1964 arg7,arg8,arg9,arg10,arg11) \ 1965 do { \ 1966 volatile OrigFn _orig = (orig); \ 1967 volatile unsigned long _argvec[12]; \ 1968 volatile unsigned long _res; \ 1969 _argvec[0] = (unsigned long)_orig.nraddr; \ 1970 _argvec[1] = (unsigned long)(arg1); \ 1971 _argvec[2] = (unsigned long)(arg2); \ 1972 _argvec[3] = (unsigned long)(arg3); \ 1973 _argvec[4] = (unsigned long)(arg4); \ 1974 _argvec[5] = (unsigned long)(arg5); \ 1975 _argvec[6] = (unsigned long)(arg6); \ 1976 _argvec[7] = (unsigned long)(arg7); \ 1977 _argvec[8] = (unsigned long)(arg8); \ 1978 _argvec[9] = (unsigned long)(arg9); \ 1979 _argvec[10] = (unsigned long)(arg10); \ 1980 _argvec[11] = (unsigned long)(arg11); \ 1981 __asm__ volatile( \ 1982 VALGRIND_CFI_PROLOGUE \ 1983 VALGRIND_ALIGN_STACK \ 1984 "subq $136,%%rsp\n\t" \ 1985 "pushq 88(%%rax)\n\t" \ 1986 "pushq 80(%%rax)\n\t" \ 1987 "pushq 72(%%rax)\n\t" \ 1988 "pushq 64(%%rax)\n\t" \ 1989 "pushq 56(%%rax)\n\t" \ 1990 "movq 48(%%rax), %%r9\n\t" \ 1991 "movq 40(%%rax), %%r8\n\t" \ 1992 "movq 32(%%rax), %%rcx\n\t" \ 1993 "movq 24(%%rax), %%rdx\n\t" \ 1994 "movq 16(%%rax), %%rsi\n\t" \ 1995 "movq 8(%%rax), %%rdi\n\t" \ 1996 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1997 VALGRIND_CALL_NOREDIR_RAX \ 1998 VALGRIND_RESTORE_STACK \ 1999 VALGRIND_CFI_EPILOGUE \ 2000 : /*out*/ "=a" (_res) \ 2001 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 2002 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 2003 ); \ 2004 lval = 
(__typeof__(lval)) _res; \ 2005 } while (0) 2006 2007 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 2008 arg7,arg8,arg9,arg10,arg11,arg12) \ 2009 do { \ 2010 volatile OrigFn _orig = (orig); \ 2011 volatile unsigned long _argvec[13]; \ 2012 volatile unsigned long _res; \ 2013 _argvec[0] = (unsigned long)_orig.nraddr; \ 2014 _argvec[1] = (unsigned long)(arg1); \ 2015 _argvec[2] = (unsigned long)(arg2); \ 2016 _argvec[3] = (unsigned long)(arg3); \ 2017 _argvec[4] = (unsigned long)(arg4); \ 2018 _argvec[5] = (unsigned long)(arg5); \ 2019 _argvec[6] = (unsigned long)(arg6); \ 2020 _argvec[7] = (unsigned long)(arg7); \ 2021 _argvec[8] = (unsigned long)(arg8); \ 2022 _argvec[9] = (unsigned long)(arg9); \ 2023 _argvec[10] = (unsigned long)(arg10); \ 2024 _argvec[11] = (unsigned long)(arg11); \ 2025 _argvec[12] = (unsigned long)(arg12); \ 2026 __asm__ volatile( \ 2027 VALGRIND_CFI_PROLOGUE \ 2028 VALGRIND_ALIGN_STACK \ 2029 "subq $128,%%rsp\n\t" \ 2030 "pushq 96(%%rax)\n\t" \ 2031 "pushq 88(%%rax)\n\t" \ 2032 "pushq 80(%%rax)\n\t" \ 2033 "pushq 72(%%rax)\n\t" \ 2034 "pushq 64(%%rax)\n\t" \ 2035 "pushq 56(%%rax)\n\t" \ 2036 "movq 48(%%rax), %%r9\n\t" \ 2037 "movq 40(%%rax), %%r8\n\t" \ 2038 "movq 32(%%rax), %%rcx\n\t" \ 2039 "movq 24(%%rax), %%rdx\n\t" \ 2040 "movq 16(%%rax), %%rsi\n\t" \ 2041 "movq 8(%%rax), %%rdi\n\t" \ 2042 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 2043 VALGRIND_CALL_NOREDIR_RAX \ 2044 VALGRIND_RESTORE_STACK \ 2045 VALGRIND_CFI_EPILOGUE \ 2046 : /*out*/ "=a" (_res) \ 2047 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 2048 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 2049 ); \ 2050 lval = (__typeof__(lval)) _res; \ 2051 } while (0) 2052 2053 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */ 2054 2055 /* ------------------------ ppc32-linux ------------------------ */ 2056 2057 #if defined(PLAT_ppc32_linux) 2058 2059 /* This is useful for finding out about the on-stack stuff: 2060 2061 extern int f9 ( 
int,int,int,int,int,int,int,int,int ); 2062 extern int f10 ( int,int,int,int,int,int,int,int,int,int ); 2063 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int ); 2064 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int ); 2065 2066 int g9 ( void ) { 2067 return f9(11,22,33,44,55,66,77,88,99); 2068 } 2069 int g10 ( void ) { 2070 return f10(11,22,33,44,55,66,77,88,99,110); 2071 } 2072 int g11 ( void ) { 2073 return f11(11,22,33,44,55,66,77,88,99,110,121); 2074 } 2075 int g12 ( void ) { 2076 return f12(11,22,33,44,55,66,77,88,99,110,121,132); 2077 } 2078 */ 2079 2080 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */ 2081 2082 /* These regs are trashed by the hidden call. */ 2083 #define __CALLER_SAVED_REGS \ 2084 "lr", "ctr", "xer", \ 2085 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \ 2086 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \ 2087 "r11", "r12", "r13" 2088 2089 /* Macros to save and align the stack before making a function 2090 call and restore it afterwards as gcc may not keep the stack 2091 pointer aligned if it doesn't realise calls are being made 2092 to other functions. */ 2093 2094 #define VALGRIND_ALIGN_STACK \ 2095 "mr 28,1\n\t" \ 2096 "rlwinm 1,1,0,0,27\n\t" 2097 #define VALGRIND_RESTORE_STACK \ 2098 "mr 1,28\n\t" 2099 2100 /* These CALL_FN_ macros assume that on ppc32-linux, 2101 sizeof(unsigned long) == 4. 
*/

/* Call a 0-argument function: the target address is read from
   _argvec[0] into r11 and the call goes through the Valgrind
   branch-and-link-no-redirect gate; the result comes back in r3. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 1..8 word args go in r3..r10, loaded from _argvec[1..8] at byte
   offsets 4..32 (4-byte words on ppc32). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 9+ args: an extra stack frame is opened (addi 1,1,-16 or -32) and
   the overflow args are stored at 8(1), 12(1), 16(1), 20(1), using
   r3 as scratch before the register args are loaded. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"                                       \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,arg12)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      _argvec[12] = (unsigned long)arg12;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"                                       \
         /* arg12 */                                              \
         "lwz 3,48(11)\n\t"                                       \
         "stw 3,20(1)\n\t"                                        \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#endif /* PLAT_ppc32_linux */

/* ------------------------ ppc64-linux ------------------------ */

#if defined(PLAT_ppc64_linux)

/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */

/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",   \
   "r11", "r12", "r13"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Saves r1 in r28, then clears the low 4 bits of r1 (rldicr keeps
   bits 0..59), i.e. rounds the stack down to a 16-byte boundary. */
#define VALGRIND_ALIGN_STACK                                      \
   "mr 28,1\n\t"                                                  \
   "rldicr 1,1,0,59\n\t"
#define VALGRIND_RESTORE_STACK                                    \
   "mr 1,28\n\t"

/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
   long) == 8.
*/

/* Call a 0-argument function on ppc64.  The TOC pointer (r2) is
   saved at -16(r11) and replaced with the callee's TOC pointer
   (_orig.r2, stored at _argvec[1], i.e. -8 from &_argvec[2]) for
   the duration of the call, then restored afterwards. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 1..8 word args go in r3..r10, loaded from _argvec[2+1 .. 2+8] at
   byte offsets 8..64 (8-byte words on ppc64). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */                      \
         "ld  4, 16(11)\n\t"  /* arg2->r4 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */                      \
         "ld  4, 16(11)\n\t"  /* arg2->r4 */                      \
         "ld  5, 24(11)\n\t"  /* arg3->r5 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+4];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */                      \
         "ld  4, 16(11)\n\t"  /* arg2->r4 */                      \
         "ld  5, 24(11)\n\t"  /* arg3->r5 */                      \
         "ld  6, 32(11)\n\t"  /* arg4->r6 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+5];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */                      \
         "ld  4, 16(11)\n\t"  /* arg2->r4 */                      \
         "ld  5, 24(11)\n\t"  /* arg3->r5 */                      \
         "ld  6, 32(11)\n\t"  /* arg4->r6 */                      \
         "ld  7, 40(11)\n\t"  /* arg5->r7 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+6];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */                      \
         "ld  4, 16(11)\n\t"  /* arg2->r4 */                      \
         "ld  5, 24(11)\n\t"  /* arg3->r5 */                      \
         "ld  6, 32(11)\n\t"  /* arg4->r6 */                      \
         "ld  7, 40(11)\n\t"  /* arg5->r7 */                      \
         "ld  8, 48(11)\n\t"  /* arg6->r8 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+7];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */                      \
         "ld  4, 16(11)\n\t"  /* arg2->r4 */                      \
         "ld  5, 24(11)\n\t"  /* arg3->r5 */                      \
         "ld  6, 32(11)\n\t"  /* arg4->r6 */                      \
         "ld  7, 40(11)\n\t"  /* arg5->r7 */                      \
         "ld  8, 48(11)\n\t"  /* arg6->r8 */                      \
         "ld  9, 56(11)\n\t"  /* arg7->r9 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+8];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */                      \
         "ld  4, 16(11)\n\t"  /* arg2->r4 */                      \
         "ld  5, 24(11)\n\t"  /* arg3->r5 */                      \
         "ld  6, 32(11)\n\t"  /* arg4->r6 */                      \
         "ld  7, 40(11)\n\t"  /* arg5->r7 */                      \
         "ld  8, 48(11)\n\t"  /* arg6->r8 */                      \
         "ld  9, 56(11)\n\t"  /* arg7->r9 */                      \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */                     \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+9];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] =
(unsigned long)arg7; \ 2906 _argvec[2+8] = (unsigned long)arg8; \ 2907 _argvec[2+9] = (unsigned long)arg9; \ 2908 __asm__ volatile( \ 2909 VALGRIND_ALIGN_STACK \ 2910 "mr 11,%1\n\t" \ 2911 "std 2,-16(11)\n\t" /* save tocptr */ \ 2912 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \ 2913 "addi 1,1,-128\n\t" /* expand stack frame */ \ 2914 /* arg9 */ \ 2915 "ld 3,72(11)\n\t" \ 2916 "std 3,112(1)\n\t" \ 2917 /* args1-8 */ \ 2918 "ld 3, 8(11)\n\t" /* arg1->r3 */ \ 2919 "ld 4, 16(11)\n\t" /* arg2->r4 */ \ 2920 "ld 5, 24(11)\n\t" /* arg3->r5 */ \ 2921 "ld 6, 32(11)\n\t" /* arg4->r6 */ \ 2922 "ld 7, 40(11)\n\t" /* arg5->r7 */ \ 2923 "ld 8, 48(11)\n\t" /* arg6->r8 */ \ 2924 "ld 9, 56(11)\n\t" /* arg7->r9 */ \ 2925 "ld 10, 64(11)\n\t" /* arg8->r10 */ \ 2926 "ld 11, 0(11)\n\t" /* target->r11 */ \ 2927 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2928 "mr 11,%1\n\t" \ 2929 "mr %0,3\n\t" \ 2930 "ld 2,-16(11)\n\t" /* restore tocptr */ \ 2931 VALGRIND_RESTORE_STACK \ 2932 : /*out*/ "=r" (_res) \ 2933 : /*in*/ "r" (&_argvec[2]) \ 2934 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2935 ); \ 2936 lval = (__typeof__(lval)) _res; \ 2937 } while (0) 2938 2939 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 2940 arg7,arg8,arg9,arg10) \ 2941 do { \ 2942 volatile OrigFn _orig = (orig); \ 2943 volatile unsigned long _argvec[3+10]; \ 2944 volatile unsigned long _res; \ 2945 /* _argvec[0] holds current r2 across the call */ \ 2946 _argvec[1] = (unsigned long)_orig.r2; \ 2947 _argvec[2] = (unsigned long)_orig.nraddr; \ 2948 _argvec[2+1] = (unsigned long)arg1; \ 2949 _argvec[2+2] = (unsigned long)arg2; \ 2950 _argvec[2+3] = (unsigned long)arg3; \ 2951 _argvec[2+4] = (unsigned long)arg4; \ 2952 _argvec[2+5] = (unsigned long)arg5; \ 2953 _argvec[2+6] = (unsigned long)arg6; \ 2954 _argvec[2+7] = (unsigned long)arg7; \ 2955 _argvec[2+8] = (unsigned long)arg8; \ 2956 _argvec[2+9] = (unsigned long)arg9; \ 2957 _argvec[2+10] = (unsigned long)arg10; \ 2958 __asm__ volatile( \ 
2959 VALGRIND_ALIGN_STACK \ 2960 "mr 11,%1\n\t" \ 2961 "std 2,-16(11)\n\t" /* save tocptr */ \ 2962 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \ 2963 "addi 1,1,-128\n\t" /* expand stack frame */ \ 2964 /* arg10 */ \ 2965 "ld 3,80(11)\n\t" \ 2966 "std 3,120(1)\n\t" \ 2967 /* arg9 */ \ 2968 "ld 3,72(11)\n\t" \ 2969 "std 3,112(1)\n\t" \ 2970 /* args1-8 */ \ 2971 "ld 3, 8(11)\n\t" /* arg1->r3 */ \ 2972 "ld 4, 16(11)\n\t" /* arg2->r4 */ \ 2973 "ld 5, 24(11)\n\t" /* arg3->r5 */ \ 2974 "ld 6, 32(11)\n\t" /* arg4->r6 */ \ 2975 "ld 7, 40(11)\n\t" /* arg5->r7 */ \ 2976 "ld 8, 48(11)\n\t" /* arg6->r8 */ \ 2977 "ld 9, 56(11)\n\t" /* arg7->r9 */ \ 2978 "ld 10, 64(11)\n\t" /* arg8->r10 */ \ 2979 "ld 11, 0(11)\n\t" /* target->r11 */ \ 2980 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2981 "mr 11,%1\n\t" \ 2982 "mr %0,3\n\t" \ 2983 "ld 2,-16(11)\n\t" /* restore tocptr */ \ 2984 VALGRIND_RESTORE_STACK \ 2985 : /*out*/ "=r" (_res) \ 2986 : /*in*/ "r" (&_argvec[2]) \ 2987 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2988 ); \ 2989 lval = (__typeof__(lval)) _res; \ 2990 } while (0) 2991 2992 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 2993 arg7,arg8,arg9,arg10,arg11) \ 2994 do { \ 2995 volatile OrigFn _orig = (orig); \ 2996 volatile unsigned long _argvec[3+11]; \ 2997 volatile unsigned long _res; \ 2998 /* _argvec[0] holds current r2 across the call */ \ 2999 _argvec[1] = (unsigned long)_orig.r2; \ 3000 _argvec[2] = (unsigned long)_orig.nraddr; \ 3001 _argvec[2+1] = (unsigned long)arg1; \ 3002 _argvec[2+2] = (unsigned long)arg2; \ 3003 _argvec[2+3] = (unsigned long)arg3; \ 3004 _argvec[2+4] = (unsigned long)arg4; \ 3005 _argvec[2+5] = (unsigned long)arg5; \ 3006 _argvec[2+6] = (unsigned long)arg6; \ 3007 _argvec[2+7] = (unsigned long)arg7; \ 3008 _argvec[2+8] = (unsigned long)arg8; \ 3009 _argvec[2+9] = (unsigned long)arg9; \ 3010 _argvec[2+10] = (unsigned long)arg10; \ 3011 _argvec[2+11] = (unsigned long)arg11; \ 3012 __asm__ volatile( \ 3013 
VALGRIND_ALIGN_STACK \ 3014 "mr 11,%1\n\t" \ 3015 "std 2,-16(11)\n\t" /* save tocptr */ \ 3016 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \ 3017 "addi 1,1,-144\n\t" /* expand stack frame */ \ 3018 /* arg11 */ \ 3019 "ld 3,88(11)\n\t" \ 3020 "std 3,128(1)\n\t" \ 3021 /* arg10 */ \ 3022 "ld 3,80(11)\n\t" \ 3023 "std 3,120(1)\n\t" \ 3024 /* arg9 */ \ 3025 "ld 3,72(11)\n\t" \ 3026 "std 3,112(1)\n\t" \ 3027 /* args1-8 */ \ 3028 "ld 3, 8(11)\n\t" /* arg1->r3 */ \ 3029 "ld 4, 16(11)\n\t" /* arg2->r4 */ \ 3030 "ld 5, 24(11)\n\t" /* arg3->r5 */ \ 3031 "ld 6, 32(11)\n\t" /* arg4->r6 */ \ 3032 "ld 7, 40(11)\n\t" /* arg5->r7 */ \ 3033 "ld 8, 48(11)\n\t" /* arg6->r8 */ \ 3034 "ld 9, 56(11)\n\t" /* arg7->r9 */ \ 3035 "ld 10, 64(11)\n\t" /* arg8->r10 */ \ 3036 "ld 11, 0(11)\n\t" /* target->r11 */ \ 3037 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 3038 "mr 11,%1\n\t" \ 3039 "mr %0,3\n\t" \ 3040 "ld 2,-16(11)\n\t" /* restore tocptr */ \ 3041 VALGRIND_RESTORE_STACK \ 3042 : /*out*/ "=r" (_res) \ 3043 : /*in*/ "r" (&_argvec[2]) \ 3044 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 3045 ); \ 3046 lval = (__typeof__(lval)) _res; \ 3047 } while (0) 3048 3049 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 3050 arg7,arg8,arg9,arg10,arg11,arg12) \ 3051 do { \ 3052 volatile OrigFn _orig = (orig); \ 3053 volatile unsigned long _argvec[3+12]; \ 3054 volatile unsigned long _res; \ 3055 /* _argvec[0] holds current r2 across the call */ \ 3056 _argvec[1] = (unsigned long)_orig.r2; \ 3057 _argvec[2] = (unsigned long)_orig.nraddr; \ 3058 _argvec[2+1] = (unsigned long)arg1; \ 3059 _argvec[2+2] = (unsigned long)arg2; \ 3060 _argvec[2+3] = (unsigned long)arg3; \ 3061 _argvec[2+4] = (unsigned long)arg4; \ 3062 _argvec[2+5] = (unsigned long)arg5; \ 3063 _argvec[2+6] = (unsigned long)arg6; \ 3064 _argvec[2+7] = (unsigned long)arg7; \ 3065 _argvec[2+8] = (unsigned long)arg8; \ 3066 _argvec[2+9] = (unsigned long)arg9; \ 3067 _argvec[2+10] = (unsigned long)arg10; \ 3068 
_argvec[2+11] = (unsigned long)arg11; \ 3069 _argvec[2+12] = (unsigned long)arg12; \ 3070 __asm__ volatile( \ 3071 VALGRIND_ALIGN_STACK \ 3072 "mr 11,%1\n\t" \ 3073 "std 2,-16(11)\n\t" /* save tocptr */ \ 3074 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \ 3075 "addi 1,1,-144\n\t" /* expand stack frame */ \ 3076 /* arg12 */ \ 3077 "ld 3,96(11)\n\t" \ 3078 "std 3,136(1)\n\t" \ 3079 /* arg11 */ \ 3080 "ld 3,88(11)\n\t" \ 3081 "std 3,128(1)\n\t" \ 3082 /* arg10 */ \ 3083 "ld 3,80(11)\n\t" \ 3084 "std 3,120(1)\n\t" \ 3085 /* arg9 */ \ 3086 "ld 3,72(11)\n\t" \ 3087 "std 3,112(1)\n\t" \ 3088 /* args1-8 */ \ 3089 "ld 3, 8(11)\n\t" /* arg1->r3 */ \ 3090 "ld 4, 16(11)\n\t" /* arg2->r4 */ \ 3091 "ld 5, 24(11)\n\t" /* arg3->r5 */ \ 3092 "ld 6, 32(11)\n\t" /* arg4->r6 */ \ 3093 "ld 7, 40(11)\n\t" /* arg5->r7 */ \ 3094 "ld 8, 48(11)\n\t" /* arg6->r8 */ \ 3095 "ld 9, 56(11)\n\t" /* arg7->r9 */ \ 3096 "ld 10, 64(11)\n\t" /* arg8->r10 */ \ 3097 "ld 11, 0(11)\n\t" /* target->r11 */ \ 3098 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 3099 "mr 11,%1\n\t" \ 3100 "mr %0,3\n\t" \ 3101 "ld 2,-16(11)\n\t" /* restore tocptr */ \ 3102 VALGRIND_RESTORE_STACK \ 3103 : /*out*/ "=r" (_res) \ 3104 : /*in*/ "r" (&_argvec[2]) \ 3105 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 3106 ); \ 3107 lval = (__typeof__(lval)) _res; \ 3108 } while (0) 3109 3110 #endif /* PLAT_ppc64_linux */ 3111 3112 /* ------------------------- arm-linux ------------------------- */ 3113 3114 #if defined(PLAT_arm_linux) 3115 3116 /* These regs are trashed by the hidden call. */ 3117 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4","r14" 3118 3119 /* Macros to save and align the stack before making a function 3120 call and restore it afterwards as gcc may not keep the stack 3121 pointer aligned if it doesn't realise calls are being made 3122 to other functions. */ 3123 3124 /* This is a bit tricky. We store the original stack pointer in r10 3125 as it is callee-saves. 
gcc doesn't allow the use of r11 for some
   reason.  Also, we can't directly "bic" the stack pointer in thumb
   mode since r13 isn't an allowed register number in that context.
   So use r4 as a temporary, since that is about to get trashed
   anyway, just after each use of this macro.  Side effect is we need
   to be very careful about any future changes, since
   VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
#define VALGRIND_ALIGN_STACK               \
   "mov r10, sp\n\t"                       \
   "mov r4,  sp\n\t"                       \
   "bic r4,  r4, #7\n\t"                   \
   "mov sp,  r4\n\t"
#define VALGRIND_RESTORE_STACK             \
   "mov sp,  r10\n\t"

/* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
   long) == 4. */

/* Hidden call with no args.  _argvec[0] holds the target address;
   args 1..4 (in the macros below) are passed in r0-r3 per the AAPCS,
   and any further args are pushed on the (8-byte-aligned) stack. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Hidden call with 1 word arg (r0). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Hidden call with 2 word args (r0, r1). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Hidden call with 3 word args (r0-r2). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Hidden call with 4 word args (r0-r3, the full AAPCS register set). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Hidden call with 5 word args: arg5 on the stack; the extra
   "sub sp, #4" keeps the total pushed bytes a multiple of 8. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "push {r0} \n\t"                                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Hidden call with 6 word args: arg5/arg6 pushed as a pair. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "push {r0, r1} \n\t"                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Hidden call with 7 word args: three stack args plus 4 bytes of
   padding for 8-byte stack alignment. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "push {r0, r1, r2} \n\t"                                 \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Hidden call with 8 word args: four stack args pushed together. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "push {r0, r1, r2, r3} \n\t"                             \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Hidden call with 9 word args: five stack args + 4 bytes padding. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Hidden call with 10 word args: arg10 pushed first (highest stack
   slot), then args 5-9 as a block. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #40] \n\t"                                 \
         "push {r0} \n\t"                                         \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Hidden call with 11 word args: seven stack args + 4 bytes padding. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #40] \n\t"                                 \
         "ldr r1, [%1, #44] \n\t"                                 \
         "push {r0, r1} \n\t"                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Hidden call with 12 word args: eight stack args, pushed 3 + 5. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned
*/
#define __CALLER_SAVED_REGS                                       \
     "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9",   \
     "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17",      \
     "x18", "x19", "x20", "x30",                                  \
     "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9",  \
     "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17",      \
     "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25",      \
     "v26", "v27", "v28", "v29", "v30", "v31"

/* x21 is callee-saved, so we can use it to save and restore SP around
   the hidden call. */
#define VALGRIND_ALIGN_STACK               \
   "mov x21, sp\n\t"                       \
   "bic sp, x21, #15\n\t"
#define VALGRIND_RESTORE_STACK             \
   "mov sp,  x21\n\t"

/* These CALL_FN_ macros assume that on arm64-linux,
   sizeof(unsigned long) == 8. */

/* Hidden call with no args.  _argvec[0] holds the target address;
   in the macros below args 1..8 are passed in x0-x7 per the AArch64
   PCS, with the target loaded into x8 for the non-redirected call. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Hidden call with 1 word arg (x0). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Hidden call with 2 word args (x0, x1). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Hidden call with 3 word args (x0-x2). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Hidden call with 4 word args (x0-x3). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Hidden call with 5 word args (x0-x4). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Hidden call with 6 word args (x0-x5). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Hidden call with 7 word args (x0-x6). */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
3806 arg7,arg8) \ 3807 do { \ 3808 volatile OrigFn _orig = (orig); \ 3809 volatile unsigned long _argvec[9]; \ 3810 volatile unsigned long _res; \ 3811 _argvec[0] = (unsigned long)_orig.nraddr; \ 3812 _argvec[1] = (unsigned long)(arg1); \ 3813 _argvec[2] = (unsigned long)(arg2); \ 3814 _argvec[3] = (unsigned long)(arg3); \ 3815 _argvec[4] = (unsigned long)(arg4); \ 3816 _argvec[5] = (unsigned long)(arg5); \ 3817 _argvec[6] = (unsigned long)(arg6); \ 3818 _argvec[7] = (unsigned long)(arg7); \ 3819 _argvec[8] = (unsigned long)(arg8); \ 3820 __asm__ volatile( \ 3821 VALGRIND_ALIGN_STACK \ 3822 "ldr x0, [%1, #8] \n\t" \ 3823 "ldr x1, [%1, #16] \n\t" \ 3824 "ldr x2, [%1, #24] \n\t" \ 3825 "ldr x3, [%1, #32] \n\t" \ 3826 "ldr x4, [%1, #40] \n\t" \ 3827 "ldr x5, [%1, #48] \n\t" \ 3828 "ldr x6, [%1, #56] \n\t" \ 3829 "ldr x7, [%1, #64] \n\t" \ 3830 "ldr x8, [%1] \n\t" /* target->x8 */ \ 3831 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \ 3832 VALGRIND_RESTORE_STACK \ 3833 "mov %0, x0" \ 3834 : /*out*/ "=r" (_res) \ 3835 : /*in*/ "0" (&_argvec[0]) \ 3836 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \ 3837 ); \ 3838 lval = (__typeof__(lval)) _res; \ 3839 } while (0) 3840 3841 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 3842 arg7,arg8,arg9) \ 3843 do { \ 3844 volatile OrigFn _orig = (orig); \ 3845 volatile unsigned long _argvec[10]; \ 3846 volatile unsigned long _res; \ 3847 _argvec[0] = (unsigned long)_orig.nraddr; \ 3848 _argvec[1] = (unsigned long)(arg1); \ 3849 _argvec[2] = (unsigned long)(arg2); \ 3850 _argvec[3] = (unsigned long)(arg3); \ 3851 _argvec[4] = (unsigned long)(arg4); \ 3852 _argvec[5] = (unsigned long)(arg5); \ 3853 _argvec[6] = (unsigned long)(arg6); \ 3854 _argvec[7] = (unsigned long)(arg7); \ 3855 _argvec[8] = (unsigned long)(arg8); \ 3856 _argvec[9] = (unsigned long)(arg9); \ 3857 __asm__ volatile( \ 3858 VALGRIND_ALIGN_STACK \ 3859 "sub sp, sp, #0x20 \n\t" \ 3860 "ldr x0, [%1, #8] \n\t" \ 3861 "ldr x1, [%1, #16] \n\t" \ 3862 
"ldr x2, [%1, #24] \n\t" \ 3863 "ldr x3, [%1, #32] \n\t" \ 3864 "ldr x4, [%1, #40] \n\t" \ 3865 "ldr x5, [%1, #48] \n\t" \ 3866 "ldr x6, [%1, #56] \n\t" \ 3867 "ldr x7, [%1, #64] \n\t" \ 3868 "ldr x8, [%1, #72] \n\t" \ 3869 "str x8, [sp, #0] \n\t" \ 3870 "ldr x8, [%1] \n\t" /* target->x8 */ \ 3871 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \ 3872 VALGRIND_RESTORE_STACK \ 3873 "mov %0, x0" \ 3874 : /*out*/ "=r" (_res) \ 3875 : /*in*/ "0" (&_argvec[0]) \ 3876 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \ 3877 ); \ 3878 lval = (__typeof__(lval)) _res; \ 3879 } while (0) 3880 3881 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 3882 arg7,arg8,arg9,arg10) \ 3883 do { \ 3884 volatile OrigFn _orig = (orig); \ 3885 volatile unsigned long _argvec[11]; \ 3886 volatile unsigned long _res; \ 3887 _argvec[0] = (unsigned long)_orig.nraddr; \ 3888 _argvec[1] = (unsigned long)(arg1); \ 3889 _argvec[2] = (unsigned long)(arg2); \ 3890 _argvec[3] = (unsigned long)(arg3); \ 3891 _argvec[4] = (unsigned long)(arg4); \ 3892 _argvec[5] = (unsigned long)(arg5); \ 3893 _argvec[6] = (unsigned long)(arg6); \ 3894 _argvec[7] = (unsigned long)(arg7); \ 3895 _argvec[8] = (unsigned long)(arg8); \ 3896 _argvec[9] = (unsigned long)(arg9); \ 3897 _argvec[10] = (unsigned long)(arg10); \ 3898 __asm__ volatile( \ 3899 VALGRIND_ALIGN_STACK \ 3900 "sub sp, sp, #0x20 \n\t" \ 3901 "ldr x0, [%1, #8] \n\t" \ 3902 "ldr x1, [%1, #16] \n\t" \ 3903 "ldr x2, [%1, #24] \n\t" \ 3904 "ldr x3, [%1, #32] \n\t" \ 3905 "ldr x4, [%1, #40] \n\t" \ 3906 "ldr x5, [%1, #48] \n\t" \ 3907 "ldr x6, [%1, #56] \n\t" \ 3908 "ldr x7, [%1, #64] \n\t" \ 3909 "ldr x8, [%1, #72] \n\t" \ 3910 "str x8, [sp, #0] \n\t" \ 3911 "ldr x8, [%1, #80] \n\t" \ 3912 "str x8, [sp, #8] \n\t" \ 3913 "ldr x8, [%1] \n\t" /* target->x8 */ \ 3914 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \ 3915 VALGRIND_RESTORE_STACK \ 3916 "mov %0, x0" \ 3917 : /*out*/ "=r" (_res) \ 3918 : /*in*/ "0" (&_argvec[0]) \ 3919 : /*trash*/ "cc", "memory", 
__CALLER_SAVED_REGS, "x21" \ 3920 ); \ 3921 lval = (__typeof__(lval)) _res; \ 3922 } while (0) 3923 3924 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 3925 arg7,arg8,arg9,arg10,arg11) \ 3926 do { \ 3927 volatile OrigFn _orig = (orig); \ 3928 volatile unsigned long _argvec[12]; \ 3929 volatile unsigned long _res; \ 3930 _argvec[0] = (unsigned long)_orig.nraddr; \ 3931 _argvec[1] = (unsigned long)(arg1); \ 3932 _argvec[2] = (unsigned long)(arg2); \ 3933 _argvec[3] = (unsigned long)(arg3); \ 3934 _argvec[4] = (unsigned long)(arg4); \ 3935 _argvec[5] = (unsigned long)(arg5); \ 3936 _argvec[6] = (unsigned long)(arg6); \ 3937 _argvec[7] = (unsigned long)(arg7); \ 3938 _argvec[8] = (unsigned long)(arg8); \ 3939 _argvec[9] = (unsigned long)(arg9); \ 3940 _argvec[10] = (unsigned long)(arg10); \ 3941 _argvec[11] = (unsigned long)(arg11); \ 3942 __asm__ volatile( \ 3943 VALGRIND_ALIGN_STACK \ 3944 "sub sp, sp, #0x30 \n\t" \ 3945 "ldr x0, [%1, #8] \n\t" \ 3946 "ldr x1, [%1, #16] \n\t" \ 3947 "ldr x2, [%1, #24] \n\t" \ 3948 "ldr x3, [%1, #32] \n\t" \ 3949 "ldr x4, [%1, #40] \n\t" \ 3950 "ldr x5, [%1, #48] \n\t" \ 3951 "ldr x6, [%1, #56] \n\t" \ 3952 "ldr x7, [%1, #64] \n\t" \ 3953 "ldr x8, [%1, #72] \n\t" \ 3954 "str x8, [sp, #0] \n\t" \ 3955 "ldr x8, [%1, #80] \n\t" \ 3956 "str x8, [sp, #8] \n\t" \ 3957 "ldr x8, [%1, #88] \n\t" \ 3958 "str x8, [sp, #16] \n\t" \ 3959 "ldr x8, [%1] \n\t" /* target->x8 */ \ 3960 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \ 3961 VALGRIND_RESTORE_STACK \ 3962 "mov %0, x0" \ 3963 : /*out*/ "=r" (_res) \ 3964 : /*in*/ "0" (&_argvec[0]) \ 3965 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \ 3966 ); \ 3967 lval = (__typeof__(lval)) _res; \ 3968 } while (0) 3969 3970 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 3971 arg7,arg8,arg9,arg10,arg11, \ 3972 arg12) \ 3973 do { \ 3974 volatile OrigFn _orig = (orig); \ 3975 volatile unsigned long _argvec[13]; \ 3976 volatile unsigned long _res; \ 3977 _argvec[0] = 
(unsigned long)_orig.nraddr; \ 3978 _argvec[1] = (unsigned long)(arg1); \ 3979 _argvec[2] = (unsigned long)(arg2); \ 3980 _argvec[3] = (unsigned long)(arg3); \ 3981 _argvec[4] = (unsigned long)(arg4); \ 3982 _argvec[5] = (unsigned long)(arg5); \ 3983 _argvec[6] = (unsigned long)(arg6); \ 3984 _argvec[7] = (unsigned long)(arg7); \ 3985 _argvec[8] = (unsigned long)(arg8); \ 3986 _argvec[9] = (unsigned long)(arg9); \ 3987 _argvec[10] = (unsigned long)(arg10); \ 3988 _argvec[11] = (unsigned long)(arg11); \ 3989 _argvec[12] = (unsigned long)(arg12); \ 3990 __asm__ volatile( \ 3991 VALGRIND_ALIGN_STACK \ 3992 "sub sp, sp, #0x30 \n\t" \ 3993 "ldr x0, [%1, #8] \n\t" \ 3994 "ldr x1, [%1, #16] \n\t" \ 3995 "ldr x2, [%1, #24] \n\t" \ 3996 "ldr x3, [%1, #32] \n\t" \ 3997 "ldr x4, [%1, #40] \n\t" \ 3998 "ldr x5, [%1, #48] \n\t" \ 3999 "ldr x6, [%1, #56] \n\t" \ 4000 "ldr x7, [%1, #64] \n\t" \ 4001 "ldr x8, [%1, #72] \n\t" \ 4002 "str x8, [sp, #0] \n\t" \ 4003 "ldr x8, [%1, #80] \n\t" \ 4004 "str x8, [sp, #8] \n\t" \ 4005 "ldr x8, [%1, #88] \n\t" \ 4006 "str x8, [sp, #16] \n\t" \ 4007 "ldr x8, [%1, #96] \n\t" \ 4008 "str x8, [sp, #24] \n\t" \ 4009 "ldr x8, [%1] \n\t" /* target->x8 */ \ 4010 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \ 4011 VALGRIND_RESTORE_STACK \ 4012 "mov %0, x0" \ 4013 : /*out*/ "=r" (_res) \ 4014 : /*in*/ "0" (&_argvec[0]) \ 4015 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \ 4016 ); \ 4017 lval = (__typeof__(lval)) _res; \ 4018 } while (0) 4019 4020 #endif /* PLAT_arm64_linux */ 4021 4022 /* ------------------------- s390x-linux ------------------------- */ 4023 4024 #if defined(PLAT_s390x_linux) 4025 4026 /* Similar workaround as amd64 (see above), but we use r11 as frame 4027 pointer and save the old r11 in r7. r11 might be used for 4028 argvec, therefore we copy argvec in r1 since r1 is clobbered 4029 after the call anyway. 
*/ 4030 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM) 4031 # define __FRAME_POINTER \ 4032 ,"d"(__builtin_dwarf_cfa()) 4033 # define VALGRIND_CFI_PROLOGUE \ 4034 ".cfi_remember_state\n\t" \ 4035 "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \ 4036 "lgr 7,11\n\t" \ 4037 "lgr 11,%2\n\t" \ 4038 ".cfi_def_cfa r11, 0\n\t" 4039 # define VALGRIND_CFI_EPILOGUE \ 4040 "lgr 11, 7\n\t" \ 4041 ".cfi_restore_state\n\t" 4042 #else 4043 # define __FRAME_POINTER 4044 # define VALGRIND_CFI_PROLOGUE \ 4045 "lgr 1,%1\n\t" 4046 # define VALGRIND_CFI_EPILOGUE 4047 #endif 4048 4049 /* Nb: On s390 the stack pointer is properly aligned *at all times* 4050 according to the s390 GCC maintainer. (The ABI specification is not 4051 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and 4052 VALGRIND_RESTORE_STACK are not defined here. */ 4053 4054 /* These regs are trashed by the hidden call. Note that we overwrite 4055 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the 4056 function a proper return address. All others are ABI defined call 4057 clobbers. 
*/ 4058 #define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \ 4059 "f0","f1","f2","f3","f4","f5","f6","f7" 4060 4061 /* Nb: Although r11 is modified in the asm snippets below (inside 4062 VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for 4063 two reasons: 4064 (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not 4065 modified 4066 (2) GCC will complain that r11 cannot appear inside a clobber section, 4067 when compiled with -O -fno-omit-frame-pointer 4068 */ 4069 4070 #define CALL_FN_W_v(lval, orig) \ 4071 do { \ 4072 volatile OrigFn _orig = (orig); \ 4073 volatile unsigned long _argvec[1]; \ 4074 volatile unsigned long _res; \ 4075 _argvec[0] = (unsigned long)_orig.nraddr; \ 4076 __asm__ volatile( \ 4077 VALGRIND_CFI_PROLOGUE \ 4078 "aghi 15,-160\n\t" \ 4079 "lg 1, 0(1)\n\t" /* target->r1 */ \ 4080 VALGRIND_CALL_NOREDIR_R1 \ 4081 "lgr %0, 2\n\t" \ 4082 "aghi 15,160\n\t" \ 4083 VALGRIND_CFI_EPILOGUE \ 4084 : /*out*/ "=d" (_res) \ 4085 : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \ 4086 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \ 4087 ); \ 4088 lval = (__typeof__(lval)) _res; \ 4089 } while (0) 4090 4091 /* The call abi has the arguments in r2-r6 and stack */ 4092 #define CALL_FN_W_W(lval, orig, arg1) \ 4093 do { \ 4094 volatile OrigFn _orig = (orig); \ 4095 volatile unsigned long _argvec[2]; \ 4096 volatile unsigned long _res; \ 4097 _argvec[0] = (unsigned long)_orig.nraddr; \ 4098 _argvec[1] = (unsigned long)arg1; \ 4099 __asm__ volatile( \ 4100 VALGRIND_CFI_PROLOGUE \ 4101 "aghi 15,-160\n\t" \ 4102 "lg 2, 8(1)\n\t" \ 4103 "lg 1, 0(1)\n\t" \ 4104 VALGRIND_CALL_NOREDIR_R1 \ 4105 "lgr %0, 2\n\t" \ 4106 "aghi 15,160\n\t" \ 4107 VALGRIND_CFI_EPILOGUE \ 4108 : /*out*/ "=d" (_res) \ 4109 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4110 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \ 4111 ); \ 4112 lval = (__typeof__(lval)) _res; \ 4113 } while (0) 4114 4115 #define CALL_FN_W_WW(lval, orig, arg1, arg2) \ 4116 do { 
\ 4117 volatile OrigFn _orig = (orig); \ 4118 volatile unsigned long _argvec[3]; \ 4119 volatile unsigned long _res; \ 4120 _argvec[0] = (unsigned long)_orig.nraddr; \ 4121 _argvec[1] = (unsigned long)arg1; \ 4122 _argvec[2] = (unsigned long)arg2; \ 4123 __asm__ volatile( \ 4124 VALGRIND_CFI_PROLOGUE \ 4125 "aghi 15,-160\n\t" \ 4126 "lg 2, 8(1)\n\t" \ 4127 "lg 3,16(1)\n\t" \ 4128 "lg 1, 0(1)\n\t" \ 4129 VALGRIND_CALL_NOREDIR_R1 \ 4130 "lgr %0, 2\n\t" \ 4131 "aghi 15,160\n\t" \ 4132 VALGRIND_CFI_EPILOGUE \ 4133 : /*out*/ "=d" (_res) \ 4134 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4135 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \ 4136 ); \ 4137 lval = (__typeof__(lval)) _res; \ 4138 } while (0) 4139 4140 #define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \ 4141 do { \ 4142 volatile OrigFn _orig = (orig); \ 4143 volatile unsigned long _argvec[4]; \ 4144 volatile unsigned long _res; \ 4145 _argvec[0] = (unsigned long)_orig.nraddr; \ 4146 _argvec[1] = (unsigned long)arg1; \ 4147 _argvec[2] = (unsigned long)arg2; \ 4148 _argvec[3] = (unsigned long)arg3; \ 4149 __asm__ volatile( \ 4150 VALGRIND_CFI_PROLOGUE \ 4151 "aghi 15,-160\n\t" \ 4152 "lg 2, 8(1)\n\t" \ 4153 "lg 3,16(1)\n\t" \ 4154 "lg 4,24(1)\n\t" \ 4155 "lg 1, 0(1)\n\t" \ 4156 VALGRIND_CALL_NOREDIR_R1 \ 4157 "lgr %0, 2\n\t" \ 4158 "aghi 15,160\n\t" \ 4159 VALGRIND_CFI_EPILOGUE \ 4160 : /*out*/ "=d" (_res) \ 4161 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4162 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \ 4163 ); \ 4164 lval = (__typeof__(lval)) _res; \ 4165 } while (0) 4166 4167 #define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \ 4168 do { \ 4169 volatile OrigFn _orig = (orig); \ 4170 volatile unsigned long _argvec[5]; \ 4171 volatile unsigned long _res; \ 4172 _argvec[0] = (unsigned long)_orig.nraddr; \ 4173 _argvec[1] = (unsigned long)arg1; \ 4174 _argvec[2] = (unsigned long)arg2; \ 4175 _argvec[3] = (unsigned long)arg3; \ 4176 _argvec[4] = (unsigned long)arg4; \ 4177 __asm__ 
volatile( \ 4178 VALGRIND_CFI_PROLOGUE \ 4179 "aghi 15,-160\n\t" \ 4180 "lg 2, 8(1)\n\t" \ 4181 "lg 3,16(1)\n\t" \ 4182 "lg 4,24(1)\n\t" \ 4183 "lg 5,32(1)\n\t" \ 4184 "lg 1, 0(1)\n\t" \ 4185 VALGRIND_CALL_NOREDIR_R1 \ 4186 "lgr %0, 2\n\t" \ 4187 "aghi 15,160\n\t" \ 4188 VALGRIND_CFI_EPILOGUE \ 4189 : /*out*/ "=d" (_res) \ 4190 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4191 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \ 4192 ); \ 4193 lval = (__typeof__(lval)) _res; \ 4194 } while (0) 4195 4196 #define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \ 4197 do { \ 4198 volatile OrigFn _orig = (orig); \ 4199 volatile unsigned long _argvec[6]; \ 4200 volatile unsigned long _res; \ 4201 _argvec[0] = (unsigned long)_orig.nraddr; \ 4202 _argvec[1] = (unsigned long)arg1; \ 4203 _argvec[2] = (unsigned long)arg2; \ 4204 _argvec[3] = (unsigned long)arg3; \ 4205 _argvec[4] = (unsigned long)arg4; \ 4206 _argvec[5] = (unsigned long)arg5; \ 4207 __asm__ volatile( \ 4208 VALGRIND_CFI_PROLOGUE \ 4209 "aghi 15,-160\n\t" \ 4210 "lg 2, 8(1)\n\t" \ 4211 "lg 3,16(1)\n\t" \ 4212 "lg 4,24(1)\n\t" \ 4213 "lg 5,32(1)\n\t" \ 4214 "lg 6,40(1)\n\t" \ 4215 "lg 1, 0(1)\n\t" \ 4216 VALGRIND_CALL_NOREDIR_R1 \ 4217 "lgr %0, 2\n\t" \ 4218 "aghi 15,160\n\t" \ 4219 VALGRIND_CFI_EPILOGUE \ 4220 : /*out*/ "=d" (_res) \ 4221 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4222 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 4223 ); \ 4224 lval = (__typeof__(lval)) _res; \ 4225 } while (0) 4226 4227 #define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \ 4228 arg6) \ 4229 do { \ 4230 volatile OrigFn _orig = (orig); \ 4231 volatile unsigned long _argvec[7]; \ 4232 volatile unsigned long _res; \ 4233 _argvec[0] = (unsigned long)_orig.nraddr; \ 4234 _argvec[1] = (unsigned long)arg1; \ 4235 _argvec[2] = (unsigned long)arg2; \ 4236 _argvec[3] = (unsigned long)arg3; \ 4237 _argvec[4] = (unsigned long)arg4; \ 4238 _argvec[5] = (unsigned long)arg5; \ 4239 _argvec[6] = (unsigned 
long)arg6; \ 4240 __asm__ volatile( \ 4241 VALGRIND_CFI_PROLOGUE \ 4242 "aghi 15,-168\n\t" \ 4243 "lg 2, 8(1)\n\t" \ 4244 "lg 3,16(1)\n\t" \ 4245 "lg 4,24(1)\n\t" \ 4246 "lg 5,32(1)\n\t" \ 4247 "lg 6,40(1)\n\t" \ 4248 "mvc 160(8,15), 48(1)\n\t" \ 4249 "lg 1, 0(1)\n\t" \ 4250 VALGRIND_CALL_NOREDIR_R1 \ 4251 "lgr %0, 2\n\t" \ 4252 "aghi 15,168\n\t" \ 4253 VALGRIND_CFI_EPILOGUE \ 4254 : /*out*/ "=d" (_res) \ 4255 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4256 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 4257 ); \ 4258 lval = (__typeof__(lval)) _res; \ 4259 } while (0) 4260 4261 #define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \ 4262 arg6, arg7) \ 4263 do { \ 4264 volatile OrigFn _orig = (orig); \ 4265 volatile unsigned long _argvec[8]; \ 4266 volatile unsigned long _res; \ 4267 _argvec[0] = (unsigned long)_orig.nraddr; \ 4268 _argvec[1] = (unsigned long)arg1; \ 4269 _argvec[2] = (unsigned long)arg2; \ 4270 _argvec[3] = (unsigned long)arg3; \ 4271 _argvec[4] = (unsigned long)arg4; \ 4272 _argvec[5] = (unsigned long)arg5; \ 4273 _argvec[6] = (unsigned long)arg6; \ 4274 _argvec[7] = (unsigned long)arg7; \ 4275 __asm__ volatile( \ 4276 VALGRIND_CFI_PROLOGUE \ 4277 "aghi 15,-176\n\t" \ 4278 "lg 2, 8(1)\n\t" \ 4279 "lg 3,16(1)\n\t" \ 4280 "lg 4,24(1)\n\t" \ 4281 "lg 5,32(1)\n\t" \ 4282 "lg 6,40(1)\n\t" \ 4283 "mvc 160(8,15), 48(1)\n\t" \ 4284 "mvc 168(8,15), 56(1)\n\t" \ 4285 "lg 1, 0(1)\n\t" \ 4286 VALGRIND_CALL_NOREDIR_R1 \ 4287 "lgr %0, 2\n\t" \ 4288 "aghi 15,176\n\t" \ 4289 VALGRIND_CFI_EPILOGUE \ 4290 : /*out*/ "=d" (_res) \ 4291 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4292 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 4293 ); \ 4294 lval = (__typeof__(lval)) _res; \ 4295 } while (0) 4296 4297 #define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \ 4298 arg6, arg7 ,arg8) \ 4299 do { \ 4300 volatile OrigFn _orig = (orig); \ 4301 volatile unsigned long _argvec[9]; \ 4302 volatile unsigned long _res; \ 4303 
_argvec[0] = (unsigned long)_orig.nraddr; \ 4304 _argvec[1] = (unsigned long)arg1; \ 4305 _argvec[2] = (unsigned long)arg2; \ 4306 _argvec[3] = (unsigned long)arg3; \ 4307 _argvec[4] = (unsigned long)arg4; \ 4308 _argvec[5] = (unsigned long)arg5; \ 4309 _argvec[6] = (unsigned long)arg6; \ 4310 _argvec[7] = (unsigned long)arg7; \ 4311 _argvec[8] = (unsigned long)arg8; \ 4312 __asm__ volatile( \ 4313 VALGRIND_CFI_PROLOGUE \ 4314 "aghi 15,-184\n\t" \ 4315 "lg 2, 8(1)\n\t" \ 4316 "lg 3,16(1)\n\t" \ 4317 "lg 4,24(1)\n\t" \ 4318 "lg 5,32(1)\n\t" \ 4319 "lg 6,40(1)\n\t" \ 4320 "mvc 160(8,15), 48(1)\n\t" \ 4321 "mvc 168(8,15), 56(1)\n\t" \ 4322 "mvc 176(8,15), 64(1)\n\t" \ 4323 "lg 1, 0(1)\n\t" \ 4324 VALGRIND_CALL_NOREDIR_R1 \ 4325 "lgr %0, 2\n\t" \ 4326 "aghi 15,184\n\t" \ 4327 VALGRIND_CFI_EPILOGUE \ 4328 : /*out*/ "=d" (_res) \ 4329 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4330 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 4331 ); \ 4332 lval = (__typeof__(lval)) _res; \ 4333 } while (0) 4334 4335 #define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \ 4336 arg6, arg7 ,arg8, arg9) \ 4337 do { \ 4338 volatile OrigFn _orig = (orig); \ 4339 volatile unsigned long _argvec[10]; \ 4340 volatile unsigned long _res; \ 4341 _argvec[0] = (unsigned long)_orig.nraddr; \ 4342 _argvec[1] = (unsigned long)arg1; \ 4343 _argvec[2] = (unsigned long)arg2; \ 4344 _argvec[3] = (unsigned long)arg3; \ 4345 _argvec[4] = (unsigned long)arg4; \ 4346 _argvec[5] = (unsigned long)arg5; \ 4347 _argvec[6] = (unsigned long)arg6; \ 4348 _argvec[7] = (unsigned long)arg7; \ 4349 _argvec[8] = (unsigned long)arg8; \ 4350 _argvec[9] = (unsigned long)arg9; \ 4351 __asm__ volatile( \ 4352 VALGRIND_CFI_PROLOGUE \ 4353 "aghi 15,-192\n\t" \ 4354 "lg 2, 8(1)\n\t" \ 4355 "lg 3,16(1)\n\t" \ 4356 "lg 4,24(1)\n\t" \ 4357 "lg 5,32(1)\n\t" \ 4358 "lg 6,40(1)\n\t" \ 4359 "mvc 160(8,15), 48(1)\n\t" \ 4360 "mvc 168(8,15), 56(1)\n\t" \ 4361 "mvc 176(8,15), 64(1)\n\t" \ 4362 "mvc 184(8,15), 
72(1)\n\t" \ 4363 "lg 1, 0(1)\n\t" \ 4364 VALGRIND_CALL_NOREDIR_R1 \ 4365 "lgr %0, 2\n\t" \ 4366 "aghi 15,192\n\t" \ 4367 VALGRIND_CFI_EPILOGUE \ 4368 : /*out*/ "=d" (_res) \ 4369 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4370 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 4371 ); \ 4372 lval = (__typeof__(lval)) _res; \ 4373 } while (0) 4374 4375 #define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \ 4376 arg6, arg7 ,arg8, arg9, arg10) \ 4377 do { \ 4378 volatile OrigFn _orig = (orig); \ 4379 volatile unsigned long _argvec[11]; \ 4380 volatile unsigned long _res; \ 4381 _argvec[0] = (unsigned long)_orig.nraddr; \ 4382 _argvec[1] = (unsigned long)arg1; \ 4383 _argvec[2] = (unsigned long)arg2; \ 4384 _argvec[3] = (unsigned long)arg3; \ 4385 _argvec[4] = (unsigned long)arg4; \ 4386 _argvec[5] = (unsigned long)arg5; \ 4387 _argvec[6] = (unsigned long)arg6; \ 4388 _argvec[7] = (unsigned long)arg7; \ 4389 _argvec[8] = (unsigned long)arg8; \ 4390 _argvec[9] = (unsigned long)arg9; \ 4391 _argvec[10] = (unsigned long)arg10; \ 4392 __asm__ volatile( \ 4393 VALGRIND_CFI_PROLOGUE \ 4394 "aghi 15,-200\n\t" \ 4395 "lg 2, 8(1)\n\t" \ 4396 "lg 3,16(1)\n\t" \ 4397 "lg 4,24(1)\n\t" \ 4398 "lg 5,32(1)\n\t" \ 4399 "lg 6,40(1)\n\t" \ 4400 "mvc 160(8,15), 48(1)\n\t" \ 4401 "mvc 168(8,15), 56(1)\n\t" \ 4402 "mvc 176(8,15), 64(1)\n\t" \ 4403 "mvc 184(8,15), 72(1)\n\t" \ 4404 "mvc 192(8,15), 80(1)\n\t" \ 4405 "lg 1, 0(1)\n\t" \ 4406 VALGRIND_CALL_NOREDIR_R1 \ 4407 "lgr %0, 2\n\t" \ 4408 "aghi 15,200\n\t" \ 4409 VALGRIND_CFI_EPILOGUE \ 4410 : /*out*/ "=d" (_res) \ 4411 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4412 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 4413 ); \ 4414 lval = (__typeof__(lval)) _res; \ 4415 } while (0) 4416 4417 #define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \ 4418 arg6, arg7 ,arg8, arg9, arg10, arg11) \ 4419 do { \ 4420 volatile OrigFn _orig = (orig); \ 4421 volatile unsigned long _argvec[12]; \ 4422 volatile 
unsigned long _res; \ 4423 _argvec[0] = (unsigned long)_orig.nraddr; \ 4424 _argvec[1] = (unsigned long)arg1; \ 4425 _argvec[2] = (unsigned long)arg2; \ 4426 _argvec[3] = (unsigned long)arg3; \ 4427 _argvec[4] = (unsigned long)arg4; \ 4428 _argvec[5] = (unsigned long)arg5; \ 4429 _argvec[6] = (unsigned long)arg6; \ 4430 _argvec[7] = (unsigned long)arg7; \ 4431 _argvec[8] = (unsigned long)arg8; \ 4432 _argvec[9] = (unsigned long)arg9; \ 4433 _argvec[10] = (unsigned long)arg10; \ 4434 _argvec[11] = (unsigned long)arg11; \ 4435 __asm__ volatile( \ 4436 VALGRIND_CFI_PROLOGUE \ 4437 "aghi 15,-208\n\t" \ 4438 "lg 2, 8(1)\n\t" \ 4439 "lg 3,16(1)\n\t" \ 4440 "lg 4,24(1)\n\t" \ 4441 "lg 5,32(1)\n\t" \ 4442 "lg 6,40(1)\n\t" \ 4443 "mvc 160(8,15), 48(1)\n\t" \ 4444 "mvc 168(8,15), 56(1)\n\t" \ 4445 "mvc 176(8,15), 64(1)\n\t" \ 4446 "mvc 184(8,15), 72(1)\n\t" \ 4447 "mvc 192(8,15), 80(1)\n\t" \ 4448 "mvc 200(8,15), 88(1)\n\t" \ 4449 "lg 1, 0(1)\n\t" \ 4450 VALGRIND_CALL_NOREDIR_R1 \ 4451 "lgr %0, 2\n\t" \ 4452 "aghi 15,208\n\t" \ 4453 VALGRIND_CFI_EPILOGUE \ 4454 : /*out*/ "=d" (_res) \ 4455 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4456 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 4457 ); \ 4458 lval = (__typeof__(lval)) _res; \ 4459 } while (0) 4460 4461 #define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \ 4462 arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\ 4463 do { \ 4464 volatile OrigFn _orig = (orig); \ 4465 volatile unsigned long _argvec[13]; \ 4466 volatile unsigned long _res; \ 4467 _argvec[0] = (unsigned long)_orig.nraddr; \ 4468 _argvec[1] = (unsigned long)arg1; \ 4469 _argvec[2] = (unsigned long)arg2; \ 4470 _argvec[3] = (unsigned long)arg3; \ 4471 _argvec[4] = (unsigned long)arg4; \ 4472 _argvec[5] = (unsigned long)arg5; \ 4473 _argvec[6] = (unsigned long)arg6; \ 4474 _argvec[7] = (unsigned long)arg7; \ 4475 _argvec[8] = (unsigned long)arg8; \ 4476 _argvec[9] = (unsigned long)arg9; \ 4477 _argvec[10] = (unsigned long)arg10; \ 4478 
_argvec[11] = (unsigned long)arg11; \ 4479 _argvec[12] = (unsigned long)arg12; \ 4480 __asm__ volatile( \ 4481 VALGRIND_CFI_PROLOGUE \ 4482 "aghi 15,-216\n\t" \ 4483 "lg 2, 8(1)\n\t" \ 4484 "lg 3,16(1)\n\t" \ 4485 "lg 4,24(1)\n\t" \ 4486 "lg 5,32(1)\n\t" \ 4487 "lg 6,40(1)\n\t" \ 4488 "mvc 160(8,15), 48(1)\n\t" \ 4489 "mvc 168(8,15), 56(1)\n\t" \ 4490 "mvc 176(8,15), 64(1)\n\t" \ 4491 "mvc 184(8,15), 72(1)\n\t" \ 4492 "mvc 192(8,15), 80(1)\n\t" \ 4493 "mvc 200(8,15), 88(1)\n\t" \ 4494 "mvc 208(8,15), 96(1)\n\t" \ 4495 "lg 1, 0(1)\n\t" \ 4496 VALGRIND_CALL_NOREDIR_R1 \ 4497 "lgr %0, 2\n\t" \ 4498 "aghi 15,216\n\t" \ 4499 VALGRIND_CFI_EPILOGUE \ 4500 : /*out*/ "=d" (_res) \ 4501 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4502 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 4503 ); \ 4504 lval = (__typeof__(lval)) _res; \ 4505 } while (0) 4506 4507 4508 #endif /* PLAT_s390x_linux */ 4509 4510 /* ------------------------- mips32-linux ----------------------- */ 4511 4512 #if defined(PLAT_mips32_linux) 4513 4514 /* These regs are trashed by the hidden call. */ 4515 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \ 4516 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \ 4517 "$25", "$31" 4518 4519 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned 4520 long) == 4. 
*/ 4521 4522 #define CALL_FN_W_v(lval, orig) \ 4523 do { \ 4524 volatile OrigFn _orig = (orig); \ 4525 volatile unsigned long _argvec[1]; \ 4526 volatile unsigned long _res; \ 4527 _argvec[0] = (unsigned long)_orig.nraddr; \ 4528 __asm__ volatile( \ 4529 "subu $29, $29, 8 \n\t" \ 4530 "sw $28, 0($29) \n\t" \ 4531 "sw $31, 4($29) \n\t" \ 4532 "subu $29, $29, 16 \n\t" \ 4533 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 4534 VALGRIND_CALL_NOREDIR_T9 \ 4535 "addu $29, $29, 16\n\t" \ 4536 "lw $28, 0($29) \n\t" \ 4537 "lw $31, 4($29) \n\t" \ 4538 "addu $29, $29, 8 \n\t" \ 4539 "move %0, $2\n" \ 4540 : /*out*/ "=r" (_res) \ 4541 : /*in*/ "0" (&_argvec[0]) \ 4542 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 4543 ); \ 4544 lval = (__typeof__(lval)) _res; \ 4545 } while (0) 4546 4547 #define CALL_FN_W_W(lval, orig, arg1) \ 4548 do { \ 4549 volatile OrigFn _orig = (orig); \ 4550 volatile unsigned long _argvec[2]; \ 4551 volatile unsigned long _res; \ 4552 _argvec[0] = (unsigned long)_orig.nraddr; \ 4553 _argvec[1] = (unsigned long)(arg1); \ 4554 __asm__ volatile( \ 4555 "subu $29, $29, 8 \n\t" \ 4556 "sw $28, 0($29) \n\t" \ 4557 "sw $31, 4($29) \n\t" \ 4558 "subu $29, $29, 16 \n\t" \ 4559 "lw $4, 4(%1) \n\t" /* arg1*/ \ 4560 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 4561 VALGRIND_CALL_NOREDIR_T9 \ 4562 "addu $29, $29, 16 \n\t" \ 4563 "lw $28, 0($29) \n\t" \ 4564 "lw $31, 4($29) \n\t" \ 4565 "addu $29, $29, 8 \n\t" \ 4566 "move %0, $2\n" \ 4567 : /*out*/ "=r" (_res) \ 4568 : /*in*/ "0" (&_argvec[0]) \ 4569 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 4570 ); \ 4571 lval = (__typeof__(lval)) _res; \ 4572 } while (0) 4573 4574 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \ 4575 do { \ 4576 volatile OrigFn _orig = (orig); \ 4577 volatile unsigned long _argvec[3]; \ 4578 volatile unsigned long _res; \ 4579 _argvec[0] = (unsigned long)_orig.nraddr; \ 4580 _argvec[1] = (unsigned long)(arg1); \ 4581 _argvec[2] = (unsigned long)(arg2); \ 4582 __asm__ volatile( \ 4583 "subu $29, $29, 8 \n\t" \ 
"sw $28, 0($29) \n\t" \
      "sw $31, 4($29) \n\t" \
      "subu $29, $29, 16 \n\t" \
      "lw $4, 4(%1) \n\t" \
      "lw $5, 8(%1) \n\t" \
      "lw $25, 0(%1) \n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "addu $29, $29, 16 \n\t" \
      "lw $28, 0($29) \n\t" \
      "lw $31, 4($29) \n\t" \
      "addu $29, $29, 8 \n\t" \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "0" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call a 3-arg function.  Pattern shared by all the mips32 (o32)
   CALL_FN_ macros below: $28 (gp) and $31 (ra) are saved on the stack
   around the call, register args go in $4..$7, the o32 ABI's 16 bytes
   of outgoing argument space are reserved, the target address goes in
   $25 (t9) for the no-redirect call, and the result is taken from $2
   (v0). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      __asm__ volatile( \
      "subu $29, $29, 8 \n\t" \
      "sw $28, 0($29) \n\t" \
      "sw $31, 4($29) \n\t" \
      "subu $29, $29, 16 \n\t" \
      "lw $4, 4(%1) \n\t" \
      "lw $5, 8(%1) \n\t" \
      "lw $6, 12(%1) \n\t" \
      "lw $25, 0(%1) \n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "addu $29, $29, 16 \n\t" \
      "lw $28, 0($29) \n\t" \
      "lw $31, 4($29) \n\t" \
      "addu $29, $29, 8 \n\t" \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "0" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call a 4-arg function: args in $4..$7, nothing on the stack beyond
   the mandatory o32 argument area. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
      "subu $29, $29, 8 \n\t" \
      "sw $28, 0($29) \n\t" \
      "sw $31, 4($29) \n\t" \
      "subu $29, $29, 16 \n\t" \
      "lw $4, 4(%1) \n\t" \
      "lw $5, 8(%1) \n\t" \
      "lw $6, 12(%1) \n\t" \
      "lw $7, 16(%1) \n\t" \
      "lw $25, 0(%1) \n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "addu $29, $29, 16 \n\t" \
      "lw $28, 0($29) \n\t" \
      "lw $31, 4($29) \n\t" \
      "addu $29, $29, 8 \n\t" \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "0" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call a 5-arg function: args 1-4 in $4..$7, arg5 is written to the
   stack slot at sp+16 (the first slot past the o32 register-arg
   shadow area). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
      "subu $29, $29, 8 \n\t" \
      "sw $28, 0($29) \n\t" \
      "sw $31, 4($29) \n\t" \
      "lw $4, 20(%1) \n\t" \
      "subu $29, $29, 24\n\t" \
      "sw $4, 16($29) \n\t" \
      "lw $4, 4(%1) \n\t" \
      "lw $5, 8(%1) \n\t" \
      "lw $6, 12(%1) \n\t" \
      "lw $7, 16(%1) \n\t" \
      "lw $25, 0(%1) \n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "addu $29, $29, 24 \n\t" \
      "lw $28, 0($29) \n\t" \
      "lw $31, 4($29) \n\t" \
      "addu $29, $29, 8 \n\t" \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "0" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 6-arg function: args 5 and 6 go to stack slots sp+16 and
   sp+20; $4 is used as a scratch register for the copies. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
      "subu $29, $29, 8 \n\t" \
      "sw $28, 0($29) \n\t" \
      "sw $31, 4($29) \n\t" \
      "lw $4, 20(%1) \n\t" \
      "subu $29, $29, 32\n\t" \
      "sw $4, 16($29) \n\t" \
      "lw $4, 24(%1) \n\t" \
      "nop\n\t" /* NOTE(review): only this variant has a nop between lw and sw (load delay?) -- siblings omit it; confirm intent */ \
      "sw $4, 20($29) \n\t" \
      "lw $4, 4(%1) \n\t" \
      "lw $5, 8(%1) \n\t" \
      "lw $6, 12(%1) \n\t" \
      "lw $7, 16(%1) \n\t" \
      "lw $25, 0(%1) \n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "addu $29, $29, 32 \n\t" \
      "lw $28, 0($29) \n\t" \
      "lw $31, 4($29) \n\t" \
      "addu $29, $29, 8 \n\t" \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "0" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call a 7-arg function: args 5-7 go to stack slots sp+16..sp+24. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
      "subu $29, $29, 8 \n\t" \
      "sw $28, 0($29) \n\t" \
      "sw $31, 4($29) \n\t" \
      "lw $4, 20(%1) \n\t" \
      "subu $29, $29, 32\n\t" \
      "sw $4, 16($29) \n\t" \
      "lw $4, 24(%1) \n\t" \
      "sw $4, 20($29) \n\t" \
      "lw $4, 28(%1) \n\t" \
      "sw $4, 24($29) \n\t" \
      "lw $4, 4(%1) \n\t" \
      "lw $5, 8(%1) \n\t" \
      "lw $6, 12(%1) \n\t" \
      "lw $7, 16(%1) \n\t" \
      "lw $25, 0(%1) \n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "addu $29, $29, 32 \n\t" \
      "lw $28, 0($29) \n\t" \
      "lw $31, 4($29) \n\t" \
      "addu $29, $29, 8 \n\t" \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "0" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call an 8-arg function: args 5-8 go to stack slots sp+16..sp+28. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
      "subu $29, $29, 8 \n\t" \
      "sw $28, 0($29) \n\t" \
      "sw $31, 4($29) \n\t" \
      "lw $4, 20(%1) \n\t" \
      "subu $29, $29, 40\n\t" \
      "sw $4, 16($29) \n\t" \
      "lw $4, 24(%1) \n\t" \
      "sw $4, 20($29) \n\t" \
      "lw $4, 28(%1) \n\t" \
      "sw $4, 24($29) \n\t" \
      "lw $4, 32(%1) \n\t" \
      "sw $4, 28($29) \n\t" \
      "lw $4, 4(%1) \n\t" \
      "lw $5, 8(%1) \n\t" \
      "lw $6, 12(%1) \n\t" \
      "lw $7, 16(%1) \n\t" \
      "lw $25, 0(%1) \n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "addu $29, $29, 40 \n\t" \
      "lw $28, 0($29) \n\t" \
      "lw $31, 4($29) \n\t" \
      "addu $29, $29, 8 \n\t" \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "0" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call a 9-arg function: args 5-9 go to stack slots sp+16..sp+32. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
      "subu $29, $29, 8 \n\t" \
      "sw $28, 0($29) \n\t" \
      "sw $31, 4($29) \n\t" \
      "lw $4, 20(%1) \n\t" \
      "subu $29, $29, 40\n\t" \
      "sw $4, 16($29) \n\t" \
      "lw $4, 24(%1) \n\t" \
      "sw $4, 20($29) \n\t" \
      "lw $4, 28(%1) \n\t" \
      "sw $4, 24($29) \n\t" \
      "lw $4, 32(%1) \n\t" \
      "sw $4, 28($29) \n\t" \
      "lw $4, 36(%1) \n\t" \
      "sw $4, 32($29) \n\t" \
      "lw $4, 4(%1) \n\t" \
      "lw $5, 8(%1) \n\t" \
      "lw $6, 12(%1) \n\t" \
      "lw $7, 16(%1) \n\t" \
      "lw $25, 0(%1) \n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "addu $29, $29, 40 \n\t" \
      "lw $28, 0($29) \n\t" \
      "lw $31, 4($29) \n\t" \
      "addu $29, $29, 8 \n\t" \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "0" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call a 10-arg function: args 5-10 go to stack slots sp+16..sp+36. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
      "subu $29, $29, 8 \n\t" \
      "sw $28, 0($29) \n\t" \
      "sw $31, 4($29) \n\t" \
      "lw $4, 20(%1) \n\t" \
      "subu $29, $29, 48\n\t" \
      "sw $4, 16($29) \n\t" \
      "lw $4, 24(%1) \n\t" \
      "sw $4, 20($29) \n\t" \
      "lw $4, 28(%1) \n\t" \
      "sw $4, 24($29) \n\t" \
      "lw $4, 32(%1) \n\t" \
      "sw $4, 28($29) \n\t" \
      "lw $4, 36(%1) \n\t" \
      "sw $4, 32($29) \n\t" \
      "lw $4, 40(%1) \n\t" \
      "sw $4, 36($29) \n\t" \
      "lw $4, 4(%1) \n\t" \
      "lw $5, 8(%1) \n\t" \
      "lw $6, 12(%1) \n\t" \
      "lw $7, 16(%1) \n\t" \
      "lw $25, 0(%1) \n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "addu $29, $29, 48 \n\t" \
      "lw $28, 0($29) \n\t" \
      "lw $31, 4($29) \n\t" \
      "addu $29, $29, 8 \n\t" \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "0" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call an 11-arg function: args 5-11 go to stack slots sp+16..sp+40. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
      "subu $29, $29, 8 \n\t" \
      "sw $28, 0($29) \n\t" \
      "sw $31, 4($29) \n\t" \
      "lw $4, 20(%1) \n\t" \
      "subu $29, $29, 48\n\t" \
      "sw $4, 16($29) \n\t" \
      "lw $4, 24(%1) \n\t" \
      "sw $4, 20($29) \n\t" \
      "lw $4, 28(%1) \n\t" \
      "sw $4, 24($29) \n\t" \
      "lw $4, 32(%1) \n\t" \
      "sw $4, 28($29) \n\t" \
      "lw $4, 36(%1) \n\t" \
      "sw $4, 32($29) \n\t" \
      "lw $4, 40(%1) \n\t" \
      "sw $4, 36($29) \n\t" \
      "lw $4, 44(%1) \n\t" \
      "sw $4, 40($29) \n\t" \
      "lw $4, 4(%1) \n\t" \
      "lw $5, 8(%1) \n\t" \
      "lw $6, 12(%1) \n\t" \
      "lw $7, 16(%1) \n\t" \
      "lw $25, 0(%1) \n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "addu $29, $29, 48 \n\t" \
      "lw $28, 0($29) \n\t" \
      "lw $31, 4($29) \n\t" \
      "addu $29, $29, 8 \n\t" \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "0" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call a 12-arg function: args 5-12 go to stack slots sp+16..sp+44. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
"subu $29, $29, 8 \n\t" \
      "sw $28, 0($29) \n\t" \
      "sw $31, 4($29) \n\t" \
      "lw $4, 20(%1) \n\t" \
      "subu $29, $29, 56\n\t" \
      "sw $4, 16($29) \n\t" \
      "lw $4, 24(%1) \n\t" \
      "sw $4, 20($29) \n\t" \
      "lw $4, 28(%1) \n\t" \
      "sw $4, 24($29) \n\t" \
      "lw $4, 32(%1) \n\t" \
      "sw $4, 28($29) \n\t" \
      "lw $4, 36(%1) \n\t" \
      "sw $4, 32($29) \n\t" \
      "lw $4, 40(%1) \n\t" \
      "sw $4, 36($29) \n\t" \
      "lw $4, 44(%1) \n\t" \
      "sw $4, 40($29) \n\t" \
      "lw $4, 48(%1) \n\t" \
      "sw $4, 44($29) \n\t" \
      "lw $4, 4(%1) \n\t" \
      "lw $5, 8(%1) \n\t" \
      "lw $6, 12(%1) \n\t" \
      "lw $7, 16(%1) \n\t" \
      "lw $25, 0(%1) \n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "addu $29, $29, 56 \n\t" \
      "lw $28, 0($29) \n\t" \
      "lw $31, 4($29) \n\t" \
      "addu $29, $29, 8 \n\t" \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "r" (&_argvec[0]) /* NOTE(review): sibling macros use the "0" matching constraint here; "r" also works since %1 is not read after %0 is written -- confirm the inconsistency is intentional */ \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#endif /* PLAT_mips32_linux */

/* ------------------------- mips64-linux ------------------------- */

#if defined(PLAT_mips64_linux)

/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"

/* These CALL_FN_ macros assume that on mips64-linux,
   sizeof(unsigned long) == 8.  (The original comment said
   "mips-linux ... == 4", copied from the mips32 section above; the
   asm below uses ld/sd and 8-byte _argvec slots, so 8 is correct.)
*/

/* Call a 0-arg function (plus the hidden nraddr in _argvec[0]).
   Shared pattern for all mips64 (n64 ABI) CALL_FN_ macros below:
   register args go in $4..$11 (a0..a7), the target address goes in
   $25 (t9) for the no-redirect call, and the result is taken from $2
   (v0).  No gp/ra save is needed because the n64 variants make no
   explicit stack frame for <= 8 args. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
      "ld $25, 0(%1)\n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "0" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call a 1-arg function: arg1 in $4. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      __asm__ volatile( \
      "ld $4, 8(%1)\n\t" /* arg1*/ \
      "ld $25, 0(%1)\n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "r" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call a 2-arg function: args in $4, $5. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      __asm__ volatile( \
      "ld $4, 8(%1)\n\t" \
      "ld $5, 16(%1)\n\t" \
      "ld $25, 0(%1)\n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "r" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call a 3-arg function: args in $4..$6. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      __asm__ volatile( \
      "ld $4, 8(%1)\n\t" \
      "ld $5, 16(%1)\n\t" \
      "ld $6, 24(%1)\n\t" \
      "ld $25, 0(%1)\n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "r" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call a 4-arg function: args in $4..$7. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
      "ld $4, 8(%1)\n\t" \
      "ld $5, 16(%1)\n\t" \
      "ld $6, 24(%1)\n\t" \
      "ld $7, 32(%1)\n\t" \
      "ld $25, 0(%1)\n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "r" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call a 5-arg function: args in $4..$8. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
      "ld $4, 8(%1)\n\t" \
      "ld $5, 16(%1)\n\t" \
      "ld $6, 24(%1)\n\t" \
      "ld $7, 32(%1)\n\t" \
      "ld $8, 40(%1)\n\t" \
      "ld $25, 0(%1)\n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "r" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call a 6-arg function: args in $4..$9. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
      "ld $4, 8(%1)\n\t" \
      "ld $5, 16(%1)\n\t" \
      "ld $6, 24(%1)\n\t" \
      "ld $7, 32(%1)\n\t" \
      "ld $8, 40(%1)\n\t" \
      "ld $9, 48(%1)\n\t" \
      "ld $25, 0(%1)\n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "r" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call a 7-arg function: args in $4..$10. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
      "ld $4, 8(%1)\n\t" \
      "ld $5, 16(%1)\n\t" \
      "ld $6, 24(%1)\n\t" \
      "ld $7, 32(%1)\n\t" \
      "ld $8, 40(%1)\n\t" \
      "ld $9, 48(%1)\n\t" \
      "ld $10, 56(%1)\n\t" \
      "ld $25, 0(%1) \n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "r" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call an 8-arg function: args in $4..$11 (all eight n64 arg regs). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
      "ld $4, 8(%1)\n\t" \
      "ld $5, 16(%1)\n\t" \
      "ld $6, 24(%1)\n\t" \
      "ld $7, 32(%1)\n\t" \
      "ld $8, 40(%1)\n\t" \
      "ld $9, 48(%1)\n\t" \
      "ld $10, 56(%1)\n\t" \
      "ld $11, 64(%1)\n\t" \
      "ld $25, 0(%1) \n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "r" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call a 9-arg function: arg9 is passed on the stack (8 bytes). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
      "dsubu $29, $29, 8\n\t" \
      "ld $4, 72(%1)\n\t" \
      "sd $4, 0($29)\n\t" \
      "ld $4, 8(%1)\n\t" \
      "ld $5, 16(%1)\n\t" \
      "ld $6, 24(%1)\n\t" \
      "ld $7, 32(%1)\n\t" \
      "ld $8, 40(%1)\n\t" \
      "ld $9, 48(%1)\n\t" \
      "ld $10, 56(%1)\n\t" \
      "ld $11, 64(%1)\n\t" \
      "ld $25, 0(%1)\n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "daddu $29, $29, 8\n\t" \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "r" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call a 10-arg function: args 9-10 on the stack (16 bytes). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
      "dsubu $29, $29, 16\n\t" \
      "ld $4, 72(%1)\n\t" \
      "sd $4, 0($29)\n\t" \
      "ld $4, 80(%1)\n\t" \
      "sd $4, 8($29)\n\t" \
      "ld $4, 8(%1)\n\t" \
      "ld $5, 16(%1)\n\t" \
      "ld $6, 24(%1)\n\t" \
      "ld $7, 32(%1)\n\t" \
      "ld $8, 40(%1)\n\t" \
      "ld $9, 48(%1)\n\t" \
      "ld $10, 56(%1)\n\t" \
      "ld $11, 64(%1)\n\t" \
      "ld $25, 0(%1)\n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "daddu $29, $29, 16\n\t" \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "r" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call an 11-arg function: args 9-11 on the stack (24 bytes). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
      "dsubu $29, $29, 24\n\t" \
      "ld $4, 72(%1)\n\t" \
      "sd $4, 0($29)\n\t" \
      "ld $4, 80(%1)\n\t" \
      "sd $4, 8($29)\n\t" \
      "ld $4, 88(%1)\n\t" \
      "sd $4, 16($29)\n\t" \
      "ld $4, 8(%1)\n\t" \
      "ld $5, 16(%1)\n\t" \
      "ld $6, 24(%1)\n\t" \
      "ld $7, 32(%1)\n\t" \
      "ld $8, 40(%1)\n\t" \
      "ld $9, 48(%1)\n\t" \
      "ld $10, 56(%1)\n\t" \
      "ld $11, 64(%1)\n\t" \
      "ld $25, 0(%1)\n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "daddu $29, $29, 24\n\t" \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "r" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Call a 12-arg function: args 9-12 on the stack (32 bytes). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
      "dsubu $29, $29, 32\n\t" \
      "ld $4, 72(%1)\n\t" \
      "sd $4, 0($29)\n\t" \
      "ld $4, 80(%1)\n\t" \
      "sd $4, 8($29)\n\t" \
      "ld $4, 88(%1)\n\t" \
      "sd $4, 16($29)\n\t" \
      "ld $4, 96(%1)\n\t" \
      "sd $4, 24($29)\n\t" \
      "ld $4, 8(%1)\n\t" \
      "ld $5, 16(%1)\n\t" \
      "ld $6, 24(%1)\n\t" \
      "ld $7, 32(%1)\n\t" \
      "ld $8, 40(%1)\n\t" \
      "ld $9, 48(%1)\n\t" \
      "ld $10, 56(%1)\n\t" \
      "ld $11, 64(%1)\n\t" \
      "ld $25, 0(%1)\n\t" /* target->t9 */ \
      VALGRIND_CALL_NOREDIR_T9 \
      "daddu $29, $29, 32\n\t" \
      "move %0, $2\n" \
      : /*out*/ "=r" (_res) \
      : /*in*/ "r" (&_argvec[0]) \
      : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#endif /* PLAT_mips64_linux */


/* ------------------------------------------------------------------ */
/* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS.               */
/*                                                                    */
/* ------------------------------------------------------------------ */

/* Some request codes.  There are many more of these, but most are not
   exposed to end-user view.  These are the public ones, all of the
   form 0x1000 + small_number.

   Core ones are in the range 0x00000000--0x0000ffff.  The non-public
   ones start at 0x2000.
*/

/* These macros are used by tools -- they must be public, but don't
   embed them into other programs.
   VG_USERREQ_TOOL_BASE packs a two-character tool code into the top
   16 bits of a request number; VG_IS_TOOL_USERREQ tests whether
   request 'v' belongs to the tool identified by (a,b). */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))

/* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
   This enum comprises an ABI exported by Valgrind to programs
   which use client requests.  DO NOT CHANGE THE ORDER OF THESE
   ENTRIES, NOR DELETE ANY -- add new ones at the end. */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows the client program and/or gdbserver to execute a monitor
             command. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK   = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL   = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL  = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC    = 0x1305,
          VG_USERREQ__MEMPOOL_FREE     = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM     = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL     = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE   = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS   = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Initialise IR injection */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901
   } Vg_ClientRequest;

/* Compilers without GCC's __extension__ keyword get a no-op stub so
   the client-request macros still parse. */
#if !defined(__GNUC__)
#  define __extension__ /* */
#endif


/* Returns the number of Valgrinds this code is running under.  That
   is, 0 if running natively, 1 if running under Valgrind, 2 if
   running under Valgrind which is running under another Valgrind,
   etc.
*/
/* NOTE(review): the stray trailing '\' after the final ')' below is in
   the original; it splices the following blank line into the macro,
   which is harmless.  Left as-is. */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)                    \


/* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
   _qzz_len - 1].  Useful if you are debugging a JITter or some such,
   since it provides a way to make sure valgrind will retranslate the
   invalidated area.  Returns no value. */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len)              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \
                                    _qzz_addr, _qzz_len, 0, 0, 0)


/* These requests are for getting Valgrind itself to print something.
   Possibly with a backtrace.  This is a really ugly hack.  The return value
   is the number of characters printed, excluding the "**<pid>**" part at the
   start and the backtrace (if present). */

/* NOTE(review): '&&' binds tighter than '||', so this condition reads
   as __GNUC__ || (__INTEL_COMPILER && !_MSC_VER).  That appears
   intentional (GCC always supports the attribute; ICC in MSVC-compat
   mode does not) -- confirm before "fixing" the parenthesisation. */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
/* Modern GCC will optimize the static routine out if unused,
   and unused attribute will shut down warnings about it. */
static int VALGRIND_PRINTF(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
/* Print a printf-style message into the Valgrind log.  Returns the
   number of characters printed (0 when not running under Valgrind or
   when compiled with NVALGRIND). */
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF(const char *format, ...)
{
#if defined(NVALGRIND)
   return 0;
#else /* NVALGRIND */
#if defined(_MSC_VER) || defined(__MINGW64__)
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
   /* vargs is passed by reference (VALIST_BY_REF) because a va_list
      may be larger than a machine word -- see the enum comments. */
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* NVALGRIND */
}

#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
/* Like VALGRIND_PRINTF, but also prints a stack backtrace after the
   message. */
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
{
#if defined(NVALGRIND)
   return 0;
#else /* NVALGRIND */
#if defined(_MSC_VER) || defined(__MINGW64__)
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* NVALGRIND */
}


/* These requests allow control to move from the simulated CPU to the
   real CPU, calling an arbitrary function.
5674 5675 Note that the current ThreadId is inserted as the first argument. 5676 So this call: 5677 5678 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2) 5679 5680 requires f to have this signature: 5681 5682 Word f(Word tid, Word arg1, Word arg2) 5683 5684 where "Word" is a word-sized type. 5685 5686 Note that these client requests are not entirely reliable. For example, 5687 if you call a function with them that subsequently calls printf(), 5688 there's a high chance Valgrind will crash. Generally, your prospects of 5689 these working are made higher if the called function does not refer to 5690 any global variables, and does not refer to any libc or other functions 5691 (printf et al). Any kind of entanglement with libc or dynamic linking is 5692 likely to have a bad outcome, for tricky reasons which we've grappled 5693 with a lot in the past. 5694 */ 5695 #define VALGRIND_NON_SIMD_CALL0(_qyy_fn) \ 5696 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \ 5697 VG_USERREQ__CLIENT_CALL0, \ 5698 _qyy_fn, \ 5699 0, 0, 0, 0) 5700 5701 #define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1) \ 5702 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \ 5703 VG_USERREQ__CLIENT_CALL1, \ 5704 _qyy_fn, \ 5705 _qyy_arg1, 0, 0, 0) 5706 5707 #define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2) \ 5708 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \ 5709 VG_USERREQ__CLIENT_CALL2, \ 5710 _qyy_fn, \ 5711 _qyy_arg1, _qyy_arg2, 0, 0) 5712 5713 #define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \ 5714 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \ 5715 VG_USERREQ__CLIENT_CALL3, \ 5716 _qyy_fn, \ 5717 _qyy_arg1, _qyy_arg2, \ 5718 _qyy_arg3, 0) 5719 5720 5721 /* Counts the number of errors that have been recorded by a tool. Nb: 5722 the tool must record the errors with VG_(maybe_record_error)() or 5723 VG_(unique_error)() for them to be counted. 
*/ 5724 #define VALGRIND_COUNT_ERRORS \ 5725 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( \ 5726 0 /* default return */, \ 5727 VG_USERREQ__COUNT_ERRORS, \ 5728 0, 0, 0, 0, 0) 5729 5730 /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing 5731 when heap blocks are allocated in order to give accurate results. This 5732 happens automatically for the standard allocator functions such as 5733 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete, 5734 delete[], etc. 5735 5736 But if your program uses a custom allocator, this doesn't automatically 5737 happen, and Valgrind will not do as well. For example, if you allocate 5738 superblocks with mmap() and then allocates chunks of the superblocks, all 5739 Valgrind's observations will be at the mmap() level and it won't know that 5740 the chunks should be considered separate entities. In Memcheck's case, 5741 that means you probably won't get heap block overrun detection (because 5742 there won't be redzones marked as unaddressable) and you definitely won't 5743 get any leak detection. 5744 5745 The following client requests allow a custom allocator to be annotated so 5746 that it can be handled accurately by Valgrind. 5747 5748 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated 5749 by a malloc()-like function. For Memcheck (an illustrative case), this 5750 does two things: 5751 5752 - It records that the block has been allocated. This means any addresses 5753 within the block mentioned in error messages will be 5754 identified as belonging to the block. It also means that if the block 5755 isn't freed it will be detected by the leak checker. 5756 5757 - It marks the block as being addressable and undefined (if 'is_zeroed' is 5758 not set), or addressable and defined (if 'is_zeroed' is set). This 5759 controls how accesses to the block by the program are handled. 5760 5761 'addr' is the start of the usable block (ie. 
after any 5762 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator 5763 can apply redzones -- these are blocks of padding at the start and end of 5764 each block. Adding redzones is recommended as it makes it much more likely 5765 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is 5766 zeroed (or filled with another predictable value), as is the case for 5767 calloc(). 5768 5769 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a 5770 heap block -- that will be used by the client program -- is allocated. 5771 It's best to put it at the outermost level of the allocator if possible; 5772 for example, if you have a function my_alloc() which calls 5773 internal_alloc(), and the client request is put inside internal_alloc(), 5774 stack traces relating to the heap block will contain entries for both 5775 my_alloc() and internal_alloc(), which is probably not what you want. 5776 5777 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out 5778 custom blocks from within a heap block, B, that has been allocated with 5779 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking 5780 -- the custom blocks will take precedence. 5781 5782 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For 5783 Memcheck, it does two things: 5784 5785 - It records that the block has been deallocated. This assumes that the 5786 block was annotated as having been allocated via 5787 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued. 5788 5789 - It marks the block as being unaddressable. 5790 5791 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a 5792 heap block is deallocated. 5793 5794 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For 5795 Memcheck, it does four things: 5796 5797 - It records that the size of a block has been changed. 
This assumes that 5798 the block was annotated as having been allocated via 5799 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued. 5800 5801 - If the block shrunk, it marks the freed memory as being unaddressable. 5802 5803 - If the block grew, it marks the new area as undefined and defines a red 5804 zone past the end of the new block. 5805 5806 - The V-bits of the overlap between the old and the new block are preserved. 5807 5808 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block 5809 and before deallocation of the old block. 5810 5811 In many cases, these three client requests will not be enough to get your 5812 allocator working well with Memcheck. More specifically, if your allocator 5813 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call 5814 will be necessary to mark the memory as addressable just before the zeroing 5815 occurs, otherwise you'll get a lot of invalid write errors. For example, 5816 you'll need to do this if your allocator recycles freed blocks, but it 5817 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK). 5818 Alternatively, if your allocator reuses freed blocks for allocator-internal 5819 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary. 5820 5821 Really, what's happening is a blurring of the lines between the client 5822 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the 5823 memory should be considered unaddressable to the client program, but the 5824 allocator knows more than the rest of the client program and so may be able 5825 to safely access it. Extra client requests are necessary for Valgrind to 5826 understand the distinction between the allocator and the rest of the 5827 program. 5828 5829 Ignored if addr == 0. 
5830 */ 5831 #define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed) \ 5832 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \ 5833 addr, sizeB, rzB, is_zeroed, 0) 5834 5835 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details. 5836 Ignored if addr == 0. 5837 */ 5838 #define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \ 5839 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK, \ 5840 addr, oldSizeB, newSizeB, rzB, 0) 5841 5842 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details. 5843 Ignored if addr == 0. 5844 */ 5845 #define VALGRIND_FREELIKE_BLOCK(addr, rzB) \ 5846 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK, \ 5847 addr, rzB, 0, 0, 0) 5848 5849 /* Create a memory pool. */ 5850 #define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed) \ 5851 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \ 5852 pool, rzB, is_zeroed, 0, 0) 5853 5854 /* Destroy a memory pool. */ 5855 #define VALGRIND_DESTROY_MEMPOOL(pool) \ 5856 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL, \ 5857 pool, 0, 0, 0, 0) 5858 5859 /* Associate a piece of memory with a memory pool. */ 5860 #define VALGRIND_MEMPOOL_ALLOC(pool, addr, size) \ 5861 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC, \ 5862 pool, addr, size, 0, 0) 5863 5864 /* Disassociate a piece of memory from a memory pool. */ 5865 #define VALGRIND_MEMPOOL_FREE(pool, addr) \ 5866 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE, \ 5867 pool, addr, 0, 0, 0) 5868 5869 /* Disassociate any pieces outside a particular range. */ 5870 #define VALGRIND_MEMPOOL_TRIM(pool, addr, size) \ 5871 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM, \ 5872 pool, addr, size, 0, 0) 5873 5874 /* Resize and/or move a piece associated with a memory pool. 
*/ 5875 #define VALGRIND_MOVE_MEMPOOL(poolA, poolB) \ 5876 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL, \ 5877 poolA, poolB, 0, 0, 0) 5878 5879 /* Resize and/or move a piece associated with a memory pool. */ 5880 #define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size) \ 5881 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE, \ 5882 pool, addrA, addrB, size, 0) 5883 5884 /* Return 1 if a mempool exists, else 0. */ 5885 #define VALGRIND_MEMPOOL_EXISTS(pool) \ 5886 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \ 5887 VG_USERREQ__MEMPOOL_EXISTS, \ 5888 pool, 0, 0, 0, 0) 5889 5890 /* Mark a piece of memory as being a stack. Returns a stack id. */ 5891 #define VALGRIND_STACK_REGISTER(start, end) \ 5892 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \ 5893 VG_USERREQ__STACK_REGISTER, \ 5894 start, end, 0, 0, 0) 5895 5896 /* Unmark the piece of memory associated with a stack id as being a 5897 stack. */ 5898 #define VALGRIND_STACK_DEREGISTER(id) \ 5899 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \ 5900 id, 0, 0, 0, 0) 5901 5902 /* Change the start and end address of the stack id. */ 5903 #define VALGRIND_STACK_CHANGE(id, start, end) \ 5904 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE, \ 5905 id, start, end, 0, 0) 5906 5907 /* Load PDB debug info for Wine PE image_map. */ 5908 #define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta) \ 5909 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \ 5910 fd, ptr, total_size, delta, 0) 5911 5912 /* Map a code address to a source file name and line number. buf64 5913 must point to a 64-byte buffer in the caller's address space. The 5914 result will be dumped in there and is guaranteed to be zero 5915 terminated. If no info is found, the first byte is set to zero. 
*/ 5916 #define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64) \ 5917 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \ 5918 VG_USERREQ__MAP_IP_TO_SRCLOC, \ 5919 addr, buf64, 0, 0, 0) 5920 5921 /* Disable error reporting for this thread. Behaves in a stack like 5922 way, so you can safely call this multiple times provided that 5923 VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times 5924 to re-enable reporting. The first call of this macro disables 5925 reporting. Subsequent calls have no effect except to increase the 5926 number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable 5927 reporting. Child threads do not inherit this setting from their 5928 parents -- they are always created with reporting enabled. */ 5929 #define VALGRIND_DISABLE_ERROR_REPORTING \ 5930 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \ 5931 1, 0, 0, 0, 0) 5932 5933 /* Re-enable error reporting, as per comments on 5934 VALGRIND_DISABLE_ERROR_REPORTING. */ 5935 #define VALGRIND_ENABLE_ERROR_REPORTING \ 5936 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \ 5937 -1, 0, 0, 0, 0) 5938 5939 /* Execute a monitor command from the client program. 5940 If a connection is opened with GDB, the output will be sent 5941 according to the output mode set for vgdb. 5942 If no connection is opened, output will go to the log output. 5943 Returns 1 if command not recognised, 0 otherwise. 
*/ 5944 #define VALGRIND_MONITOR_COMMAND(command) \ 5945 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \ 5946 command, 0, 0, 0, 0) 5947 5948 5949 #undef PLAT_x86_darwin 5950 #undef PLAT_amd64_darwin 5951 #undef PLAT_x86_win32 5952 #undef PLAT_amd64_win64 5953 #undef PLAT_x86_linux 5954 #undef PLAT_amd64_linux 5955 #undef PLAT_ppc32_linux 5956 #undef PLAT_ppc64_linux 5957 #undef PLAT_arm_linux 5958 #undef PLAT_s390x_linux 5959 #undef PLAT_mips32_linux 5960 #undef PLAT_mips64_linux 5961 5962 #endif /* __VALGRIND_H */ 5963 5964 #pragma clang diagnostic pop 5965