[PATCH 11/27] [AARCH64] Syscalls for ILP32 are always passed via 64-bit values.

From: Yury Norov
Date: Tue Jun 21 2016 - 01:10:59 EST


From: Andrew Pinski <apinski@xxxxxxxxxx>

This patch adds support for ILP32 syscalls, sign- and zero-extending
arguments where needed. Unlike LP64, pointers are 32-bit and need to be
zero-extended rather than sign-extended as the code would otherwise do.
We take advantage of ssize_t being long rather than int for ILP32 to get
this right.
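
For reference, the standalone sketch below (not part of the patch; the
example address and the -mabi=ilp32 note are only illustrative) shows the
widening behaviour the ARGIFY macro is after: signed scalars must be
sign-extended to 64 bits, while 32-bit pointers must be zero-extended.

  /* Illustration only -- any GCC-compatible compiler; the ILP32 type
     sizes apply when building with -mabi=ilp32.  */
  #include <stdio.h>
  #include <stdint.h>

  int main (void)
  {
    int fd = -1;                                   /* signed scalar.  */
    void *buf = (void *) (uintptr_t) 0xfffff000u;  /* 32-bit pointer.  */

    /* Sign extension keeps a negative int as all-ones in the upper
       bits, so the kernel sees the same negative value.  */
    unsigned long long x0 = (unsigned long long) (long long) fd;

    /* Widening a pointer through an unsigned type avoids turning a
       high 32-bit address into a negative 64-bit value.  */
    unsigned long long x1 = (unsigned long long) (uintptr_t) buf;

    printf ("fd  -> %#llx\n", x0);   /* 0xffffffffffffffff */
    printf ("buf -> %#llx\n", x1);   /* 0xfffff000 */
    return 0;
  }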

* sysdeps/unix/sysv/linux/aarch64/sysdep.h
(INLINE_VSYSCALL): Use long long instead of long.
(INTERNAL_VSYSCALL): Likewise.
(INLINE_SYSCALL): Likewise.
(INTERNAL_SYSCALL_RAW): Likewise.
(ARGIFY): New macro.
(LOAD_ARGS_0): Use long long instead of long.
(LOAD_ARGS_1): Use long long instead of long
and use ARGIFY.
(LOAD_ARGS_2): Likewise.
(LOAD_ARGS_3): Likewise.
(LOAD_ARGS_4): Likewise.
(LOAD_ARGS_5): Likewise.
(LOAD_ARGS_6): Likewise.
(LOAD_ARGS_7): Likewise.

Signed-off-by: Yury Norov <ynorov@xxxxxxxxxxxxxxxxxx>
---
sysdeps/unix/sysv/linux/aarch64/sysdep.h | 48 ++++++++++++++++++++------------
1 file changed, 30 insertions(+), 18 deletions(-)

diff --git a/sysdeps/unix/sysv/linux/aarch64/sysdep.h b/sysdeps/unix/sysv/linux/aarch64/sysdep.h
index 1ffabc2..42f89c8 100644
--- a/sysdeps/unix/sysv/linux/aarch64/sysdep.h
+++ b/sysdeps/unix/sysv/linux/aarch64/sysdep.h
@@ -161,11 +161,11 @@
call. */
# undef INLINE_SYSCALL
# define INLINE_SYSCALL(name, nr, args...) \
- ({ unsigned long _sys_result = INTERNAL_SYSCALL (name, , nr, args); \
+ ({ unsigned long long _sys_result = INTERNAL_SYSCALL (name, , nr, args); \
if (__builtin_expect (INTERNAL_SYSCALL_ERROR_P (_sys_result, ), 0))\
{ \
__set_errno (INTERNAL_SYSCALL_ERRNO (_sys_result, )); \
- _sys_result = (unsigned long) -1; \
+ _sys_result = (unsigned long long) -1; \
} \
(long) _sys_result; })

@@ -174,10 +174,10 @@

# undef INTERNAL_SYSCALL_RAW
# define INTERNAL_SYSCALL_RAW(name, err, nr, args...) \
- ({ long _sys_result; \
+ ({ long long _sys_result; \
{ \
LOAD_ARGS_##nr (args) \
- register long _x8 asm ("x8") = (name); \
+ register long long _x8 asm ("x8") = (name); \
asm volatile ("svc 0 // syscall " # name \
: "=r" (_x0) : "r"(_x8) ASM_ARGS_##nr : "memory"); \
_sys_result = _x0; \
@@ -199,36 +199,48 @@
# undef INTERNAL_SYSCALL_ERRNO
# define INTERNAL_SYSCALL_ERRNO(val, err) (-(val))

+/* Convert X to a long long, without losing any bits if it is one
+ already or warning if it is a 32-bit pointer. This zero extends
+ 32-bit pointers and sign extends other signed types. Note this only
+ works because ssize_t is long and short-short is promoted to int. */
+#define ARGIFY(X) \
+ ((unsigned long long) \
+ __builtin_choose_expr(__builtin_types_compatible_p(__typeof__(X), __typeof__((X) - (X))), \
+ (X), \
+ __builtin_choose_expr(__builtin_types_compatible_p(int, __typeof__((X) - (X))), \
+ (X), \
+ (unsigned long)(X))))
+
# define LOAD_ARGS_0() \
- register long _x0 asm ("x0");
+ register long long _x0 asm ("x0");
# define LOAD_ARGS_1(x0) \
- long _x0tmp = (long) (x0); \
+ long long _x0tmp = ARGIFY (x0); \
LOAD_ARGS_0 () \
_x0 = _x0tmp;
# define LOAD_ARGS_2(x0, x1) \
- long _x1tmp = (long) (x1); \
+ long long _x1tmp = ARGIFY (x1); \
LOAD_ARGS_1 (x0) \
- register long _x1 asm ("x1") = _x1tmp;
+ register long long _x1 asm ("x1") = _x1tmp;
# define LOAD_ARGS_3(x0, x1, x2) \
- long _x2tmp = (long) (x2); \
+ long long _x2tmp = ARGIFY (x2); \
LOAD_ARGS_2 (x0, x1) \
- register long _x2 asm ("x2") = _x2tmp;
+ register long long _x2 asm ("x2") = _x2tmp;
# define LOAD_ARGS_4(x0, x1, x2, x3) \
- long _x3tmp = (long) (x3); \
+ long long _x3tmp = ARGIFY (x3); \
LOAD_ARGS_3 (x0, x1, x2) \
- register long _x3 asm ("x3") = _x3tmp;
+ register long long _x3 asm ("x3") = _x3tmp;
# define LOAD_ARGS_5(x0, x1, x2, x3, x4) \
- long _x4tmp = (long) (x4); \
+ long long _x4tmp = ARGIFY (x4); \
LOAD_ARGS_4 (x0, x1, x2, x3) \
- register long _x4 asm ("x4") = _x4tmp;
+ register long long _x4 asm ("x4") = _x4tmp;
# define LOAD_ARGS_6(x0, x1, x2, x3, x4, x5) \
- long _x5tmp = (long) (x5); \
+ long long _x5tmp = ARGIFY (x5); \
LOAD_ARGS_5 (x0, x1, x2, x3, x4) \
- register long _x5 asm ("x5") = _x5tmp;
+ register long long _x5 asm ("x5") = _x5tmp;
# define LOAD_ARGS_7(x0, x1, x2, x3, x4, x5, x6)\
- long _x6tmp = (long) (x6); \
+ long long _x6tmp = ARGIFY (x6); \
LOAD_ARGS_6 (x0, x1, x2, x3, x4, x5) \
- register long _x6 asm ("x6") = _x6tmp;
+ register long long _x6 asm ("x6") = _x6tmp;

# define ASM_ARGS_0
# define ASM_ARGS_1 , "r" (_x0)
--
2.7.4