libc: Add ftruncate64 and improve 64-bit parameter syscall handling.
This patch improves the handling of 64-bit parameters in syscalls on ARM.
The ARM EABI mandates that 64-bit quantities be passed in even/odd register
pairs, which requires special treatment.
This allows us to simplify our implementations of pread() and pwrite()
and remove the C stubs for pread64() and pwrite64().
Also add ftruncate64() to <unistd.h>.
Change-Id: I407e2fd223ba0093dd2d0b04c6152fadfc9ce3ef
Bug 3107933
diff --git a/libc/arch-sh/syscalls.mk b/libc/arch-sh/syscalls.mk
index 493f437..9575905 100644
--- a/libc/arch-sh/syscalls.mk
+++ b/libc/arch-sh/syscalls.mk
@@ -45,8 +45,8 @@
syscall_src += arch-sh/syscalls/acct.S
syscall_src += arch-sh/syscalls/read.S
syscall_src += arch-sh/syscalls/write.S
-syscall_src += arch-sh/syscalls/__pread64.S
-syscall_src += arch-sh/syscalls/__pwrite64.S
+syscall_src += arch-sh/syscalls/pread64.S
+syscall_src += arch-sh/syscalls/pwrite64.S
syscall_src += arch-sh/syscalls/__open.S
syscall_src += arch-sh/syscalls/__openat.S
syscall_src += arch-sh/syscalls/close.S
@@ -73,6 +73,7 @@
syscall_src += arch-sh/syscalls/dup2.S
syscall_src += arch-sh/syscalls/select.S
syscall_src += arch-sh/syscalls/ftruncate.S
+syscall_src += arch-sh/syscalls/ftruncate64.S
syscall_src += arch-sh/syscalls/getdents.S
syscall_src += arch-sh/syscalls/fsync.S
syscall_src += arch-sh/syscalls/fdatasync.S
diff --git a/libc/arch-sh/syscalls/__pwrite64.S b/libc/arch-sh/syscalls/ftruncate64.S
similarity index 63%
copy from libc/arch-sh/syscalls/__pwrite64.S
copy to libc/arch-sh/syscalls/ftruncate64.S
index a722242..f4c7c1e 100644
--- a/libc/arch-sh/syscalls/__pwrite64.S
+++ b/libc/arch-sh/syscalls/ftruncate64.S
@@ -2,22 +2,19 @@
#include <sys/linux-syscalls.h>
.text
- .type __pwrite64, @function
- .globl __pwrite64
+ .type ftruncate64, @function
+ .globl ftruncate64
.align 4
-__pwrite64:
-
- /* get ready for additonal arg */
- mov.l @r15, r0
+ftruncate64:
/* invoke trap */
mov.l 0f, r3 /* trap num */
- trapa #(5 + 0x10)
+ trapa #(3 + 0x10)
/* check return value */
cmp/pz r0
- bt __NR_pwrite64_end
+ bt __NR_ftruncate64_end
/* keep error number */
sts.l pr, @-r15
@@ -26,10 +23,10 @@
mov r0, r4
lds.l @r15+, pr
-__NR_pwrite64_end:
+__NR_ftruncate64_end:
rts
nop
.align 2
-0: .long __NR_pwrite64
+0: .long __NR_ftruncate64
1: .long __set_syscall_errno
diff --git a/libc/arch-sh/syscalls/__pread64.S b/libc/arch-sh/syscalls/pread64.S
similarity index 89%
rename from libc/arch-sh/syscalls/__pread64.S
rename to libc/arch-sh/syscalls/pread64.S
index 474add3..702a402 100644
--- a/libc/arch-sh/syscalls/__pread64.S
+++ b/libc/arch-sh/syscalls/pread64.S
@@ -2,11 +2,11 @@
#include <sys/linux-syscalls.h>
.text
- .type __pread64, @function
- .globl __pread64
+ .type pread64, @function
+ .globl pread64
.align 4
-__pread64:
+pread64:
/* get ready for additonal arg */
mov.l @r15, r0
diff --git a/libc/arch-sh/syscalls/__pwrite64.S b/libc/arch-sh/syscalls/pwrite64.S
similarity index 89%
rename from libc/arch-sh/syscalls/__pwrite64.S
rename to libc/arch-sh/syscalls/pwrite64.S
index a722242..3f6c192 100644
--- a/libc/arch-sh/syscalls/__pwrite64.S
+++ b/libc/arch-sh/syscalls/pwrite64.S
@@ -2,11 +2,11 @@
#include <sys/linux-syscalls.h>
.text
- .type __pwrite64, @function
- .globl __pwrite64
+ .type pwrite64, @function
+ .globl pwrite64
.align 4
-__pwrite64:
+pwrite64:
/* get ready for additonal arg */
mov.l @r15, r0