AArch64: Remove PTR_ARG/SIZE_ARG defines

This series removes various ILP32 defines that are
no longer needed.

Remove PTR_ARG/SIZE_ARG.

Reviewed-by: Adhemerval Zanella <adhemerval.zanella@linaro.org>
This commit is contained in:
Wilco Dijkstra 2024-12-31 18:07:36 +00:00
parent be0cfd848d
commit ce2f26a22e
35 changed files with 0 additions and 106 deletions

View File

@@ -47,8 +47,6 @@ ENTRY (__longjmp)
cfi_offset(d14, JB_D14<<3)
cfi_offset(d15, JB_D15<<3)
PTR_ARG (0)
#if IS_IN(libc)
/* Disable ZA state of SME in libc.a and libc.so, but not in ld.so. */
# if HAVE_AARCH64_PAC_RET

View File

@@ -40,9 +40,6 @@
#define zva_val x4
ENTRY (__libc_mtag_tag_region)
PTR_ARG (0)
SIZE_ARG (1)
add dstend, dstin, count
cmp count, 96

View File

@@ -40,9 +40,6 @@
#define zva_val x4
ENTRY (__libc_mtag_tag_zero_region)
PTR_ARG (0)
SIZE_ARG (1)
add dstend, dstin, count
cmp count, 96

View File

@@ -28,7 +28,6 @@ ENTRY (_start)
/* Load and relocate all library dependencies. */
mov x0, sp
PTR_ARG (0)
bl _dl_start
/* Returns user entry point in x0. */
mov PTR_REG (21), PTR_REG (0)

View File

@@ -75,7 +75,6 @@
.align 2
_dl_tlsdesc_return:
BTI_C
PTR_ARG (0)
ldr PTR_REG (0), [x0, #PTR_SIZE]
RET
cfi_endproc
@@ -99,7 +98,6 @@ _dl_tlsdesc_undefweak:
BTI_C
str x1, [sp, #-16]!
cfi_adjust_cfa_offset (16)
PTR_ARG (0)
ldr PTR_REG (0), [x0, #PTR_SIZE]
mrs x1, tpidr_el0
sub PTR_REG (0), PTR_REG (0), PTR_REG (1)
@@ -145,7 +143,6 @@ _dl_tlsdesc_undefweak:
.align 2
_dl_tlsdesc_dynamic:
BTI_C
PTR_ARG (0)
/* Save just enough registers to support fast path, if we fall
into slow path we will save additional registers. */

View File

@@ -57,8 +57,6 @@
exactly which byte matched. */
ENTRY (MEMCHR)
PTR_ARG (0)
SIZE_ARG (2)
bic src, srcin, 15
cbz cntin, L(nomatch)
ld1 {vdata.16b}, [src]

View File

@@ -44,10 +44,6 @@
ENTRY (memcmp)
PTR_ARG (0)
PTR_ARG (1)
SIZE_ARG (2)
cmp limit, 16
b.lo L(less16)
ldp data1, data3, [src1]

View File

@@ -70,10 +70,6 @@
from the end. */
ENTRY (MEMCPY)
PTR_ARG (0)
PTR_ARG (1)
SIZE_ARG (2)
add srcend, src, count
add dstend, dstin, count
cmp count, 128
@@ -187,10 +183,6 @@ libc_hidden_builtin_def (MEMCPY)
ENTRY (MEMMOVE)
PTR_ARG (0)
PTR_ARG (1)
SIZE_ARG (2)
add srcend, src, count
add dstend, dstin, count
cmp count, 128

View File

@@ -55,8 +55,6 @@
exactly which byte matched. */
ENTRY (__memrchr)
PTR_ARG (0)
SIZE_ARG (2)
add end, srcin, cntin
sub endm1, end, 1
bic src, endm1, 15

View File

@@ -39,9 +39,6 @@
#define dstend2 x5
ENTRY (MEMSET)
PTR_ARG (0)
SIZE_ARG (2)
dup v0.16B, valw
cmp count, 16
b.lo L(set_small)

View File

@@ -60,9 +60,6 @@
ENTRY (__memchr_nosimd)
PTR_ARG (0)
SIZE_ARG (2)
/* Do not dereference srcin if no bytes to compare. */
cbz cntin, L(none_chr)

View File

@@ -96,10 +96,6 @@
ENTRY (__memcpy_a64fx)
PTR_ARG (0)
PTR_ARG (1)
SIZE_ARG (2)
cntb vlen
cmp n, vlen, lsl 1
b.hi L(copy_small)
@@ -236,10 +232,6 @@ END (__memcpy_a64fx)
ENTRY_ALIGN (__memmove_a64fx, 4)
PTR_ARG (0)
PTR_ARG (1)
SIZE_ARG (2)
/* Fast case for up to 2 vectors. */
cntb vlen
cmp n, vlen, lsl 1

View File

@@ -26,10 +26,6 @@
*/
ENTRY (__memcpy_mops)
PTR_ARG (0)
PTR_ARG (1)
SIZE_ARG (2)
mov x3, x0
.inst 0x19010443 /* cpyfp [x3]!, [x1]!, x2! */
.inst 0x19410443 /* cpyfm [x3]!, [x1]!, x2! */

View File

@@ -63,10 +63,6 @@
ENTRY (__memmove_oryon1)
PTR_ARG (0)
PTR_ARG (1)
SIZE_ARG (2)
sub tmp1, dstin, src
cmp count, 96
ccmp tmp1, count, 2, hi
@@ -77,10 +73,6 @@ END (__memmove_oryon1)
ENTRY (__memcpy_oryon1)
PTR_ARG (0)
PTR_ARG (1)
SIZE_ARG (2)
add srcend, src, count
add dstend, dstin, count
cmp count, 16

View File

@@ -61,10 +61,6 @@
.arch armv8.2-a+sve
ENTRY (__memcpy_sve)
PTR_ARG (0)
PTR_ARG (1)
SIZE_ARG (2)
cmp count, 128
b.hi L(copy_long)
cntb vlen
@@ -144,10 +140,6 @@ END (__memcpy_sve)
ENTRY (__memmove_sve)
PTR_ARG (0)
PTR_ARG (1)
SIZE_ARG (2)
cmp count, 128
b.hi L(move_long)
cntb vlen

View File

@@ -26,10 +26,6 @@
*/
ENTRY (__memmove_mops)
PTR_ARG (0)
PTR_ARG (1)
SIZE_ARG (2)
mov x3, x0
.inst 0x1d010443 /* cpyp [x3]!, [x1]!, x2! */
.inst 0x1d410443 /* cpym [x3]!, [x1]!, x2! */

View File

@@ -55,8 +55,6 @@
#define BTI_C
ENTRY (__memset_a64fx)
PTR_ARG (0)
SIZE_ARG (2)
cntb vector_length
dup z0.b, valw

View File

@@ -34,9 +34,6 @@
ENTRY (__memset_emag)
PTR_ARG (0)
SIZE_ARG (2)
bfi valw, valw, 8, 8
bfi valw, valw, 16, 16
bfi val, val, 32, 32

View File

@@ -33,9 +33,6 @@
ENTRY (__memset_kunpeng)
PTR_ARG (0)
SIZE_ARG (2)
dup v0.16B, valw
add dstend, dstin, count

View File

@@ -26,9 +26,6 @@
*/
ENTRY (__memset_mops)
PTR_ARG (0)
SIZE_ARG (2)
mov x3, x0
.inst 0x19c10443 /* setp [x3]!, x2!, x1 */
.inst 0x19c14443 /* setm [x3]!, x2!, x1 */

View File

@@ -33,9 +33,6 @@
ENTRY (__memset_oryon1)
PTR_ARG (0)
SIZE_ARG (2)
bfi valw, valw, 8, 8
bfi valw, valw, 16, 16
bfi val, val, 32, 32

View File

@@ -87,7 +87,6 @@
character, return the length, if not, continue in the main loop. */
ENTRY (__strlen_asimd)
PTR_ARG (0)
and tmp1, srcin, MIN_PAGE_SIZE - 1
cmp tmp1, MIN_PAGE_SIZE - 32
b.hi L(page_cross)

View File

@@ -34,8 +34,6 @@ END (_setjmp)
libc_hidden_def (_setjmp)
ENTRY (__sigsetjmp)
PTR_ARG (0)
1:
stp x19, x20, [x0, #JB_X19<<3]
stp x21, x22, [x0, #JB_X21<<3]

View File

@@ -52,7 +52,6 @@
If it is not a multiple of 4, there was no match. */
ENTRY (strchr)
PTR_ARG (0)
bic src, srcin, 15
dup vrepchr.16b, chrin
ld1 {vdata.16b}, [src]

View File

@@ -51,7 +51,6 @@
exactly which byte matched. */
ENTRY (__strchrnul)
PTR_ARG (0)
bic src, srcin, 15
dup vrepchr.16b, chrin
ld1 {vdata.16b}, [src]

View File

@@ -62,8 +62,6 @@
NUL too in big-endian, byte-reverse the data before the NUL check. */
ENTRY(strcmp)
PTR_ARG (0)
PTR_ARG (1)
sub off2, src2, src1
mov zeroones, REP8_01
and tmp, src1, 7

View File

@@ -69,8 +69,6 @@
exactly which byte matched. */
ENTRY (STRCPY)
PTR_ARG (0)
PTR_ARG (1)
bic src, srcin, 15
ld1 {vdata.16b}, [src]
cmeq vhas_nul.16b, vdata.16b, 0

View File

@@ -49,7 +49,6 @@
identifies the first zero byte. */
ENTRY (STRLEN)
PTR_ARG (0)
bic src, srcin, 15
ld1 {vdata.16b}, [src]
cmeq vhas_nul.16b, vdata.16b, 0

View File

@@ -49,8 +49,6 @@
identifies the first zero byte. */
ENTRY (__strnlen)
PTR_ARG (0)
SIZE_ARG (1)
bic src, srcin, 15
cbz cntin, L(nomatch)
ld1 {vdata.16b}, [src]

View File

@@ -55,7 +55,6 @@
if the relevant byte matched the NUL end of string. */
ENTRY (strrchr)
PTR_ARG (0)
bic src, srcin, 15
dup vrepchr.16b, chrin
movi vrepmask.16b, 0x33

View File

@@ -33,12 +33,6 @@
*/
.text
ENTRY(__clone)
PTR_ARG (0)
PTR_ARG (1)
PTR_ARG (3)
PTR_ARG (4)
PTR_ARG (5)
PTR_ARG (6)
/* Save args for the child. */
mov x10, x0
mov x11, x2

View File

@@ -36,10 +36,6 @@
.text
ENTRY(__clone3)
PTR_ARG (0)
PTR_ARG (1)
PTR_ARG (3)
PTR_ARG (4)
/* Save args for the child. */
mov x10, x0 /* cl_args */
mov x11, x2 /* func */

View File

@@ -30,7 +30,6 @@
.text
ENTRY(__getcontext)
PTR_ARG (0)
/* The saved context will return to the getcontext() call point
with a return value of 0 */
str xzr, [x0, oX0 + 0 * SZREG]

View File

@@ -34,7 +34,6 @@
.text
ENTRY (__setcontext)
PTR_ARG (0)
/* Save a copy of UCP. */
mov x9, x0

View File

@@ -27,7 +27,6 @@
.text
ENTRY(__swapcontext)
PTR_ARG (0)
/* Set the value returned when swapcontext() returns in this context.
And set up x1 to become the return address of the caller, so we
can return there with a normal RET instead of an indirect jump. */