diff --git a/lib/csu/aarch64c/crt1_c.c b/lib/csu/aarch64c/crt1_c.c
index ca3cf8d39ee3..01a6ed4622b5 100644
--- a/lib/csu/aarch64c/crt1_c.c
+++ b/lib/csu/aarch64c/crt1_c.c
@@ -68,12 +68,11 @@ Elf_Auxinfo *__auxargs;
  * This restriction only applies to statically linked binaries since the dynamic
  * linker takes care of initialization otherwise.
  */
-void
-_start(void *auxv,
+__dead2 static void
+__start(void *auxv,
     void (*cleanup)(void),			/* from shared loader */
     struct Struct_Obj_Entry *obj)		/* from shared loader */
 {
-	__asm__ volatile(".cfi_undefined c30");
 	int argc = 0;
 	char **argv = NULL;
 	char **env = NULL;
@@ -131,3 +130,20 @@ _start(void *auxv,
 
 	__libc_start1(argc, argv, env, cleanup, main, data_cap, code_cap);
 }
+
+/*
+ * The real entry point _start just sets the unwind info for C30 to undefined,
+ * which tells libunwind to stop unwinding, and then calls the real __start.
+ * This is needed because ".cfi_undefined" inline assembly in a non-naked
+ * function could be overridden by the default unwinding information.
+ */
+__attribute__((naked, used)) void
+_start(void *auxv,
+    void (*cleanup)(void),			/* from shared loader */
+    struct Struct_Obj_Entry *obj)		/* from shared loader */
+{
+	__asm__(
+	    ".cfi_undefined c30\n"
+	    "bl %0"
+	    :: "i"(__start));
+}
diff --git a/lib/csu/riscv/crt1_c.c b/lib/csu/riscv/crt1_c.c
index 41ae7cff9f2a..04d6e7b26749 100644
--- a/lib/csu/riscv/crt1_c.c
+++ b/lib/csu/riscv/crt1_c.c
@@ -57,7 +57,6 @@ void __start(int argc, char **argv, char **env, void (*cleanup)(void)) __dead2;
 void
 __start(int argc, char **argv, char **env, void (*cleanup)(void))
 {
-	__asm__ volatile(".cfi_undefined ra");
 #ifdef SHOULD_PROCESS_CAP_RELOCS
 	/*
 	 * Initialize __cap_relocs for static executables. The run-time linker
diff --git a/lib/csu/riscv/crt1_s.S b/lib/csu/riscv/crt1_s.S
index 8f77d2358ae7..bf69d1435bb8 100644
--- a/lib/csu/riscv/crt1_s.S
+++ b/lib/csu/riscv/crt1_s.S
@@ -35,6 +35,7 @@
 #include
 
 ENTRY(_start)
+	.cfi_undefined ra	# Stop unwinding here
 	mv	a3, a2		# cleanup
 	addi	a1, a0, 8	# get argv
 	ld	a0, 0(a0)	# load argc
diff --git a/lib/csu/riscv64c/crt1_c.c b/lib/csu/riscv64c/crt1_c.c
index 3108ebf562c9..5faea8a0956d 100644
--- a/lib/csu/riscv64c/crt1_c.c
+++ b/lib/csu/riscv64c/crt1_c.c
@@ -68,12 +68,11 @@ Elf_Auxinfo *__auxargs;
  * This restriction only applies to statically linked binaries since the dynamic
  * linker takes care of initialization otherwise.
  */
-void
-_start(void *auxv,
+__dead2 static void
+__start(void *auxv,
     void (*cleanup)(void),			/* from shared loader */
     struct Struct_Obj_Entry *obj)		/* from shared loader */
 {
-	__asm__ volatile(".cfi_undefined cra");
 	int argc = 0;
 	char **argv = NULL;
 	char **env = NULL;
@@ -128,3 +127,20 @@ _start(void *auxv,
 
 	__libc_start1(argc, argv, env, cleanup, main, NULL, NULL);
 }
+
+/*
+ * The real entry point _start just sets the unwind info for CRA to undefined,
+ * which tells libunwind to stop unwinding, and then calls the real __start.
+ * This is needed because ".cfi_undefined" inline assembly in a non-naked
+ * function could be overridden by the default unwinding information.
+ */
+__attribute__((naked, used)) void
+_start(void *auxv,
+    void (*cleanup)(void),			/* from shared loader */
+    struct Struct_Obj_Entry *obj)		/* from shared loader */
+{
+	__asm__(
+	    ".cfi_undefined cra\n"
+	    "ccall %0"
+	    :: "i"(__start));
+}
diff --git a/libexec/rtld-elf/riscv/rtld_start.S b/libexec/rtld-elf/riscv/rtld_start.S
index e220a158f695..9246b6622fe8 100644
--- a/libexec/rtld-elf/riscv/rtld_start.S
+++ b/libexec/rtld-elf/riscv/rtld_start.S
@@ -38,6 +38,49 @@
 #include
 #endif
 
+#define	INT_SIZE	8
+#define	INT(x)		x
+#define	LOAD_INT	PTR(ld)
+#define	STORE_INT	PTR(sd)
+
+#ifdef __CHERI_PURE_CAPABILITY__
+#define	LOAD_PTR	LOAD_CAP
+#define	STORE_PTR	STORE_CAP
+#define	ADD_PTR		cincoffset
+#define	MV_PTR		cmove
+#define	PTR_SIZE	CAP_SIZE
+#define	PTR(x)		CAP(x)
+#else
+#define	LOAD_PTR	LOAD_INT
+#define	STORE_PTR	STORE_INT
+#define	ADD_PTR		add
+#define	MV_PTR		mv
+#define	PTR_SIZE	INT_SIZE
+#define	PTR(x)		INT(x)
+#endif
+
+#define	JR_PTR		PTR(jr)
+
+#if __has_feature(capabilities)
+#define	LOAD_CAP	PTR(lc)
+#define	STORE_CAP	PTR(sc)
+#define	CAP_SIZE	16
+#define	CAP(x)		c ## x
+#else
+#define	LOAD_CAP	ld
+#define	STORE_CAP	sd
+#define	CAP_SIZE	8
+#define	CAP(x)		x
+#endif
+
+#ifdef __riscv_float_abi_double
+#define	FLT_SIZE	8
+#define	LOAD_FLT	PTR(fld)
+#define	STORE_FLT	PTR(fsd)
+#else
+#define	FLT_SIZE	0
+#endif
+
 /*
  * Pure-capability:
  *
@@ -56,6 +99,7 @@
  */
 
 ENTRY(.rtld_start)
+	.cfi_undefined PTR(ra)
#ifdef __CHERI_PURE_CAPABILITY__
 	cmove	cs0, ca0	/* Put aux in a callee-saved register */
 	cmove	cs1, csp	/* And the stack pointer */
@@ -102,49 +146,6 @@ ENTRY(.rtld_start)
 #endif /* defined(__CHERI_PURE_CAPABILITY__) */
 END(.rtld_start)
 
-#define	INT_SIZE	8
-#define	INT(x)		x
-#define	LOAD_INT	PTR(ld)
-#define	STORE_INT	PTR(sd)
-
-#ifdef __CHERI_PURE_CAPABILITY__
-#define	LOAD_PTR	LOAD_CAP
-#define	STORE_PTR	STORE_CAP
-#define	ADD_PTR		cincoffset
-#define	MV_PTR		cmove
-#define	PTR_SIZE	CAP_SIZE
-#define	PTR(x)		CAP(x)
-#else
-#define	LOAD_PTR	LOAD_INT
-#define	STORE_PTR	STORE_INT
-#define	ADD_PTR		add
-#define	MV_PTR		mv
-#define	PTR_SIZE	INT_SIZE
-#define	PTR(x)		INT(x)
-#endif
-
-#define	JR_PTR		PTR(jr)
-
-#if __has_feature(capabilities)
-#define	LOAD_CAP	PTR(lc)
-#define	STORE_CAP	PTR(sc)
-#define	CAP_SIZE	16
-#define	CAP(x)		c ## x
-#else
-#define	LOAD_CAP	ld
-#define	STORE_CAP	sd
-#define	CAP_SIZE	8
-#define	CAP(x)		x
-#endif
-
-#ifdef __riscv_float_abi_double
-#define	FLT_SIZE	8
-#define	LOAD_FLT	PTR(fld)
-#define	STORE_FLT	PTR(fsd)
-#else
-#define	FLT_SIZE	0
-#endif
-
 /*
  * t0 = obj pointer
  * t1 = reloc offset