@@ -1990,13 +1990,18 @@ mono_arch_allocate_vars (MonoCompile *cfg)
1990
1990
}
1991
1991
case ArgSwiftError : {
1992
1992
ins -> opcode = OP_REGOFFSET ;
1993
- ins -> inst_basereg = cfg -> frame_reg ;
1994
- ins -> inst_offset = ainfo -> offset + ARGS_OFFSET ;
1995
1993
offset = ALIGN_TO (offset , sizeof (target_mgreg_t ));
1994
+ ins -> inst_basereg = cfg -> frame_reg ;
1995
+ ins -> inst_offset = offset ;
1996
1996
offset += sizeof (target_mgreg_t );
1997
1997
1998
1998
cfg -> arch .swift_error_var = ins ;
1999
- cfg -> used_int_regs |= (size_t )(1 << AMD64_R12 );
1999
+
2000
+ /* In the n2m case, the error register functions as an extra return register
2001
+ * and is thus not treated as callee-saved.
2002
+ */
2003
+ if (cfg -> method -> wrapper_type == MONO_WRAPPER_MANAGED_TO_NATIVE )
2004
+ cfg -> used_int_regs |= (size_t )(1 << AMD64_R12 );
2000
2005
}
2001
2006
break ;
2002
2007
default :
@@ -4280,8 +4285,13 @@ emit_move_return_value (MonoCompile *cfg, MonoInst *ins, guint8 *code)
4280
4285
guint32 quad ;
4281
4286
4282
4287
if (cfg -> arch .swift_error_var ) {
4283
- amd64_mov_reg_membase (code , AMD64_R11 , cfg -> arch .swift_error_var -> inst_basereg , cfg -> arch .swift_error_var -> inst_offset , sizeof (target_mgreg_t ));
4284
- amd64_mov_membase_reg (code , AMD64_R11 , 0 , AMD64_R12 , sizeof (target_mgreg_t ));
4288
+ if (cfg -> method -> wrapper_type == MONO_WRAPPER_MANAGED_TO_NATIVE ) {
4289
+ amd64_mov_reg_membase (code , AMD64_R11 , cfg -> arch .swift_error_var -> inst_basereg , cfg -> arch .swift_error_var -> inst_offset , sizeof (target_mgreg_t ));
4290
+ amd64_mov_membase_reg (code , AMD64_R11 , 0 , AMD64_R12 , sizeof (target_mgreg_t ));
4291
+ }
4292
+ else if (cfg -> method -> wrapper_type == MONO_WRAPPER_NATIVE_TO_MANAGED ) {
4293
+ amd64_mov_reg_membase (code , AMD64_R12 , cfg -> arch .swift_error_var -> inst_basereg , cfg -> arch .swift_error_var -> inst_offset , sizeof (target_mgreg_t ));
4294
+ }
4285
4295
}
4286
4296
4287
4297
/* Move return value to the target register */
@@ -8248,11 +8258,17 @@ MONO_RESTORE_WARNING
8248
8258
amd64_mov_membase_reg (code , ins -> inst_basereg , ins -> inst_offset , ainfo -> reg , 8 );
8249
8259
break ;
8250
8260
case ArgSwiftError :
8251
- if (ainfo -> offset ) {
8252
- amd64_mov_reg_membase (code , AMD64_R11 , AMD64_RBP , ARGS_OFFSET + ainfo -> offset , 8 );
8253
- amd64_mov_membase_reg (code , cfg -> arch .swift_error_var -> inst_basereg , cfg -> arch .swift_error_var -> inst_offset , AMD64_R11 , sizeof (target_mgreg_t ));
8254
- } else {
8255
- amd64_mov_membase_reg (code , cfg -> arch .swift_error_var -> inst_basereg , cfg -> arch .swift_error_var -> inst_offset , ainfo -> reg , sizeof (target_mgreg_t ));
8261
+ if (cfg -> method -> wrapper_type == MONO_WRAPPER_MANAGED_TO_NATIVE ) {
8262
+ if (ainfo -> offset ) {
8263
+ amd64_mov_reg_membase (code , AMD64_R11 , AMD64_RBP , ARGS_OFFSET + ainfo -> offset , 8 );
8264
+ amd64_mov_membase_reg (code , cfg -> arch .swift_error_var -> inst_basereg , cfg -> arch .swift_error_var -> inst_offset , AMD64_R11 , sizeof (target_mgreg_t ));
8265
+ } else {
8266
+ amd64_mov_membase_reg (code , cfg -> arch .swift_error_var -> inst_basereg , cfg -> arch .swift_error_var -> inst_offset , ainfo -> reg , sizeof (target_mgreg_t ));
8267
+ }
8268
+ } else if (cfg -> method -> wrapper_type == MONO_WRAPPER_NATIVE_TO_MANAGED ) {
8269
+ /* Relies on arguments being passed on the stack */
8270
+ amd64_lea_membase (code , AMD64_R11 , cfg -> arch .swift_error_var -> inst_basereg , cfg -> arch .swift_error_var -> inst_offset );
8271
+ amd64_mov_membase_reg (code , ins -> inst_basereg , ins -> inst_offset , AMD64_R11 , 8 );
8256
8272
}
8257
8273
break ;
8258
8274
default :
0 commit comments