@@ -5,7 +5,7 @@ use crate::abi::call::{ArgAbi, FnAbi, Reg, Uniform};

const NUM_ARG_GPR: u64 = 6;
const MAX_ARG_IN_REGS_SIZE: u64 = 4 * 32;
-// const MAX_ARG_DIRECT_SIZE : u64 = MAX_ARG_IN_REGS_SIZE;
+// const MAX_ARG_DIRECT_SIZE: u64 = MAX_ARG_IN_REGS_SIZE;
const MAX_RET_IN_REGS_SIZE: u64 = 2 * 32;

fn classify_ret_ty<Ty>(arg: &mut ArgAbi<'_, Ty>, xlen: u64) {
@@ -21,45 +21,69 @@ fn classify_arg_ty<Ty>(arg: &mut ArgAbi<'_, Ty>, xlen: u64, remaining_gpr: &mut
    // register pairs, so may consume 3 registers.

    let mut stack_required = false;
-    let mut required_gpr = 1u64; // at least one to start
-    let arg_size = arg.layout.size.bits();
-    // let alignment = arg.layout.details.align.abi.bits();
+    let arg_size = arg.layout.size;
+    let alignment = arg.layout.details.align.abi;


-    if arg_size > xlen && arg_size <= MAX_ARG_IN_REGS_SIZE {
-        required_gpr = arg_size + (xlen - 1) / xlen;
+    let mut required_gpr = 1u64; // at least one per arg
+    if alignment.bits() == 2 * xlen {
+        required_gpr = 2 + (*remaining_gpr % 2);
+    } else if arg_size.bits() > xlen && arg_size.bits() <= MAX_ARG_IN_REGS_SIZE {
+        required_gpr = arg_size.bits() + (xlen - 1) / xlen;
    }

    if required_gpr > *remaining_gpr {
        stack_required = true;
-        required_gpr = *remaining_gpr; }
-
-
+        required_gpr = *remaining_gpr;
+    }
    *remaining_gpr -= required_gpr;

    // if a value can fit in a reg and the
    // stack is not required, extend
-    if !arg.layout.is_aggregate() {
-        if arg_size < xlen && !stack_required {
+    if !arg.layout.is_aggregate() { // non-aggregate types
+        if arg_size.bits() < xlen && !stack_required {
            arg.extend_integer_width_to(xlen);
        }
-        return;
+    } else if arg_size.bits() as u64 <= MAX_ARG_IN_REGS_SIZE { // aggregate types
+        // Aggregates which are <= 4*32 will be passed in registers if possible,
+        // so coerce to integers.
+
+        // Use a single XLen int if possible, 2*XLen if 2*XLen alignment is
+        // required, and a 2-element XLen array if only XLen alignment is
+        // required.
+        // if alignment == 2 * xlen {
+        //     arg.extend_integer_width_to(xlen * 2);
+        // } else {
+        //     arg.extend_integer_width_to(arg_size + (xlen - 1) / xlen);
+        // }
+        if alignment.bits() == 2 * xlen {
+            arg.cast_to(Uniform {
+                unit: Reg::i64(),
+                total: arg_size
+            });
+        } else {
+            // TODO array type - this should be a homogenous array type
+            // arg.extend_integer_width_to(arg_size + (xlen - 1) / xlen);
+        }
+
+    } else {
+        // if we get here the stack is required
+        assert!(stack_required);
+        arg.make_indirect();
    }


-    if arg_size as u64 <= MAX_ARG_IN_REGS_SIZE {
-        let align = arg.layout.align.abi.bytes();
-        let total = arg.layout.size;
-        arg.cast_to(Uniform {
-            unit: if align <= 4 { Reg::i32() } else { Reg::i64() },
-            total
-        });
-        return;
-    }
+    // if arg_size as u64 <= MAX_ARG_IN_REGS_SIZE {
+    //     let align = arg.layout.align.abi.bytes();
+    //     let total = arg.layout.size;
+    //     arg.cast_to(Uniform {
+    //         unit: if align <= 4 { Reg::i32() } else { Reg::i64() },
+    //         total
+    //     });
+    //     return;
+    // }

-    // if we get here the stack is required
-    // assert!(stack_required);
-    arg.make_indirect();
+
}

pub fn compute_abi_info<Ty>(fabi: &mut FnAbi<'_, Ty>, xlen: u64) {
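
For readers following the new classification logic, the sketch below restates the register-counting rule in a standalone form, assuming the convention the comments describe: an argument needs roughly ceil(size_bits / xlen) GPRs, a 2*xlen-aligned argument is passed in an aligned register pair (possibly skipping one register), and anything that no longer fits in the remaining argument registers spills to the stack. The helper name required_gprs and its signature are hypothetical, for illustration only; they are not part of the commit.

// Standalone sketch (not from the commit) of the GPR-counting rule used in
// classify_arg_ty above, under the assumptions stated in the lead-in.
fn required_gprs(size_bits: u64, align_bits: u64, xlen: u64, remaining_gpr: u64) -> (u64, bool) {
    let mut required = if align_bits == 2 * xlen {
        // an aligned register pair; skip one register if the next free one is odd
        2 + (remaining_gpr % 2)
    } else {
        // ceiling division (note the parentheses around the whole sum), at least one
        ((size_bits + xlen - 1) / xlen).max(1)
    };
    // cap at what is left; the remainder of the argument goes on the stack
    let stack_required = required > remaining_gpr;
    if stack_required {
        required = remaining_gpr;
    }
    (required, stack_required)
}

fn main() {
    // a 64-bit, 64-bit-aligned argument on RV32 (xlen = 32) with all six
    // argument registers free: one aligned pair, no stack spill
    assert_eq!(required_gprs(64, 64, 32, 6), (2, false));
}

Under that reading, a 2*xlen-aligned argument consumes an even/odd register pair when enough argument registers remain, which is what the alignment.bits() == 2 * xlen branch in the diff computes.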