@@ -2024,14 +2024,10 @@ pub unsafe fn __msa_binsl_d(a: v2u64, b: v2u64, c: v2u64) -> v2u64 {
 #[inline]
 #[target_feature(enable = "msa")]
 #[cfg_attr(test, assert_instr(binsli.b, imm3 = 0b111))]
-#[rustc_args_required_const(2)]
-pub unsafe fn __msa_binsli_b(a: v16u8, b: v16u8, imm3: i32) -> v16u8 {
-    macro_rules! call {
-        ($imm3:expr) => {
-            msa_binsli_b(a, mem::transmute(b), $imm3)
-        };
-    }
-    constify_imm3!(imm3, call)
+#[rustc_legacy_const_generics(2)]
+pub unsafe fn __msa_binsli_b<const IMM3: i32>(a: v16u8, b: v16u8) -> v16u8 {
+    static_assert_imm3!(IMM3);
+    msa_binsli_b(a, mem::transmute(b), IMM3)
 }

 /// Immediate Bit Insert Left
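This first hunk sets the pattern for the whole patch: the immediate argument becomes a const generic parameter, a `static_assert_imm3!` replaces the `constify_imm3!` runtime dispatch, and `#[rustc_legacy_const_generics(2)]` keeps existing three-argument call sites compiling unchanged. A minimal caller sketch (the demo function is hypothetical, not part of the patch):

```rust
// Hypothetical caller. The legacy three-argument call syntax still
// compiles thanks to #[rustc_legacy_const_generics(2)], but the immediate
// is now checked at compile time: anything outside the 3-bit range 0..=7
// fails static_assert_imm3! instead of hitting a runtime match.
#[target_feature(enable = "msa")]
unsafe fn insert_left_demo(a: v16u8, b: v16u8) -> v16u8 {
    __msa_binsli_b(a, b, 0b101) // rewritten by the attribute to __msa_binsli_b::<0b101>(a, b)
    // __msa_binsli_b(a, b, 8)  // would now be a compile error: 8 is not a 3-bit immediate
}
```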
@@ -2043,14 +2039,10 @@ pub unsafe fn __msa_binsli_b(a: v16u8, b: v16u8, imm3: i32) -> v16u8 {
 #[inline]
 #[target_feature(enable = "msa")]
 #[cfg_attr(test, assert_instr(binsli.h, imm4 = 0b1111))]
-#[rustc_args_required_const(2)]
-pub unsafe fn __msa_binsli_h(a: v8u16, b: v8u16, imm4: i32) -> v8u16 {
-    macro_rules! call {
-        ($imm4:expr) => {
-            msa_binsli_h(a, mem::transmute(b), $imm4)
-        };
-    }
-    constify_imm4!(imm4, call)
+#[rustc_legacy_const_generics(2)]
+pub unsafe fn __msa_binsli_h<const IMM4: i32>(a: v8u16, b: v8u16) -> v8u16 {
+    static_assert_imm4!(IMM4);
+    msa_binsli_h(a, mem::transmute(b), IMM4)
 }

 /// Immediate Bit Insert Left
@@ -2062,14 +2054,10 @@ pub unsafe fn __msa_binsli_h(a: v8u16, b: v8u16, imm4: i32) -> v8u16 {
 #[inline]
 #[target_feature(enable = "msa")]
 #[cfg_attr(test, assert_instr(binsli.w, imm5 = 0b11111))]
-#[rustc_args_required_const(2)]
-pub unsafe fn __msa_binsli_w(a: v4u32, b: v4u32, imm5: i32) -> v4u32 {
-    macro_rules! call {
-        ($imm5:expr) => {
-            msa_binsli_w(a, mem::transmute(b), $imm5)
-        };
-    }
-    constify_imm5!(imm5, call)
+#[rustc_legacy_const_generics(2)]
+pub unsafe fn __msa_binsli_w<const IMM5: i32>(a: v4u32, b: v4u32) -> v4u32 {
+    static_assert_imm5!(IMM5);
+    msa_binsli_w(a, mem::transmute(b), IMM5)
 }

 /// Immediate Bit Insert Left
@@ -2081,14 +2069,10 @@ pub unsafe fn __msa_binsli_w(a: v4u32, b: v4u32, imm5: i32) -> v4u32 {
 #[inline]
 #[target_feature(enable = "msa")]
 #[cfg_attr(test, assert_instr(binsli.d, imm6 = 0b111111))]
-#[rustc_args_required_const(2)]
-pub unsafe fn __msa_binsli_d(a: v2u64, b: v2u64, imm6: i32) -> v2u64 {
-    macro_rules! call {
-        ($imm6:expr) => {
-            msa_binsli_d(a, mem::transmute(b), $imm6)
-        };
-    }
-    constify_imm6!(imm6, call)
+#[rustc_legacy_const_generics(2)]
+pub unsafe fn __msa_binsli_d<const IMM6: i32>(a: v2u64, b: v2u64) -> v2u64 {
+    static_assert_imm6!(IMM6);
+    msa_binsli_d(a, mem::transmute(b), IMM6)
 }

 /// Vector Bit Insert Right
@@ -2156,14 +2140,10 @@ pub unsafe fn __msa_binsr_d(a: v2u64, b: v2u64, c: v2u64) -> v2u64 {
 #[inline]
 #[target_feature(enable = "msa")]
 #[cfg_attr(test, assert_instr(binsri.b, imm3 = 0b111))]
-#[rustc_args_required_const(2)]
-pub unsafe fn __msa_binsri_b(a: v16u8, b: v16u8, imm3: i32) -> v16u8 {
-    macro_rules! call {
-        ($imm3:expr) => {
-            msa_binsri_b(a, mem::transmute(b), $imm3)
-        };
-    }
-    constify_imm3!(imm3, call)
+#[rustc_legacy_const_generics(2)]
+pub unsafe fn __msa_binsri_b<const IMM3: i32>(a: v16u8, b: v16u8) -> v16u8 {
+    static_assert_imm3!(IMM3);
+    msa_binsri_b(a, mem::transmute(b), IMM3)
 }

 /// Immediate Bit Insert Right
@@ -2175,14 +2155,10 @@ pub unsafe fn __msa_binsri_b(a: v16u8, b: v16u8, imm3: i32) -> v16u8 {
 #[inline]
 #[target_feature(enable = "msa")]
 #[cfg_attr(test, assert_instr(binsri.h, imm4 = 0b1111))]
-#[rustc_args_required_const(2)]
-pub unsafe fn __msa_binsri_h(a: v8u16, b: v8u16, imm4: i32) -> v8u16 {
-    macro_rules! call {
-        ($imm4:expr) => {
-            msa_binsri_h(a, mem::transmute(b), $imm4)
-        };
-    }
-    constify_imm4!(imm4, call)
+#[rustc_legacy_const_generics(2)]
+pub unsafe fn __msa_binsri_h<const IMM4: i32>(a: v8u16, b: v8u16) -> v8u16 {
+    static_assert_imm4!(IMM4);
+    msa_binsri_h(a, mem::transmute(b), IMM4)
 }

 /// Immediate Bit Insert Right
@@ -2194,14 +2170,10 @@ pub unsafe fn __msa_binsri_h(a: v8u16, b: v8u16, imm4: i32) -> v8u16 {
 #[inline]
 #[target_feature(enable = "msa")]
 #[cfg_attr(test, assert_instr(binsri.w, imm5 = 0b11111))]
-#[rustc_args_required_const(2)]
-pub unsafe fn __msa_binsri_w(a: v4u32, b: v4u32, imm5: i32) -> v4u32 {
-    macro_rules! call {
-        ($imm5:expr) => {
-            msa_binsri_w(a, mem::transmute(b), $imm5)
-        };
-    }
-    constify_imm5!(imm5, call)
+#[rustc_legacy_const_generics(2)]
+pub unsafe fn __msa_binsri_w<const IMM5: i32>(a: v4u32, b: v4u32) -> v4u32 {
+    static_assert_imm5!(IMM5);
+    msa_binsri_w(a, mem::transmute(b), IMM5)
 }

 /// Immediate Bit Insert Right
@@ -2213,14 +2185,10 @@ pub unsafe fn __msa_binsri_w(a: v4u32, b: v4u32, imm5: i32) -> v4u32 {
 #[inline]
 #[target_feature(enable = "msa")]
 #[cfg_attr(test, assert_instr(binsri.d, imm6 = 0b111111))]
-#[rustc_args_required_const(2)]
-pub unsafe fn __msa_binsri_d(a: v2u64, b: v2u64, imm6: i32) -> v2u64 {
-    macro_rules! call {
-        ($imm6:expr) => {
-            msa_binsri_d(a, mem::transmute(b), $imm6)
-        };
-    }
-    constify_imm6!(imm6, call)
+#[rustc_legacy_const_generics(2)]
+pub unsafe fn __msa_binsri_d<const IMM6: i32>(a: v2u64, b: v2u64) -> v2u64 {
+    static_assert_imm6!(IMM6);
+    msa_binsri_d(a, mem::transmute(b), IMM6)
 }

 /// Vector Bit Move If Not Zero
@@ -2246,14 +2214,10 @@ pub unsafe fn __msa_bmnz_v(a: v16u8, b: v16u8, c: v16u8) -> v16u8 {
 #[inline]
 #[target_feature(enable = "msa")]
 #[cfg_attr(test, assert_instr(bmnzi.b, imm8 = 0b11111111))]
-#[rustc_args_required_const(2)]
-pub unsafe fn __msa_bmnzi_b(a: v16u8, b: v16u8, imm8: i32) -> v16u8 {
-    macro_rules! call {
-        ($imm8:expr) => {
-            msa_bmnzi_b(a, mem::transmute(b), $imm8)
-        };
-    }
-    constify_imm8!(imm8, call)
+#[rustc_legacy_const_generics(2)]
+pub unsafe fn __msa_bmnzi_b<const IMM8: i32>(a: v16u8, b: v16u8) -> v16u8 {
+    static_assert_imm8!(IMM8);
+    msa_bmnzi_b(a, mem::transmute(b), IMM8)
 }

 /// Vector Bit Move If Zero
@@ -2279,14 +2243,10 @@ pub unsafe fn __msa_bmz_v(a: v16u8, b: v16u8, c: v16u8) -> v16u8 {
 #[inline]
 #[target_feature(enable = "msa")]
 #[cfg_attr(test, assert_instr(bmzi.b, imm8 = 0b11111111))]
-#[rustc_args_required_const(2)]
-pub unsafe fn __msa_bmzi_b(a: v16u8, b: v16u8, imm8: i32) -> v16u8 {
-    macro_rules! call {
-        ($imm8:expr) => {
-            msa_bmzi_b(a, mem::transmute(b), $imm8)
-        };
-    }
-    constify_imm8!(imm8, call)
+#[rustc_legacy_const_generics(2)]
+pub unsafe fn __msa_bmzi_b<const IMM8: i32>(a: v16u8, b: v16u8) -> v16u8 {
+    static_assert_imm8!(IMM8);
+    msa_bmzi_b(a, mem::transmute(b), IMM8)
 }

 /// Vector Bit Negate
@@ -2484,14 +2444,10 @@ pub unsafe fn __msa_bsel_v(a: v16u8, b: v16u8, c: v16u8) -> v16u8 {
 #[inline]
 #[target_feature(enable = "msa")]
 #[cfg_attr(test, assert_instr(bseli.b, imm8 = 0b11111111))]
-#[rustc_args_required_const(2)]
-pub unsafe fn __msa_bseli_b(a: v16u8, b: v16u8, imm8: i32) -> v16u8 {
-    macro_rules! call {
-        ($imm8:expr) => {
-            msa_bseli_b(a, mem::transmute(b), $imm8)
-        };
-    }
-    constify_imm8!(imm8, call)
+#[rustc_legacy_const_generics(2)]
+pub unsafe fn __msa_bseli_b<const IMM8: i32>(a: v16u8, b: v16u8) -> v16u8 {
+    static_assert_imm8!(IMM8);
+    msa_bseli_b(a, mem::transmute(b), IMM8)
 }

 /// Vector Bit Set
@@ -7450,14 +7406,10 @@ pub unsafe fn __msa_sld_d(a: v2i64, b: v2i64, c: i32) -> v2i64 {
 #[inline]
 #[target_feature(enable = "msa")]
 #[cfg_attr(test, assert_instr(sldi.b, imm4 = 0b1111))]
-#[rustc_args_required_const(2)]
-pub unsafe fn __msa_sldi_b(a: v16i8, b: v16i8, imm4: i32) -> v16i8 {
-    macro_rules! call {
-        ($imm4:expr) => {
-            msa_sldi_b(a, mem::transmute(b), $imm4)
-        };
-    }
-    constify_imm4!(imm4, call)
+#[rustc_legacy_const_generics(2)]
+pub unsafe fn __msa_sldi_b<const IMM4: i32>(a: v16i8, b: v16i8) -> v16i8 {
+    static_assert_imm4!(IMM4);
+    msa_sldi_b(a, mem::transmute(b), IMM4)
 }

 /// Immediate Columns Slide
@@ -7474,14 +7426,10 @@ pub unsafe fn __msa_sldi_b(a: v16i8, b: v16i8, imm4: i32) -> v16i8 {
 #[inline]
 #[target_feature(enable = "msa")]
 #[cfg_attr(test, assert_instr(sldi.h, imm3 = 0b111))]
-#[rustc_args_required_const(2)]
-pub unsafe fn __msa_sldi_h(a: v8i16, b: v8i16, imm3: i32) -> v8i16 {
-    macro_rules! call {
-        ($imm3:expr) => {
-            msa_sldi_h(a, mem::transmute(b), $imm3)
-        };
-    }
-    constify_imm3!(imm3, call)
+#[rustc_legacy_const_generics(2)]
+pub unsafe fn __msa_sldi_h<const IMM3: i32>(a: v8i16, b: v8i16) -> v8i16 {
+    static_assert_imm3!(IMM3);
+    msa_sldi_h(a, mem::transmute(b), IMM3)
 }

 /// Immediate Columns Slide
@@ -7498,14 +7446,10 @@ pub unsafe fn __msa_sldi_h(a: v8i16, b: v8i16, imm3: i32) -> v8i16 {
 #[inline]
 #[target_feature(enable = "msa")]
 #[cfg_attr(test, assert_instr(sldi.w, imm2 = 0b11))]
-#[rustc_args_required_const(2)]
-pub unsafe fn __msa_sldi_w(a: v4i32, b: v4i32, imm2: i32) -> v4i32 {
-    macro_rules! call {
-        ($imm2:expr) => {
-            msa_sldi_w(a, mem::transmute(b), $imm2)
-        };
-    }
-    constify_imm2!(imm2, call)
+#[rustc_legacy_const_generics(2)]
+pub unsafe fn __msa_sldi_w<const IMM2: i32>(a: v4i32, b: v4i32) -> v4i32 {
+    static_assert_imm2!(IMM2);
+    msa_sldi_w(a, mem::transmute(b), IMM2)
 }

 /// Immediate Columns Slide
@@ -7522,14 +7466,10 @@ pub unsafe fn __msa_sldi_w(a: v4i32, b: v4i32, imm2: i32) -> v4i32 {
 #[inline]
 #[target_feature(enable = "msa")]
 #[cfg_attr(test, assert_instr(sldi.d, imm1 = 0b1))]
-#[rustc_args_required_const(2)]
-pub unsafe fn __msa_sldi_d(a: v2i64, b: v2i64, imm1: i32) -> v2i64 {
-    macro_rules! call {
-        ($imm1:expr) => {
-            msa_sldi_d(a, mem::transmute(b), $imm1)
-        };
-    }
-    constify_imm1!(imm1, call)
+#[rustc_legacy_const_generics(2)]
+pub unsafe fn __msa_sldi_d<const IMM1: i32>(a: v2i64, b: v2i64) -> v2i64 {
+    static_assert_imm1!(IMM1);
+    msa_sldi_d(a, mem::transmute(b), IMM1)
 }

 /// Vector Shift Left
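Across the four `sldi` hunks, note how the immediate narrows as the element width grows: `sldi.b` indexes one of 16 byte columns (imm4), while `sldi.d` chooses between only two 64-bit columns (imm1). A brief sketch (the wrapper function is hypothetical):

```rust
// Hypothetical wrapper: for v2i64 there are only two 64-bit columns,
// so static_assert_imm1! restricts the column index to 0 or 1.
#[target_feature(enable = "msa")]
unsafe fn slide_one_column(a: v2i64, b: v2i64) -> v2i64 {
    __msa_sldi_d(a, b, 1) // legacy call form, rewritten to __msa_sldi_d::<1>(a, b)
}
```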
@@ -8249,14 +8189,10 @@ pub unsafe fn __msa_srlri_d<const IMM6: i32>(a: v2i64) -> v2i64 {
 #[inline]
 #[target_feature(enable = "msa")]
 #[cfg_attr(test, assert_instr(st.b, imm_s10 = 0b1111111111))]
-#[rustc_args_required_const(2)]
-pub unsafe fn __msa_st_b(a: v16i8, mem_addr: *mut u8, imm_s10: i32) -> () {
-    macro_rules! call {
-        ($imm_s10:expr) => {
-            msa_st_b(a, mem_addr, $imm_s10)
-        };
-    }
-    constify_imm_s10!(imm_s10, call)
+#[rustc_legacy_const_generics(2)]
+pub unsafe fn __msa_st_b<const IMM_S10: i32>(a: v16i8, mem_addr: *mut u8) -> () {
+    static_assert_imm_s10!(IMM_S10);
+    msa_st_b(a, mem_addr, IMM_S10)
 }

 /// Vector Store
@@ -8268,14 +8204,11 @@ pub unsafe fn __msa_st_b(a: v16i8, mem_addr: *mut u8, imm_s10: i32) -> () {
 #[inline]
 #[target_feature(enable = "msa")]
 #[cfg_attr(test, assert_instr(st.h, imm_s11 = 0b11111111111))]
-#[rustc_args_required_const(2)]
-pub unsafe fn __msa_st_h(a: v8i16, mem_addr: *mut u8, imm_s11: i32) -> () {
-    macro_rules! call {
-        ($imm_s11:expr) => {
-            msa_st_h(a, mem_addr, $imm_s11)
-        };
-    }
-    constify_imm_s11!(imm_s11, call)
+#[rustc_legacy_const_generics(2)]
+pub unsafe fn __msa_st_h<const IMM_S11: i32>(a: v8i16, mem_addr: *mut u8) -> () {
+    static_assert_imm_s11!(IMM_S11);
+    static_assert!(IMM_S11: i32 where IMM_S11 % 2 == 0);
+    msa_st_h(a, mem_addr, IMM_S11)
 }

 /// Vector Store
@@ -8287,14 +8220,11 @@ pub unsafe fn __msa_st_h(a: v8i16, mem_addr: *mut u8, imm_s11: i32) -> () {
 #[inline]
 #[target_feature(enable = "msa")]
 #[cfg_attr(test, assert_instr(st.w, imm_s12 = 0b111111111111))]
-#[rustc_args_required_const(2)]
-pub unsafe fn __msa_st_w(a: v4i32, mem_addr: *mut u8, imm_s12: i32) -> () {
-    macro_rules! call {
-        ($imm_s12:expr) => {
-            msa_st_w(a, mem_addr, $imm_s12)
-        };
-    }
-    constify_imm_s12!(imm_s12, call)
+#[rustc_legacy_const_generics(2)]
+pub unsafe fn __msa_st_w<const IMM_S12: i32>(a: v4i32, mem_addr: *mut u8) -> () {
+    static_assert_imm_s12!(IMM_S12);
+    static_assert!(IMM_S12: i32 where IMM_S12 % 4 == 0);
+    msa_st_w(a, mem_addr, IMM_S12)
 }

 /// Vector Store
@@ -8306,14 +8236,11 @@ pub unsafe fn __msa_st_w(a: v4i32, mem_addr: *mut u8, imm_s12: i32) -> () {
 #[inline]
 #[target_feature(enable = "msa")]
 #[cfg_attr(test, assert_instr(st.d, imm_s13 = 0b1111111111111))]
-#[rustc_args_required_const(2)]
-pub unsafe fn __msa_st_d(a: v2i64, mem_addr: *mut u8, imm_s13: i32) -> () {
-    macro_rules! call {
-        ($imm_s13:expr) => {
-            msa_st_d(a, mem_addr, $imm_s13)
-        };
-    }
-    constify_imm_s13!(imm_s13, call)
+#[rustc_legacy_const_generics(2)]
+pub unsafe fn __msa_st_d<const IMM_S13: i32>(a: v2i64, mem_addr: *mut u8) -> () {
+    static_assert_imm_s13!(IMM_S13);
+    static_assert!(IMM_S13: i32 where IMM_S13 % 8 == 0);
+    msa_st_d(a, mem_addr, IMM_S13)
 }

 /// Vector Signed Saturated Subtract of Signed Values
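Unlike the earlier hunks, the `st.h`/`st.w`/`st.d` stores gain a second assertion on top of the signed-range check: the byte offset must be a multiple of the element size, since the hardware encodes the immediate scaled by that size. A sketch of what the combined checks admit (the demo function and buffer are hypothetical):

```rust
// Hypothetical use of __msa_st_d. The offset is a signed 13-bit byte count
// (checked by static_assert_imm_s13!) that must also be a multiple of 8
// for 64-bit elements.
#[target_feature(enable = "msa")]
unsafe fn store_demo(v: v2i64, buf: *mut u8) {
    __msa_st_d(v, buf, 16); // OK: in range and 16 % 8 == 0
    // __msa_st_d(v, buf, 12);   // rejected at compile time: 12 % 8 != 0
    // __msa_st_d(v, buf, 8192); // rejected: outside the signed 13-bit range
}
```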