# MREDUCE_4X(_v0, _v1, _v2, _v3)
#
.macro MREDUCE_4X _v0 _v1 _v2 _v3
        # Load four vectors (32 int16 coefficients), advancing r3 by 16 bytes per load.
        lxvd2x     32+13, 0, 3
        addi       3, 3, 16
        lxvd2x     32+18, 0, 3
        addi       3, 3, 16
        lxvd2x     32+23, 0, 3
        addi       3, 3, 16
        lxvd2x     32+7, 0, 3
        addi       3, 3, 16

        # Low 16 bits of coeff * V1353 (v3 is zero).
        vmladduhm  15, 13, V1353, 3
        vmladduhm  20, 18, V1353, 3
        vmladduhm  25, 23, V1353, 3
        vmladduhm  9, 7, V1353, 3

        # Rounded high halves: round((coeff * V1353) >> 15), addend v3 = 0.
        vmhraddshs 14, 13, V1353, 3
        vmhraddshs 19, 18, V1353, 3
        vmhraddshs 24, 23, V1353, 3
        vmhraddshs 8, 7, V1353, 3

        # m = t_lo * QINV (mod 2^16).
        vmladduhm  15, 15, V_QINV, 3
        vmladduhm  20, 20, V_QINV, 3
        vmladduhm  25, 25, V_QINV, 3
        vmladduhm  9, 9, V_QINV, 3

        # Accumulate round((m * V_NMKQ) >> 15) onto the high halves (V_NMKQ = -q).
        vmhraddshs 15, 15, V_NMKQ, 14
        vmhraddshs 20, 20, V_NMKQ, 19
        vmhraddshs 25, 25, V_NMKQ, 24
        vmhraddshs 9, 9, V_NMKQ, 8

        # vmhraddshs divides by 2^15, not 2^16, so halve to finish the Montgomery divide.
        vsrah      \_v0, 15, 4   # >> 1
        vsrah      \_v1, 20, 4   # >> 1
        vsrah      \_v2, 25, 4   # >> 1
        vsrah      \_v3, 9, 4    # >> 1
.endm

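/*
 * Reference sketch (not part of this file): the scalar Montgomery
 * multiplication that MREDUCE_4X vectorizes.  The constants below are the
 * usual ML-KEM values and are assumptions about what V_NMKQ / V_QINV / V1353
 * hold, not definitions taken from this diff; the helper names are
 * hypothetical.  The vector code reaches the same result via vmhraddshs,
 * which divides by 2^15 instead of 2^16, hence the trailing ">> 1" above.
 *
 *   #include <stdint.h>
 *
 *   #define MLKEM_Q    3329
 *   #define MLKEM_QINV (-3327)   // q^-1 mod 2^16, as a signed 16-bit value
 *   #define MONT_R2    1353      // 2^32 mod q, i.e. R^2 for R = 2^16
 *
 *   // Given |a| < 2^15 * q, return a * 2^-16 mod q as a value in (-q, q).
 *   static int16_t montgomery_reduce(int32_t a)
 *   {
 *     int16_t m = (int16_t)a * (int16_t)MLKEM_QINV; // a * q^-1 mod 2^16
 *     return (int16_t)((a - (int32_t)m * MLKEM_Q) >> 16);
 *   }
 *
 *   // Map a coefficient into the Montgomery domain: a -> a * 2^16 mod q.
 *   static int16_t to_mont(int16_t a)
 *   {
 *     return montgomery_reduce((int32_t)a * MONT_R2);
 *   }
 */
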
.macro Write_8X
        # Store the eight reduced vectors back over the 128 bytes just read,
        # using the negative offsets preloaded into r4-r11 (r3 points past them).
        stxvd2x 32+27, 4, 3
        stxvd2x 32+28, 5, 3
        stxvd2x 32+29, 6, 3
        stxvd2x 32+30, 7, 3
        stxvd2x 32+13, 8, 3
        stxvd2x 32+18, 9, 3
        stxvd2x 32+23, 10, 3
        stxvd2x 32+7, 11, 3
.endm

.align 4
.globl MLK_ASM_NAMESPACE(poly_tomont_ppc)
MLK_ASM_FN_SYMBOL(poly_tomont_ppc)
        stdu    1, -320(1)
        mflr    0

        # Save the non-volatile vector registers used below.
        stxv    32+20, 128(1)
        stxv    32+21, 144(1)
        stxv    32+22, 160(1)
        stxv    32+23, 176(1)
        stxv    32+24, 192(1)
        stxv    32+25, 208(1)
        stxv    32+26, 224(1)
        stxv    32+27, 240(1)
        stxv    32+28, 256(1)
        stxv    32+29, 272(1)
        stxv    32+30, 288(1)

        # Load the reduction constants from the table pointed to by r4.
        lxv     32+V_NMKQ, NQ_OFFSET(4)
        lxv     32+V_QINV, QINV_OFFSET(4)
        lxv     32+V1353, C1353_OFFSET(4)

        vxor     3, 3, 3    # v3 = 0, additive identity for the multiply-adds
        vspltish 4, 1       # v4 = splat(1), shift count for vsrah

        # Negative store offsets relative to the advanced coefficient pointer (r3).
        li      4, -128
        li      5, -112
        li      6, -96
        li      7, -80
        li      8, -64
        li      9, -48
        li      10, -32
        li      11, -16

        # Four rounds of 8 vectors x 8 halfwords = 256 coefficients in total.
        MREDUCE_4X 27, 28, 29, 30
        MREDUCE_4X 13, 18, 23, 7
        Write_8X

        MREDUCE_4X 27, 28, 29, 30
        MREDUCE_4X 13, 18, 23, 7
        Write_8X

        MREDUCE_4X 27, 28, 29, 30
        MREDUCE_4X 13, 18, 23, 7
        Write_8X

        MREDUCE_4X 27, 28, 29, 30
        MREDUCE_4X 13, 18, 23, 7
        Write_8X

        # Restore the saved vector registers and return.
        lxv     32+20, 128(1)
        lxv     32+21, 144(1)
        lxv     32+22, 160(1)
        lxv     32+23, 176(1)
        lxv     32+24, 192(1)
        lxv     32+25, 208(1)
        lxv     32+26, 224(1)
        lxv     32+27, 240(1)
        lxv     32+28, 256(1)
        lxv     32+29, 272(1)
        lxv     32+30, 288(1)
        mtlr    0
        addi    1, 1, 320
        blr

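/*
 * The four MREDUCE_4X / Write_8X rounds above process 4 x 64 = 256 halfword
 * coefficients in place.  A hypothetical plain-C equivalent, reusing the
 * to_mont() sketch above; the prototype is a guess from the register usage
 * (r3 = coefficient pointer, r4 = constants table, folded into compile-time
 * constants here) and is not taken from this diff.
 *
 *   void poly_tomont_ref(int16_t r[256])
 *   {
 *     for (int i = 0; i < 256; i++)
 *     {
 *       r[i] = to_mont(r[i]);
 *     }
 *   }
 */
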
/* To facilitate single-compilation-unit (SCU) builds, undefine all macros.
 * Don't modify by hand -- this is auto-generated by scripts/autogen. */