 #include <stdint.h>

+#include "int128.h"
+
 #ifdef VERIFY
 #define VERIFY_BITS(x, n) VERIFY_CHECK(((x) >> (n)) == 0)
+#define VERIFY_BITS_128(x, n) VERIFY_CHECK(secp256k1_u128_check_bits((x), (n)))
 #else
 #define VERIFY_BITS(x, n) do { } while(0)
+#define VERIFY_BITS_128(x, n) do { } while(0)
 #endif

 SECP256K1_INLINE static void secp256k1_fe_mul_inner(uint64_t *r, const uint64_t *a, const uint64_t * SECP256K1_RESTRICT b) {
-    uint128_t c, d;
+    secp256k1_uint128 c, d;
     uint64_t t3, t4, tx, u0;
     uint64_t a0 = a[0], a1 = a[1], a2 = a[2], a3 = a[3], a4 = a[4];
     const uint64_t M = 0xFFFFFFFFFFFFFULL, R = 0x1000003D10ULL;
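
Context for reviewers: `secp256k1_uint128`, `VERIFY_BITS_128`, and the `secp256k1_u128_*` calls below come from the new int128.h abstraction, which hides whether the compiler provides a native 128-bit type. As a rough sketch (an assumption about the backend, not code from this diff), the verification helper on targets with unsigned __int128 could look like:

    /* Hypothetical native-backend sketch; the real implementation lives behind int128.h. */
    typedef unsigned __int128 secp256k1_uint128;

    /* Nonzero iff *r fits in n bits, i.e. all bits at position n and above are clear (n < 128). */
    static SECP256K1_INLINE int secp256k1_u128_check_bits(const secp256k1_uint128 *r, unsigned int n) {
        return (*r >> n) == 0;
    }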
@@ -40,121 +44,119 @@ SECP256K1_INLINE static void secp256k1_fe_mul_inner(uint64_t *r, const uint64_t
      * Note that [x 0 0 0 0 0] = [x*R].
      */

-    d  = (uint128_t)a0 * b[3]
-       + (uint128_t)a1 * b[2]
-       + (uint128_t)a2 * b[1]
-       + (uint128_t)a3 * b[0];
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_mul(&d, a0, b[3]);
+    secp256k1_u128_accum_mul(&d, a1, b[2]);
+    secp256k1_u128_accum_mul(&d, a2, b[1]);
+    secp256k1_u128_accum_mul(&d, a3, b[0]);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 0 0] = [p3 0 0 0] */
-    c  = (uint128_t)a4 * b[4];
-    VERIFY_BITS(c, 112);
+    secp256k1_u128_mul(&c, a4, b[4]);
+    VERIFY_BITS_128(&c, 112);
     /* [c 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
-    d += (uint128_t)R * (uint64_t)c; c >>= 64;
-    VERIFY_BITS(d, 115);
-    VERIFY_BITS(c, 48);
+    secp256k1_u128_accum_mul(&d, R, secp256k1_u128_to_u64(&c)); secp256k1_u128_rshift(&c, 64);
+    VERIFY_BITS_128(&d, 115);
+    VERIFY_BITS_128(&c, 48);
     /* [(c<<12) 0 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
-    t3 = d & M; d >>= 52;
+    t3 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(t3, 52);
-    VERIFY_BITS(d, 63);
+    VERIFY_BITS_128(&d, 63);
     /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */

-    d += (uint128_t)a0 * b[4]
-       + (uint128_t)a1 * b[3]
-       + (uint128_t)a2 * b[2]
-       + (uint128_t)a3 * b[1]
-       + (uint128_t)a4 * b[0];
-    VERIFY_BITS(d, 115);
+    secp256k1_u128_accum_mul(&d, a0, b[4]);
+    secp256k1_u128_accum_mul(&d, a1, b[3]);
+    secp256k1_u128_accum_mul(&d, a2, b[2]);
+    secp256k1_u128_accum_mul(&d, a3, b[1]);
+    secp256k1_u128_accum_mul(&d, a4, b[0]);
+    VERIFY_BITS_128(&d, 115);
     /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
-    d += (uint128_t)(R << 12) * (uint64_t)c;
-    VERIFY_BITS(d, 116);
+    secp256k1_u128_accum_mul(&d, R << 12, secp256k1_u128_to_u64(&c));
+    VERIFY_BITS_128(&d, 116);
     /* [d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
-    t4 = d & M; d >>= 52;
+    t4 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(t4, 52);
-    VERIFY_BITS(d, 64);
+    VERIFY_BITS_128(&d, 64);
     /* [d t4 t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
     tx = (t4 >> 48); t4 &= (M >> 4);
     VERIFY_BITS(tx, 4);
     VERIFY_BITS(t4, 48);
     /* [d t4+(tx<<48) t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */

-    c  = (uint128_t)a0 * b[0];
-    VERIFY_BITS(c, 112);
+    secp256k1_u128_mul(&c, a0, b[0]);
+    VERIFY_BITS_128(&c, 112);
     /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 0 p4 p3 0 0 p0] */
-    d += (uint128_t)a1 * b[4]
-       + (uint128_t)a2 * b[3]
-       + (uint128_t)a3 * b[2]
-       + (uint128_t)a4 * b[1];
-    VERIFY_BITS(d, 115);
+    secp256k1_u128_accum_mul(&d, a1, b[4]);
+    secp256k1_u128_accum_mul(&d, a2, b[3]);
+    secp256k1_u128_accum_mul(&d, a3, b[2]);
+    secp256k1_u128_accum_mul(&d, a4, b[1]);
+    VERIFY_BITS_128(&d, 115);
     /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    u0 = d & M; d >>= 52;
+    u0 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(u0, 52);
-    VERIFY_BITS(d, 63);
+    VERIFY_BITS_128(&d, 63);
     /* [d u0 t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
     /* [d 0 t4+(tx<<48)+(u0<<52) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
     u0 = (u0 << 4) | tx;
     VERIFY_BITS(u0, 56);
     /* [d 0 t4+(u0<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    c += (uint128_t)u0 * (R >> 4);
-    VERIFY_BITS(c, 115);
+    secp256k1_u128_accum_mul(&c, u0, R >> 4);
+    VERIFY_BITS_128(&c, 115);
     /* [d 0 t4 t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    r[0] = c & M; c >>= 52;
+    r[0] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[0], 52);
-    VERIFY_BITS(c, 61);
+    VERIFY_BITS_128(&c, 61);
     /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 0 p0] */

-    c += (uint128_t)a0 * b[1]
-       + (uint128_t)a1 * b[0];
-    VERIFY_BITS(c, 114);
+    secp256k1_u128_accum_mul(&c, a0, b[1]);
+    secp256k1_u128_accum_mul(&c, a1, b[0]);
+    VERIFY_BITS_128(&c, 114);
     /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 p1 p0] */
-    d += (uint128_t)a2 * b[4]
-       + (uint128_t)a3 * b[3]
-       + (uint128_t)a4 * b[2];
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_accum_mul(&d, a2, b[4]);
+    secp256k1_u128_accum_mul(&d, a3, b[3]);
+    secp256k1_u128_accum_mul(&d, a4, b[2]);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
-    c += (d & M) * R; d >>= 52;
-    VERIFY_BITS(c, 115);
-    VERIFY_BITS(d, 62);
+    secp256k1_u128_accum_mul(&c, secp256k1_u128_to_u64(&d) & M, R); secp256k1_u128_rshift(&d, 52);
+    VERIFY_BITS_128(&c, 115);
+    VERIFY_BITS_128(&d, 62);
     /* [d 0 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
-    r[1] = c & M; c >>= 52;
+    r[1] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[1], 52);
-    VERIFY_BITS(c, 63);
+    VERIFY_BITS_128(&c, 63);
     /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */

-    c += (uint128_t)a0 * b[2]
-       + (uint128_t)a1 * b[1]
-       + (uint128_t)a2 * b[0];
-    VERIFY_BITS(c, 114);
+    secp256k1_u128_accum_mul(&c, a0, b[2]);
+    secp256k1_u128_accum_mul(&c, a1, b[1]);
+    secp256k1_u128_accum_mul(&c, a2, b[0]);
+    VERIFY_BITS_128(&c, 114);
     /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 p2 p1 p0] */
-    d += (uint128_t)a3 * b[4]
-       + (uint128_t)a4 * b[3];
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_accum_mul(&d, a3, b[4]);
+    secp256k1_u128_accum_mul(&d, a4, b[3]);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 0 t4 t3 c t1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    c += (uint128_t)R * (uint64_t)d; d >>= 64;
-    VERIFY_BITS(c, 115);
-    VERIFY_BITS(d, 50);
+    secp256k1_u128_accum_mul(&c, R, secp256k1_u128_to_u64(&d)); secp256k1_u128_rshift(&d, 64);
+    VERIFY_BITS_128(&c, 115);
+    VERIFY_BITS_128(&d, 50);
     /* [(d<<12) 0 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */

-    r[2] = c & M; c >>= 52;
+    r[2] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[2], 52);
-    VERIFY_BITS(c, 63);
+    VERIFY_BITS_128(&c, 63);
     /* [(d<<12) 0 0 0 t4 t3+c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    c += (uint128_t)(R << 12) * (uint64_t)d + t3;
-    VERIFY_BITS(c, 100);
+    secp256k1_u128_accum_mul(&c, R << 12, secp256k1_u128_to_u64(&d));
+    secp256k1_u128_accum_u64(&c, t3);
+    VERIFY_BITS_128(&c, 100);
     /* [t4 c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    r[3] = c & M; c >>= 52;
+    r[3] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[3], 52);
-    VERIFY_BITS(c, 48);
+    VERIFY_BITS_128(&c, 48);
     /* [t4+c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    c += t4;
-    VERIFY_BITS(c, 49);
-    /* [c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    r[4] = c;
+    r[4] = secp256k1_u128_to_u64(&c) + t4;
     VERIFY_BITS(r[4], 49);
     /* [r4 r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
 }
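
The remaining int128.h helpers used above take the accumulator by pointer rather than operating on a bare uint128_t. Under the same native-__int128 assumption as before, each plausibly reduces to a one-line wrapper, which is why the rewrite can be expected to compile to the same arithmetic as the old code on 64-bit targets (again a sketch, not this PR's verbatim backend):

    static SECP256K1_INLINE void secp256k1_u128_mul(secp256k1_uint128 *r, uint64_t a, uint64_t b) {
        *r = (secp256k1_uint128)a * b;   /* full 64x64->128 product */
    }
    static SECP256K1_INLINE void secp256k1_u128_accum_mul(secp256k1_uint128 *r, uint64_t a, uint64_t b) {
        *r += (secp256k1_uint128)a * b;  /* accumulate; the VERIFY_BITS_128 bounds rule out overflow */
    }
    static SECP256K1_INLINE void secp256k1_u128_accum_u64(secp256k1_uint128 *r, uint64_t a) {
        *r += a;
    }
    static SECP256K1_INLINE void secp256k1_u128_rshift(secp256k1_uint128 *r, unsigned int n) {
        *r >>= n;                        /* callers above use n = 52 or 64, always < 128 */
    }
    static SECP256K1_INLINE uint64_t secp256k1_u128_to_u64(const secp256k1_uint128 *a) {
        return (uint64_t)*a;             /* low 64 bits */
    }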

 SECP256K1_INLINE static void secp256k1_fe_sqr_inner(uint64_t *r, const uint64_t *a) {
-    uint128_t c, d;
+    secp256k1_uint128 c, d;
     uint64_t a0 = a[0], a1 = a[1], a2 = a[2], a3 = a[3], a4 = a[4];
     int64_t t3, t4, tx, u0;
     const uint64_t M = 0xFFFFFFFFFFFFFULL, R = 0x1000003D10ULL;
@@ -170,107 +172,105 @@ SECP256K1_INLINE static void secp256k1_fe_sqr_inner(uint64_t *r, const uint64_t
      * Note that [x 0 0 0 0 0] = [x*R].
      */

-    d  = (uint128_t)(a0*2) * a3
-       + (uint128_t)(a1*2) * a2;
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_mul(&d, a0*2, a3);
+    secp256k1_u128_accum_mul(&d, a1*2, a2);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 0 0] = [p3 0 0 0] */
-    c  = (uint128_t)a4 * a4;
-    VERIFY_BITS(c, 112);
+    secp256k1_u128_mul(&c, a4, a4);
+    VERIFY_BITS_128(&c, 112);
     /* [c 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
-    d += (uint128_t)R * (uint64_t)c; c >>= 64;
-    VERIFY_BITS(d, 115);
-    VERIFY_BITS(c, 48);
+    secp256k1_u128_accum_mul(&d, R, secp256k1_u128_to_u64(&c)); secp256k1_u128_rshift(&c, 64);
+    VERIFY_BITS_128(&d, 115);
+    VERIFY_BITS_128(&c, 48);
     /* [(c<<12) 0 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
-    t3 = d & M; d >>= 52;
+    t3 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(t3, 52);
-    VERIFY_BITS(d, 63);
+    VERIFY_BITS_128(&d, 63);
     /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */

     a4 *= 2;
-    d += (uint128_t)a0 * a4
-       + (uint128_t)(a1*2) * a3
-       + (uint128_t)a2 * a2;
-    VERIFY_BITS(d, 115);
+    secp256k1_u128_accum_mul(&d, a0, a4);
+    secp256k1_u128_accum_mul(&d, a1*2, a3);
+    secp256k1_u128_accum_mul(&d, a2, a2);
+    VERIFY_BITS_128(&d, 115);
     /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
-    d += (uint128_t)(R << 12) * (uint64_t)c;
-    VERIFY_BITS(d, 116);
+    secp256k1_u128_accum_mul(&d, R << 12, secp256k1_u128_to_u64(&c));
+    VERIFY_BITS_128(&d, 116);
     /* [d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
-    t4 = d & M; d >>= 52;
+    t4 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(t4, 52);
-    VERIFY_BITS(d, 64);
+    VERIFY_BITS_128(&d, 64);
     /* [d t4 t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
     tx = (t4 >> 48); t4 &= (M >> 4);
     VERIFY_BITS(tx, 4);
     VERIFY_BITS(t4, 48);
     /* [d t4+(tx<<48) t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */

-    c  = (uint128_t)a0 * a0;
-    VERIFY_BITS(c, 112);
+    secp256k1_u128_mul(&c, a0, a0);
+    VERIFY_BITS_128(&c, 112);
     /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 0 p4 p3 0 0 p0] */
-    d += (uint128_t)a1 * a4
-       + (uint128_t)(a2*2) * a3;
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_accum_mul(&d, a1, a4);
+    secp256k1_u128_accum_mul(&d, a2*2, a3);
+    VERIFY_BITS_128(&d, 114);
     /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    u0 = d & M; d >>= 52;
+    u0 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(u0, 52);
-    VERIFY_BITS(d, 62);
+    VERIFY_BITS_128(&d, 62);
     /* [d u0 t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
     /* [d 0 t4+(tx<<48)+(u0<<52) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
     u0 = (u0 << 4) | tx;
     VERIFY_BITS(u0, 56);
     /* [d 0 t4+(u0<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    c += (uint128_t)u0 * (R >> 4);
-    VERIFY_BITS(c, 113);
+    secp256k1_u128_accum_mul(&c, u0, R >> 4);
+    VERIFY_BITS_128(&c, 113);
     /* [d 0 t4 t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    r[0] = c & M; c >>= 52;
+    r[0] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[0], 52);
-    VERIFY_BITS(c, 61);
+    VERIFY_BITS_128(&c, 61);
     /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 0 p0] */

     a0 *= 2;
-    c += (uint128_t)a0 * a1;
-    VERIFY_BITS(c, 114);
+    secp256k1_u128_accum_mul(&c, a0, a1);
+    VERIFY_BITS_128(&c, 114);
     /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 p1 p0] */
-    d += (uint128_t)a2 * a4
-       + (uint128_t)a3 * a3;
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_accum_mul(&d, a2, a4);
+    secp256k1_u128_accum_mul(&d, a3, a3);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
-    c += (d & M) * R; d >>= 52;
-    VERIFY_BITS(c, 115);
-    VERIFY_BITS(d, 62);
+    secp256k1_u128_accum_mul(&c, secp256k1_u128_to_u64(&d) & M, R); secp256k1_u128_rshift(&d, 52);
+    VERIFY_BITS_128(&c, 115);
+    VERIFY_BITS_128(&d, 62);
     /* [d 0 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
-    r[1] = c & M; c >>= 52;
+    r[1] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[1], 52);
-    VERIFY_BITS(c, 63);
+    VERIFY_BITS_128(&c, 63);
     /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */

-    c += (uint128_t)a0 * a2
-       + (uint128_t)a1 * a1;
-    VERIFY_BITS(c, 114);
+    secp256k1_u128_accum_mul(&c, a0, a2);
+    secp256k1_u128_accum_mul(&c, a1, a1);
+    VERIFY_BITS_128(&c, 114);
     /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 p2 p1 p0] */
-    d += (uint128_t)a3 * a4;
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_accum_mul(&d, a3, a4);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    c += (uint128_t)R * (uint64_t)d; d >>= 64;
-    VERIFY_BITS(c, 115);
-    VERIFY_BITS(d, 50);
+    secp256k1_u128_accum_mul(&c, R, secp256k1_u128_to_u64(&d)); secp256k1_u128_rshift(&d, 64);
+    VERIFY_BITS_128(&c, 115);
+    VERIFY_BITS_128(&d, 50);
     /* [(d<<12) 0 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    r[2] = c & M; c >>= 52;
+    r[2] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[2], 52);
-    VERIFY_BITS(c, 63);
+    VERIFY_BITS_128(&c, 63);
     /* [(d<<12) 0 0 0 t4 t3+c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */

-    c += (uint128_t)(R << 12) * (uint64_t)d + t3;
-    VERIFY_BITS(c, 100);
+    secp256k1_u128_accum_mul(&c, R << 12, secp256k1_u128_to_u64(&d));
+    secp256k1_u128_accum_u64(&c, t3);
+    VERIFY_BITS_128(&c, 100);
     /* [t4 c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    r[3] = c & M; c >>= 52;
+    r[3] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[3], 52);
-    VERIFY_BITS(c, 48);
+    VERIFY_BITS_128(&c, 48);
     /* [t4+c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    c += t4;
-    VERIFY_BITS(c, 49);
-    /* [c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    r[4] = c;
+    r[4] = secp256k1_u128_to_u64(&c) + t4;
     VERIFY_BITS(r[4], 49);
     /* [r4 r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
 }
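
Two bounds in this conversion are worth spelling out. First, [x 0 0 0 0 0] = [x*R]: with p = 2^256 - 0x1000003D1 and 52-bit limbs, a carry into the sixth limb is worth x*2^260 = x*2^4*2^256 ≡ x*(0x1000003D1 << 4) = x*R (mod p), which is why overflow is folded back in by multiplying with R = 0x1000003D10. Second, the new `r[4] = secp256k1_u128_to_u64(&c) + t4` safely replaces the old 128-bit `c += t4; r[4] = c;`: at that point c fits in 48 bits (per the preceding check) and t4 in 48 bits, so their 64-bit sum fits in 49 bits, exactly what VERIFY_BITS(r[4], 49) asserts.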