 #ifdef VERIFY
 #define VERIFY_BITS(x, n) VERIFY_CHECK(((x) >> (n)) == 0)
+#define VERIFY_BITS_128(x, n) VERIFY_CHECK(secp256k1_u128_check_bits((x), (n)))
 #else
 #define VERIFY_BITS(x, n) do { } while(0)
+#define VERIFY_BITS_128(x, n) do { } while(0)
 #endif
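The `secp256k1_u128_*` helpers referenced throughout this diff come from the library's new 128-bit abstraction layer. As a rough sketch of the backend for compilers with a native `unsigned __int128` (the library also provides a portable two-`uint64_t` implementation, so treat these bodies as illustrative rather than authoritative):

#include <stdint.h>

/* Sketch only: assumes a compiler with unsigned __int128. */
typedef unsigned __int128 secp256k1_uint128;

/* r = a * b, widening two 64-bit operands to 128 bits. */
static inline void secp256k1_u128_mul(secp256k1_uint128 *r, uint64_t a, uint64_t b) {
    *r = (secp256k1_uint128)a * b;
}

/* r += a * b; callers keep the running sum below 2^128 (see VERIFY_BITS_128). */
static inline void secp256k1_u128_accum_mul(secp256k1_uint128 *r, uint64_t a, uint64_t b) {
    *r += (secp256k1_uint128)a * b;
}

/* r += a. */
static inline void secp256k1_u128_accum_u64(secp256k1_uint128 *r, uint64_t a) {
    *r += a;
}

/* r >>= n, for 0 <= n < 128. */
static inline void secp256k1_u128_rshift(secp256k1_uint128 *r, unsigned int n) {
    *r >>= n;
}

/* Return the low 64 bits of r. */
static inline uint64_t secp256k1_u128_to_u64(const secp256k1_uint128 *r) {
    return (uint64_t)*r;
}

/* Return nonzero iff r < 2^n. */
static inline int secp256k1_u128_check_bits(const secp256k1_uint128 *r, unsigned int n) {
    return (*r >> n == 0);
}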
 SECP256K1_INLINE static void secp256k1_fe_mul_inner(uint64_t *r, const uint64_t *a, const uint64_t * SECP256K1_RESTRICT b) {
-    uint128_t c, d;
+    secp256k1_uint128 c, d;
     uint64_t t3, t4, tx, u0;
     uint64_t a0 = a[0], a1 = a[1], a2 = a[2], a3 = a[3], a4 = a[4];
     const uint64_t M = 0xFFFFFFFFFFFFFULL, R = 0x1000003D10ULL;
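Background on the two constants (not part of the diff): the field element is held in 5 limbs of 52 bits, so M is the 52-bit limb mask, and R is the factor that folds a carry past the fifth limb back into the low end. With p = 2^256 - 0x1000003D1, a unit at limb position 5 has weight 2^260, and

    2^260 = 2^4 * 2^256 ≡ 2^4 * 0x1000003D1 = 0x1000003D10 = R  (mod p)

which is exactly the "[x 0 0 0 0 0] = [x*R]" identity noted in the comment below.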
@@ -40,122 +42,120 @@ SECP256K1_INLINE static void secp256k1_fe_mul_inner(uint64_t *r, const uint64_t
      * Note that [x 0 0 0 0 0] = [x*R].
      */

-    d = (uint128_t)a0 * b[3]
-      + (uint128_t)a1 * b[2]
-      + (uint128_t)a2 * b[1]
-      + (uint128_t)a3 * b[0];
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_mul(&d, a0, b[3]);
+    secp256k1_u128_accum_mul(&d, a1, b[2]);
+    secp256k1_u128_accum_mul(&d, a2, b[1]);
+    secp256k1_u128_accum_mul(&d, a3, b[0]);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 0 0] = [p3 0 0 0] */
-    c = (uint128_t)a4 * b[4];
-    VERIFY_BITS(c, 112);
+    secp256k1_u128_mul(&c, a4, b[4]);
+    VERIFY_BITS_128(&c, 112);
     /* [c 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
-    d += (c & M) * R; c >>= 52;
-    VERIFY_BITS(d, 115);
-    VERIFY_BITS(c, 60);
+    secp256k1_u128_accum_mul(&d, secp256k1_u128_to_u64(&c) & M, R); secp256k1_u128_rshift(&c, 52);
+    VERIFY_BITS_128(&d, 115);
+    VERIFY_BITS_128(&c, 60);
     /* [c 0 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
-    t3 = d & M; d >>= 52;
+    t3 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(t3, 52);
-    VERIFY_BITS(d, 63);
+    VERIFY_BITS_128(&d, 63);
     /* [c 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */

-    d += (uint128_t)a0 * b[4]
-      + (uint128_t)a1 * b[3]
-      + (uint128_t)a2 * b[2]
-      + (uint128_t)a3 * b[1]
-      + (uint128_t)a4 * b[0];
-    VERIFY_BITS(d, 115);
+    secp256k1_u128_accum_mul(&d, a0, b[4]);
+    secp256k1_u128_accum_mul(&d, a1, b[3]);
+    secp256k1_u128_accum_mul(&d, a2, b[2]);
+    secp256k1_u128_accum_mul(&d, a3, b[1]);
+    secp256k1_u128_accum_mul(&d, a4, b[0]);
+    VERIFY_BITS_128(&d, 115);
     /* [c 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
-    d += c * R;
-    VERIFY_BITS(d, 116);
+    secp256k1_u128_accum_mul(&d, secp256k1_u128_to_u64(&c), R);
+    VERIFY_BITS_128(&d, 116);
     /* [d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
-    t4 = d & M; d >>= 52;
+    t4 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(t4, 52);
-    VERIFY_BITS(d, 64);
+    VERIFY_BITS_128(&d, 64);
     /* [d t4 t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
     tx = (t4 >> 48); t4 &= (M >> 4);
     VERIFY_BITS(tx, 4);
     VERIFY_BITS(t4, 48);
     /* [d t4+(tx<<48) t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */

-    c = (uint128_t)a0 * b[0];
-    VERIFY_BITS(c, 112);
+    secp256k1_u128_mul(&c, a0, b[0]);
+    VERIFY_BITS_128(&c, 112);
     /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 0 p4 p3 0 0 p0] */
-    d += (uint128_t)a1 * b[4]
-      + (uint128_t)a2 * b[3]
-      + (uint128_t)a3 * b[2]
-      + (uint128_t)a4 * b[1];
-    VERIFY_BITS(d, 115);
+    secp256k1_u128_accum_mul(&d, a1, b[4]);
+    secp256k1_u128_accum_mul(&d, a2, b[3]);
+    secp256k1_u128_accum_mul(&d, a3, b[2]);
+    secp256k1_u128_accum_mul(&d, a4, b[1]);
+    VERIFY_BITS_128(&d, 115);
     /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    u0 = d & M; d >>= 52;
+    u0 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(u0, 52);
-    VERIFY_BITS(d, 63);
+    VERIFY_BITS_128(&d, 63);
     /* [d u0 t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
     /* [d 0 t4+(tx<<48)+(u0<<52) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
     u0 = (u0 << 4) | tx;
     VERIFY_BITS(u0, 56);
     /* [d 0 t4+(u0<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    c += (uint128_t)u0 * (R >> 4);
-    VERIFY_BITS(c, 115);
+    secp256k1_u128_accum_mul(&c, u0, R >> 4);
+    VERIFY_BITS_128(&c, 115);
     /* [d 0 t4 t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    r[0] = c & M; c >>= 52;
+    r[0] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[0], 52);
-    VERIFY_BITS(c, 61);
+    VERIFY_BITS_128(&c, 61);
     /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 0 p0] */

-    c += (uint128_t)a0 * b[1]
-      + (uint128_t)a1 * b[0];
-    VERIFY_BITS(c, 114);
+    secp256k1_u128_accum_mul(&c, a0, b[1]);
+    secp256k1_u128_accum_mul(&c, a1, b[0]);
+    VERIFY_BITS_128(&c, 114);
     /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 p1 p0] */
-    d += (uint128_t)a2 * b[4]
-      + (uint128_t)a3 * b[3]
-      + (uint128_t)a4 * b[2];
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_accum_mul(&d, a2, b[4]);
+    secp256k1_u128_accum_mul(&d, a3, b[3]);
+    secp256k1_u128_accum_mul(&d, a4, b[2]);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
-    c += (d & M) * R; d >>= 52;
-    VERIFY_BITS(c, 115);
-    VERIFY_BITS(d, 62);
+    secp256k1_u128_accum_mul(&c, secp256k1_u128_to_u64(&d) & M, R); secp256k1_u128_rshift(&d, 52);
+    VERIFY_BITS_128(&c, 115);
+    VERIFY_BITS_128(&d, 62);
     /* [d 0 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
-    r[1] = c & M; c >>= 52;
+    r[1] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[1], 52);
-    VERIFY_BITS(c, 63);
+    VERIFY_BITS_128(&c, 63);
     /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */

-    c += (uint128_t)a0 * b[2]
-      + (uint128_t)a1 * b[1]
-      + (uint128_t)a2 * b[0];
-    VERIFY_BITS(c, 114);
+    secp256k1_u128_accum_mul(&c, a0, b[2]);
+    secp256k1_u128_accum_mul(&c, a1, b[1]);
+    secp256k1_u128_accum_mul(&c, a2, b[0]);
+    VERIFY_BITS_128(&c, 114);
     /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 p2 p1 p0] */
-    d += (uint128_t)a3 * b[4]
-      + (uint128_t)a4 * b[3];
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_accum_mul(&d, a3, b[4]);
+    secp256k1_u128_accum_mul(&d, a4, b[3]);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 0 t4 t3 c t1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    c += (d & M) * R; d >>= 52;
-    VERIFY_BITS(c, 115);
-    VERIFY_BITS(d, 62);
+    secp256k1_u128_accum_mul(&c, secp256k1_u128_to_u64(&d) & M, R); secp256k1_u128_rshift(&d, 52);
+    VERIFY_BITS_128(&c, 115);
+    VERIFY_BITS_128(&d, 62);
     /* [d 0 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */

     /* [d 0 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    r[2] = c & M; c >>= 52;
+    r[2] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[2], 52);
-    VERIFY_BITS(c, 63);
+    VERIFY_BITS_128(&c, 63);
     /* [d 0 0 0 t4 t3+c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    c += d * R + t3;
-    VERIFY_BITS(c, 100);
+    secp256k1_u128_accum_mul(&c, secp256k1_u128_to_u64(&d), R);
+    secp256k1_u128_accum_u64(&c, t3);
+    VERIFY_BITS_128(&c, 100);
     /* [t4 c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    r[3] = c & M; c >>= 52;
+    r[3] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[3], 52);
-    VERIFY_BITS(c, 48);
+    VERIFY_BITS_128(&c, 48);
     /* [t4+c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    c += t4;
-    VERIFY_BITS(c, 49);
-    /* [c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    r[4] = c;
+    r[4] = secp256k1_u128_to_u64(&c) + t4;
     VERIFY_BITS(r[4], 49);
     /* [r4 r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
 }
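One step in this hunk changes shape rather than just notation: the old `c += t4; r[4] = c;` becomes a plain 64-bit addition. That is safe because of the bounds the surrounding VERIFY_BITS annotations already assert; a tiny standalone check of the arithmetic fact, with worst-case values taken from those annotations:

#include <assert.h>
#include <stdint.h>

int main(void) {
    /* Per the annotations: c < 2^48 after the final right-shift, and t4 < 2^48. */
    uint64_t c  = (1ULL << 48) - 1;
    uint64_t t4 = (1ULL << 48) - 1;
    uint64_t r4 = c + t4;          /* sum < 2^49, so no uint64_t wraparound */
    assert(r4 >> 49 == 0);         /* matches VERIFY_BITS(r[4], 49) */
    return 0;
}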
 SECP256K1_INLINE static void secp256k1_fe_sqr_inner(uint64_t *r, const uint64_t *a) {
-    uint128_t c, d;
+    secp256k1_uint128 c, d;
     uint64_t a0 = a[0], a1 = a[1], a2 = a[2], a3 = a[3], a4 = a[4];
     int64_t t3, t4, tx, u0;
     const uint64_t M = 0xFFFFFFFFFFFFFULL, R = 0x1000003D10ULL;
@@ -171,107 +171,105 @@ SECP256K1_INLINE static void secp256k1_fe_sqr_inner(uint64_t *r, const uint64_t
      * Note that [x 0 0 0 0 0] = [x*R].
      */

-    d = (uint128_t)(a0*2) * a3
-      + (uint128_t)(a1*2) * a2;
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_mul(&d, a0*2, a3);
+    secp256k1_u128_accum_mul(&d, a1*2, a2);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 0 0] = [p3 0 0 0] */
-    c = (uint128_t)a4 * a4;
-    VERIFY_BITS(c, 112);
+    secp256k1_u128_mul(&c, a4, a4);
+    VERIFY_BITS_128(&c, 112);
     /* [c 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
-    d += (c & M) * R; c >>= 52;
-    VERIFY_BITS(d, 115);
-    VERIFY_BITS(c, 60);
+    secp256k1_u128_accum_mul(&d, secp256k1_u128_to_u64(&c) & M, R); secp256k1_u128_rshift(&c, 52);
+    VERIFY_BITS_128(&d, 115);
+    VERIFY_BITS_128(&c, 60);
     /* [c 0 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
-    t3 = d & M; d >>= 52;
+    t3 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(t3, 52);
-    VERIFY_BITS(d, 63);
+    VERIFY_BITS_128(&d, 63);
     /* [c 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */

     a4 *= 2;
-    d += (uint128_t)a0 * a4
-      + (uint128_t)(a1*2) * a3
-      + (uint128_t)a2 * a2;
-    VERIFY_BITS(d, 115);
+    secp256k1_u128_accum_mul(&d, a0, a4);
+    secp256k1_u128_accum_mul(&d, a1*2, a3);
+    secp256k1_u128_accum_mul(&d, a2, a2);
+    VERIFY_BITS_128(&d, 115);
     /* [c 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
-    d += c * R;
-    VERIFY_BITS(d, 116);
+    secp256k1_u128_accum_mul(&d, secp256k1_u128_to_u64(&c), R);
+    VERIFY_BITS_128(&d, 116);
     /* [d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
-    t4 = d & M; d >>= 52;
+    t4 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(t4, 52);
-    VERIFY_BITS(d, 64);
+    VERIFY_BITS_128(&d, 64);
     /* [d t4 t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
     tx = (t4 >> 48); t4 &= (M >> 4);
     VERIFY_BITS(tx, 4);
     VERIFY_BITS(t4, 48);
     /* [d t4+(tx<<48) t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */

-    c = (uint128_t)a0 * a0;
-    VERIFY_BITS(c, 112);
+    secp256k1_u128_mul(&c, a0, a0);
+    VERIFY_BITS_128(&c, 112);
     /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 0 p4 p3 0 0 p0] */
-    d += (uint128_t)a1 * a4
-      + (uint128_t)(a2*2) * a3;
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_accum_mul(&d, a1, a4);
+    secp256k1_u128_accum_mul(&d, a2*2, a3);
+    VERIFY_BITS_128(&d, 114);
     /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    u0 = d & M; d >>= 52;
+    u0 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(u0, 52);
-    VERIFY_BITS(d, 62);
+    VERIFY_BITS_128(&d, 62);
     /* [d u0 t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
     /* [d 0 t4+(tx<<48)+(u0<<52) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
     u0 = (u0 << 4) | tx;
     VERIFY_BITS(u0, 56);
     /* [d 0 t4+(u0<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    c += (uint128_t)u0 * (R >> 4);
-    VERIFY_BITS(c, 113);
+    secp256k1_u128_accum_mul(&c, u0, R >> 4);
+    VERIFY_BITS_128(&c, 113);
     /* [d 0 t4 t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    r[0] = c & M; c >>= 52;
+    r[0] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[0], 52);
-    VERIFY_BITS(c, 61);
+    VERIFY_BITS_128(&c, 61);
     /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 0 p0] */

     a0 *= 2;
-    c += (uint128_t)a0 * a1;
-    VERIFY_BITS(c, 114);
+    secp256k1_u128_accum_mul(&c, a0, a1);
+    VERIFY_BITS_128(&c, 114);
     /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 p1 p0] */
-    d += (uint128_t)a2 * a4
-      + (uint128_t)a3 * a3;
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_accum_mul(&d, a2, a4);
+    secp256k1_u128_accum_mul(&d, a3, a3);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
-    c += (d & M) * R; d >>= 52;
-    VERIFY_BITS(c, 115);
-    VERIFY_BITS(d, 62);
+    secp256k1_u128_accum_mul(&c, secp256k1_u128_to_u64(&d) & M, R); secp256k1_u128_rshift(&d, 52);
+    VERIFY_BITS_128(&c, 115);
+    VERIFY_BITS_128(&d, 62);
     /* [d 0 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
-    r[1] = c & M; c >>= 52;
+    r[1] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[1], 52);
-    VERIFY_BITS(c, 63);
+    VERIFY_BITS_128(&c, 63);
     /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */

-    c += (uint128_t)a0 * a2
-      + (uint128_t)a1 * a1;
-    VERIFY_BITS(c, 114);
+    secp256k1_u128_accum_mul(&c, a0, a2);
+    secp256k1_u128_accum_mul(&c, a1, a1);
+    VERIFY_BITS_128(&c, 114);
     /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 p2 p1 p0] */
-    d += (uint128_t)a3 * a4;
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_accum_mul(&d, a3, a4);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    c += (d & M) * R; d >>= 52;
-    VERIFY_BITS(c, 115);
-    VERIFY_BITS(d, 62);
+    secp256k1_u128_accum_mul(&c, secp256k1_u128_to_u64(&d) & M, R); secp256k1_u128_rshift(&d, 52);
+    VERIFY_BITS_128(&c, 115);
+    VERIFY_BITS_128(&d, 62);
     /* [d 0 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    r[2] = c & M; c >>= 52;
+    r[2] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[2], 52);
-    VERIFY_BITS(c, 63);
+    VERIFY_BITS_128(&c, 63);
     /* [d 0 0 0 t4 t3+c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */

-    c += d * R + t3;
-    VERIFY_BITS(c, 100);
+    secp256k1_u128_accum_mul(&c, secp256k1_u128_to_u64(&d), R);
+    secp256k1_u128_accum_u64(&c, t3);
+    VERIFY_BITS_128(&c, 100);
     /* [t4 c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    r[3] = c & M; c >>= 52;
+    r[3] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[3], 52);
-    VERIFY_BITS(c, 48);
+    VERIFY_BITS_128(&c, 48);
     /* [t4+c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    c += t4;
-    VERIFY_BITS(c, 49);
-    /* [c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    r[4] = c;
+    r[4] = secp256k1_u128_to_u64(&c) + t4;
     VERIFY_BITS(r[4], 49);
     /* [r4 r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
 }
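A natural way to exercise both rewritten functions together is to verify that squaring agrees with self-multiplication. A hypothetical harness (the helper name is invented here, not part of this PR or the library's test suite):

#include <assert.h>
#include <stdint.h>
#include <string.h>

/* Hypothetical consistency check: squaring must agree with multiplying a
 * value by itself. The copy keeps a and b distinct, respecting the
 * SECP256K1_RESTRICT qualifier on secp256k1_fe_mul_inner's b argument. */
static void check_sqr_matches_mul(const uint64_t a[5]) {
    uint64_t b[5], r_mul[5], r_sqr[5];
    memcpy(b, a, sizeof(b));
    secp256k1_fe_mul_inner(r_mul, a, b);
    secp256k1_fe_sqr_inner(r_sqr, a);
    assert(memcmp(r_mul, r_sqr, sizeof(r_mul)) == 0);
}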