 #ifdef VERIFY
 #define VERIFY_BITS(x, n) VERIFY_CHECK(((x) >> (n)) == 0)
+#define VERIFY_BITS_128(x, n) VERIFY_CHECK(secp256k1_u128_check_bits((x), (n)))
 #else
 #define VERIFY_BITS(x, n) do { } while(0)
+#define VERIFY_BITS_128(x, n) do { } while(0)
 #endif

 SECP256K1_INLINE static void secp256k1_fe_mul_inner(uint64_t *r, const uint64_t *a, const uint64_t * SECP256K1_RESTRICT b) {
-    uint128_t c, d;
+    secp256k1_uint128 c, d;
     uint64_t t3, t4, tx, u0;
     uint64_t a0 = a[0], a1 = a[1], a2 = a[2], a3 = a[3], a4 = a[4];
     const uint64_t M = 0xFFFFFFFFFFFFFULL, R = 0x1000003D10ULL;
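Aside for readers of this diff: M is the 52-bit limb mask, and R encodes the field reduction. The secp256k1 field prime is p = 2^256 - 0x1000003D1, and a sixth 52-bit limb would carry weight 2^260 = 2^4 * 2^256, so 2^260 mod p = 0x1000003D1 << 4 = 0x1000003D10, which is exactly R. This is the identity behind the "[x 0 0 0 0 0] = [x*R]" comments below. A standalone sanity check of that arithmetic, not part of the library:

    #include <stdint.h>
    #include <stdio.h>

    int main(void) {
        const uint64_t p_complement = 0x1000003D1ULL; /* 2^256 - p for the secp256k1 field */
        /* Limb 5 of a 5x52 representation has weight 2^260 = 2^4 * 2^256, so modulo p
         * it reduces to 2^4 * (2^256 mod p) = 0x1000003D1 << 4. */
        const uint64_t R = p_complement << 4;
        printf("R = 0x%llX\n", (unsigned long long)R); /* prints 0x1000003D10 */
        return R == 0x1000003D10ULL ? 0 : 1;
    }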
@@ -40,121 +42,119 @@ SECP256K1_INLINE static void secp256k1_fe_mul_inner(uint64_t *r, const uint64_t
     * Note that [x 0 0 0 0 0] = [x*R].
     */

-    d = (uint128_t)a0 * b[3]
-      + (uint128_t)a1 * b[2]
-      + (uint128_t)a2 * b[1]
-      + (uint128_t)a3 * b[0];
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_mul(&d, a0, b[3]);
+    secp256k1_u128_accum_mul(&d, a1, b[2]);
+    secp256k1_u128_accum_mul(&d, a2, b[1]);
+    secp256k1_u128_accum_mul(&d, a3, b[0]);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 0 0] = [p3 0 0 0] */
-    c = (uint128_t)a4 * b[4];
-    VERIFY_BITS(c, 112);
+    secp256k1_u128_mul(&c, a4, b[4]);
+    VERIFY_BITS_128(&c, 112);
     /* [c 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
-    d += (uint128_t)R * (uint64_t)c; c >>= 64;
-    VERIFY_BITS(d, 115);
-    VERIFY_BITS(c, 48);
+    secp256k1_u128_accum_mul(&d, R, secp256k1_u128_to_u64(&c)); secp256k1_u128_rshift(&c, 64);
+    VERIFY_BITS_128(&d, 115);
+    VERIFY_BITS_128(&c, 48);
     /* [(c<<12) 0 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
-    t3 = d & M; d >>= 52;
+    t3 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(t3, 52);
-    VERIFY_BITS(d, 63);
+    VERIFY_BITS_128(&d, 63);
     /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */

-    d += (uint128_t)a0 * b[4]
-      + (uint128_t)a1 * b[3]
-      + (uint128_t)a2 * b[2]
-      + (uint128_t)a3 * b[1]
-      + (uint128_t)a4 * b[0];
-    VERIFY_BITS(d, 115);
+    secp256k1_u128_accum_mul(&d, a0, b[4]);
+    secp256k1_u128_accum_mul(&d, a1, b[3]);
+    secp256k1_u128_accum_mul(&d, a2, b[2]);
+    secp256k1_u128_accum_mul(&d, a3, b[1]);
+    secp256k1_u128_accum_mul(&d, a4, b[0]);
+    VERIFY_BITS_128(&d, 115);
     /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
-    d += (uint128_t)(R << 12) * (uint64_t)c;
-    VERIFY_BITS(d, 116);
+    secp256k1_u128_accum_mul(&d, R << 12, secp256k1_u128_to_u64(&c));
+    VERIFY_BITS_128(&d, 116);
     /* [d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
-    t4 = d & M; d >>= 52;
+    t4 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(t4, 52);
-    VERIFY_BITS(d, 64);
+    VERIFY_BITS_128(&d, 64);
     /* [d t4 t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
     tx = (t4 >> 48); t4 &= (M >> 4);
     VERIFY_BITS(tx, 4);
     VERIFY_BITS(t4, 48);
     /* [d t4+(tx<<48) t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */

-    c = (uint128_t)a0 * b[0];
-    VERIFY_BITS(c, 112);
+    secp256k1_u128_mul(&c, a0, b[0]);
+    VERIFY_BITS_128(&c, 112);
     /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 0 p4 p3 0 0 p0] */
-    d += (uint128_t)a1 * b[4]
-      + (uint128_t)a2 * b[3]
-      + (uint128_t)a3 * b[2]
-      + (uint128_t)a4 * b[1];
-    VERIFY_BITS(d, 115);
+    secp256k1_u128_accum_mul(&d, a1, b[4]);
+    secp256k1_u128_accum_mul(&d, a2, b[3]);
+    secp256k1_u128_accum_mul(&d, a3, b[2]);
+    secp256k1_u128_accum_mul(&d, a4, b[1]);
+    VERIFY_BITS_128(&d, 115);
     /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    u0 = d & M; d >>= 52;
+    u0 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(u0, 52);
-    VERIFY_BITS(d, 63);
+    VERIFY_BITS_128(&d, 63);
     /* [d u0 t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
     /* [d 0 t4+(tx<<48)+(u0<<52) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
     u0 = (u0 << 4) | tx;
     VERIFY_BITS(u0, 56);
     /* [d 0 t4+(u0<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    c += (uint128_t)u0 * (R >> 4);
-    VERIFY_BITS(c, 115);
+    secp256k1_u128_accum_mul(&c, u0, R >> 4);
+    VERIFY_BITS_128(&c, 115);
     /* [d 0 t4 t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    r[0] = c & M; c >>= 52;
+    r[0] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[0], 52);
-    VERIFY_BITS(c, 61);
+    VERIFY_BITS_128(&c, 61);
     /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 0 p0] */

-    c += (uint128_t)a0 * b[1]
-      + (uint128_t)a1 * b[0];
-    VERIFY_BITS(c, 114);
+    secp256k1_u128_accum_mul(&c, a0, b[1]);
+    secp256k1_u128_accum_mul(&c, a1, b[0]);
+    VERIFY_BITS_128(&c, 114);
     /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 p1 p0] */
-    d += (uint128_t)a2 * b[4]
-      + (uint128_t)a3 * b[3]
-      + (uint128_t)a4 * b[2];
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_accum_mul(&d, a2, b[4]);
+    secp256k1_u128_accum_mul(&d, a3, b[3]);
+    secp256k1_u128_accum_mul(&d, a4, b[2]);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
-    c += (d & M) * R; d >>= 52;
-    VERIFY_BITS(c, 115);
-    VERIFY_BITS(d, 62);
+    secp256k1_u128_accum_mul(&c, secp256k1_u128_to_u64(&d) & M, R); secp256k1_u128_rshift(&d, 52);
+    VERIFY_BITS_128(&c, 115);
+    VERIFY_BITS_128(&d, 62);
     /* [d 0 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
-    r[1] = c & M; c >>= 52;
+    r[1] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[1], 52);
-    VERIFY_BITS(c, 63);
+    VERIFY_BITS_128(&c, 63);
     /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */

-    c += (uint128_t)a0 * b[2]
-      + (uint128_t)a1 * b[1]
-      + (uint128_t)a2 * b[0];
-    VERIFY_BITS(c, 114);
+    secp256k1_u128_accum_mul(&c, a0, b[2]);
+    secp256k1_u128_accum_mul(&c, a1, b[1]);
+    secp256k1_u128_accum_mul(&c, a2, b[0]);
+    VERIFY_BITS_128(&c, 114);
     /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 p2 p1 p0] */
-    d += (uint128_t)a3 * b[4]
-      + (uint128_t)a4 * b[3];
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_accum_mul(&d, a3, b[4]);
+    secp256k1_u128_accum_mul(&d, a4, b[3]);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 0 t4 t3 c t1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    c += (uint128_t)R * (uint64_t)d; d >>= 64;
-    VERIFY_BITS(c, 115);
-    VERIFY_BITS(d, 50);
+    secp256k1_u128_accum_mul(&c, R, secp256k1_u128_to_u64(&d)); secp256k1_u128_rshift(&d, 64);
+    VERIFY_BITS_128(&c, 115);
+    VERIFY_BITS_128(&d, 50);
     /* [(d<<12) 0 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */

-    r[2] = c & M; c >>= 52;
+    r[2] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[2], 52);
-    VERIFY_BITS(c, 63);
+    VERIFY_BITS_128(&c, 63);
     /* [(d<<12) 0 0 0 t4 t3+c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    c += (uint128_t)(R << 12) * (uint64_t)d + t3;
-    VERIFY_BITS(c, 100);
+    secp256k1_u128_accum_mul(&c, R << 12, secp256k1_u128_to_u64(&d));
+    secp256k1_u128_accum_u64(&c, t3);
+    VERIFY_BITS_128(&c, 100);
     /* [t4 c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    r[3] = c & M; c >>= 52;
+    r[3] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[3], 52);
-    VERIFY_BITS(c, 48);
+    VERIFY_BITS_128(&c, 48);
     /* [t4+c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    c += t4;
-    VERIFY_BITS(c, 49);
-    /* [c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    r[4] = c;
+    r[4] = secp256k1_u128_to_u64(&c) + t4;
     VERIFY_BITS(r[4], 49);
     /* [r4 r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
 }

 SECP256K1_INLINE static void secp256k1_fe_sqr_inner(uint64_t *r, const uint64_t *a) {
-    uint128_t c, d;
+    secp256k1_uint128 c, d;
     uint64_t a0 = a[0], a1 = a[1], a2 = a[2], a3 = a[3], a4 = a[4];
     int64_t t3, t4, tx, u0;
     const uint64_t M = 0xFFFFFFFFFFFFFULL, R = 0x1000003D10ULL;
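For reference while reading the rewritten arithmetic: judging from the call sites in this diff, on compilers with a native unsigned __int128 the secp256k1_u128_* helpers can be thought of as the minimal sketch below. This is an inferred reconstruction, not the library's actual implementation, which presumably also covers compilers without a 128-bit type (that being the point of the abstraction):

    #include <stdint.h>

    typedef unsigned __int128 secp256k1_uint128;  /* assumes a native 128-bit type */

    /* r = a * b, as a full 128-bit product of 64-bit operands. */
    static void secp256k1_u128_mul(secp256k1_uint128 *r, uint64_t a, uint64_t b) {
        *r = (secp256k1_uint128)a * b;
    }

    /* r += a * b. */
    static void secp256k1_u128_accum_mul(secp256k1_uint128 *r, uint64_t a, uint64_t b) {
        *r += (secp256k1_uint128)a * b;
    }

    /* r += a. */
    static void secp256k1_u128_accum_u64(secp256k1_uint128 *r, uint64_t a) {
        *r += a;
    }

    /* r >>= n, for 0 <= n < 128. */
    static void secp256k1_u128_rshift(secp256k1_uint128 *r, unsigned int n) {
        *r >>= n;
    }

    /* The low 64 bits of a. */
    static uint64_t secp256k1_u128_to_u64(const secp256k1_uint128 *a) {
        return (uint64_t)*a;
    }

    /* Nonzero iff a fits in n bits; VERIFY_BITS_128 above builds on this. */
    static int secp256k1_u128_check_bits(const secp256k1_uint128 *a, unsigned int n) {
        return (*a >> n) == 0;
    }

Routing every 128-bit operation through named helpers like these keeps secp256k1_fe_mul_inner and secp256k1_fe_sqr_inner source-compatible with both a native 128-bit backend and a portable one.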
@@ -170,107 +170,105 @@ SECP256K1_INLINE static void secp256k1_fe_sqr_inner(uint64_t *r, const uint64_t
     * Note that [x 0 0 0 0 0] = [x*R].
     */

-    d = (uint128_t)(a0 * 2) * a3
-      + (uint128_t)(a1 * 2) * a2;
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_mul(&d, a0 * 2, a3);
+    secp256k1_u128_accum_mul(&d, a1 * 2, a2);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 0 0] = [p3 0 0 0] */
-    c = (uint128_t)a4 * a4;
-    VERIFY_BITS(c, 112);
+    secp256k1_u128_mul(&c, a4, a4);
+    VERIFY_BITS_128(&c, 112);
     /* [c 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
-    d += (uint128_t)R * (uint64_t)c; c >>= 64;
-    VERIFY_BITS(d, 115);
-    VERIFY_BITS(c, 48);
+    secp256k1_u128_accum_mul(&d, R, secp256k1_u128_to_u64(&c)); secp256k1_u128_rshift(&c, 64);
+    VERIFY_BITS_128(&d, 115);
+    VERIFY_BITS_128(&c, 48);
     /* [(c<<12) 0 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
-    t3 = d & M; d >>= 52;
+    t3 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(t3, 52);
-    VERIFY_BITS(d, 63);
+    VERIFY_BITS_128(&d, 63);
     /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */

     a4 *= 2;
-    d += (uint128_t)a0 * a4
-      + (uint128_t)(a1 * 2) * a3
-      + (uint128_t)a2 * a2;
-    VERIFY_BITS(d, 115);
+    secp256k1_u128_accum_mul(&d, a0, a4);
+    secp256k1_u128_accum_mul(&d, a1 * 2, a3);
+    secp256k1_u128_accum_mul(&d, a2, a2);
+    VERIFY_BITS_128(&d, 115);
     /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
-    d += (uint128_t)(R << 12) * (uint64_t)c;
-    VERIFY_BITS(d, 116);
+    secp256k1_u128_accum_mul(&d, R << 12, secp256k1_u128_to_u64(&c));
+    VERIFY_BITS_128(&d, 116);
     /* [d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
-    t4 = d & M; d >>= 52;
+    t4 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(t4, 52);
-    VERIFY_BITS(d, 64);
+    VERIFY_BITS_128(&d, 64);
     /* [d t4 t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
     tx = (t4 >> 48); t4 &= (M >> 4);
     VERIFY_BITS(tx, 4);
     VERIFY_BITS(t4, 48);
     /* [d t4+(tx<<48) t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */

-    c = (uint128_t)a0 * a0;
-    VERIFY_BITS(c, 112);
+    secp256k1_u128_mul(&c, a0, a0);
+    VERIFY_BITS_128(&c, 112);
     /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 0 p4 p3 0 0 p0] */
-    d += (uint128_t)a1 * a4
-      + (uint128_t)(a2 * 2) * a3;
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_accum_mul(&d, a1, a4);
+    secp256k1_u128_accum_mul(&d, a2 * 2, a3);
+    VERIFY_BITS_128(&d, 114);
     /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    u0 = d & M; d >>= 52;
+    u0 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(u0, 52);
-    VERIFY_BITS(d, 62);
+    VERIFY_BITS_128(&d, 62);
     /* [d u0 t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
     /* [d 0 t4+(tx<<48)+(u0<<52) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
     u0 = (u0 << 4) | tx;
     VERIFY_BITS(u0, 56);
     /* [d 0 t4+(u0<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    c += (uint128_t)u0 * (R >> 4);
-    VERIFY_BITS(c, 113);
+    secp256k1_u128_accum_mul(&c, u0, R >> 4);
+    VERIFY_BITS_128(&c, 113);
     /* [d 0 t4 t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    r[0] = c & M; c >>= 52;
+    r[0] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[0], 52);
-    VERIFY_BITS(c, 61);
+    VERIFY_BITS_128(&c, 61);
     /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 0 p0] */

     a0 *= 2;
-    c += (uint128_t)a0 * a1;
-    VERIFY_BITS(c, 114);
+    secp256k1_u128_accum_mul(&c, a0, a1);
+    VERIFY_BITS_128(&c, 114);
     /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 p1 p0] */
-    d += (uint128_t)a2 * a4
-      + (uint128_t)a3 * a3;
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_accum_mul(&d, a2, a4);
+    secp256k1_u128_accum_mul(&d, a3, a3);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
-    c += (d & M) * R; d >>= 52;
-    VERIFY_BITS(c, 115);
-    VERIFY_BITS(d, 62);
+    secp256k1_u128_accum_mul(&c, secp256k1_u128_to_u64(&d) & M, R); secp256k1_u128_rshift(&d, 52);
+    VERIFY_BITS_128(&c, 115);
+    VERIFY_BITS_128(&d, 62);
     /* [d 0 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
-    r[1] = c & M; c >>= 52;
+    r[1] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[1], 52);
-    VERIFY_BITS(c, 63);
+    VERIFY_BITS_128(&c, 63);
     /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */

-    c += (uint128_t)a0 * a2
-      + (uint128_t)a1 * a1;
-    VERIFY_BITS(c, 114);
+    secp256k1_u128_accum_mul(&c, a0, a2);
+    secp256k1_u128_accum_mul(&c, a1, a1);
+    VERIFY_BITS_128(&c, 114);
     /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 p2 p1 p0] */
-    d += (uint128_t)a3 * a4;
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_accum_mul(&d, a3, a4);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    c += (uint128_t)R * (uint64_t)d; d >>= 64;
-    VERIFY_BITS(c, 115);
-    VERIFY_BITS(d, 50);
+    secp256k1_u128_accum_mul(&c, R, secp256k1_u128_to_u64(&d)); secp256k1_u128_rshift(&d, 64);
+    VERIFY_BITS_128(&c, 115);
+    VERIFY_BITS_128(&d, 50);
     /* [(d<<12) 0 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    r[2] = c & M; c >>= 52;
+    r[2] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[2], 52);
-    VERIFY_BITS(c, 63);
+    VERIFY_BITS_128(&c, 63);
     /* [(d<<12) 0 0 0 t4 t3+c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */

-    c += (uint128_t)(R << 12) * (uint64_t)d + t3;
-    VERIFY_BITS(c, 100);
+    secp256k1_u128_accum_mul(&c, R << 12, secp256k1_u128_to_u64(&d));
+    secp256k1_u128_accum_u64(&c, t3);
+    VERIFY_BITS_128(&c, 100);
     /* [t4 c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    r[3] = c & M; c >>= 52;
+    r[3] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[3], 52);
-    VERIFY_BITS(c, 48);
+    VERIFY_BITS_128(&c, 48);
     /* [t4+c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    c += t4;
-    VERIFY_BITS(c, 49);
-    /* [c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    r[4] = c;
+    r[4] = secp256k1_u128_to_u64(&c) + t4;
     VERIFY_BITS(r[4], 49);
     /* [r4 r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
 }
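One recurring idiom above deserves a note: t = secp256k1_u128_to_u64(&d) & M followed by secp256k1_u128_rshift(&d, 52) peels off one 52-bit limb and keeps the carry in d. A toy roundtrip check built on the sketched helpers above, with arbitrary made-up inputs:

    #include <assert.h>
    #include <stdint.h>

    /* Assumes the secp256k1_uint128 typedef and helpers sketched earlier. */
    int main(void) {
        const uint64_t M = 0xFFFFFFFFFFFFFULL;        /* 52-bit mask */
        secp256k1_uint128 d, before;
        uint64_t limb;
        secp256k1_u128_mul(&d, 0x0123456789ABCDEFULL, 0x0FEDCBA987654321ULL);
        before = d;
        limb = secp256k1_u128_to_u64(&d) & M;         /* low 52 bits become the limb */
        secp256k1_u128_rshift(&d, 52);                /* the rest becomes the carry */
        /* Nothing is lost: carry * 2^52 + limb reconstructs the original value. */
        assert((d << 52) + limb == before);
        return 0;
    }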