
Commit 73f3984

WIP: Simulated int128 type.
Parent: ea5e8a9

5 files changed: +562 -279 lines

Makefile.am

+1 line changed
@@ -40,6 +40,7 @@ noinst_HEADERS += src/modinv64.h
 noinst_HEADERS += src/modinv64_impl.h
 noinst_HEADERS += src/assumptions.h
 noinst_HEADERS += src/util.h
+noinst_HEADERS += src/int128.h
 noinst_HEADERS += src/scratch.h
 noinst_HEADERS += src/scratch_impl.h
 noinst_HEADERS += src/selftest.h
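
The new src/int128.h header registered above supplies the abstract 128-bit type that the field code below is rewritten against. The header itself is not part of this excerpt, so the following is only a minimal sketch of the helpers the diff calls, assuming a native unsigned __int128 backend; names and semantics are inferred from the call sites in src/field_5x52_int128_impl.h, and SECP256K1_INLINE is the library's portable inline macro from src/util.h.

#include <stdint.h>

/* Sketch only: assumes the compiler provides unsigned __int128. */
typedef unsigned __int128 secp256k1_uint128;

/* r = a*b */
static SECP256K1_INLINE void secp256k1_u128_mul(secp256k1_uint128 *r, uint64_t a, uint64_t b) {
    *r = (unsigned __int128)a * b;
}

/* r += a*b */
static SECP256K1_INLINE void secp256k1_u128_accum_mul(secp256k1_uint128 *r, uint64_t a, uint64_t b) {
    *r += (unsigned __int128)a * b;
}

/* r += a */
static SECP256K1_INLINE void secp256k1_u128_accum_u64(secp256k1_uint128 *r, uint64_t a) {
    *r += a;
}

/* r >>= n, with 0 <= n < 128 */
static SECP256K1_INLINE void secp256k1_u128_rshift(secp256k1_uint128 *r, unsigned int n) {
    *r >>= n;
}

/* Low 64 bits of r. */
static SECP256K1_INLINE uint64_t secp256k1_u128_to_u64(const secp256k1_uint128 *r) {
    return (uint64_t)*r;
}

/* Nonzero iff r fits in n bits (r < 2^n), for n < 128; used by VERIFY_BITS_128. */
static SECP256K1_INLINE int secp256k1_u128_check_bits(const secp256k1_uint128 *r, unsigned int n) {
    return (*r >> n) == 0;
}

With a simulated backend (e.g. a struct of two uint64_t limbs), the same signatures keep the call sites below unchanged; only the helper bodies differ.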

src/field_5x52_int128_impl.h

+124 -126 lines changed
@@ -11,12 +11,14 @@
 
 #ifdef VERIFY
 #define VERIFY_BITS(x, n) VERIFY_CHECK(((x) >> (n)) == 0)
+#define VERIFY_BITS_128(x, n) VERIFY_CHECK(secp256k1_u128_check_bits((x), (n)))
 #else
 #define VERIFY_BITS(x, n) do { } while(0)
+#define VERIFY_BITS_128(x, n) do { } while(0)
 #endif
 
 SECP256K1_INLINE static void secp256k1_fe_mul_inner(uint64_t *r, const uint64_t *a, const uint64_t * SECP256K1_RESTRICT b) {
-    uint128_t c, d;
+    secp256k1_uint128 c, d;
     uint64_t t3, t4, tx, u0;
     uint64_t a0 = a[0], a1 = a[1], a2 = a[2], a3 = a[3], a4 = a[4];
     const uint64_t M = 0xFFFFFFFFFFFFFULL, R = 0x1000003D10ULL;
@@ -40,121 +42,119 @@ SECP256K1_INLINE static void secp256k1_fe_mul_inner(uint64_t *r, const uint64_t
     * Note that [x 0 0 0 0 0] = [x*R].
     */
 
-    d = (uint128_t)a0 * b[3]
-      + (uint128_t)a1 * b[2]
-      + (uint128_t)a2 * b[1]
-      + (uint128_t)a3 * b[0];
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_mul(&d, a0, b[3]);
+    secp256k1_u128_accum_mul(&d, a1, b[2]);
+    secp256k1_u128_accum_mul(&d, a2, b[1]);
+    secp256k1_u128_accum_mul(&d, a3, b[0]);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 0 0] = [p3 0 0 0] */
-    c = (uint128_t)a4 * b[4];
-    VERIFY_BITS(c, 112);
+    secp256k1_u128_mul(&c, a4, b[4]);
+    VERIFY_BITS_128(&c, 112);
     /* [c 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
-    d += (uint128_t)R * (uint64_t)c; c >>= 64;
-    VERIFY_BITS(d, 115);
-    VERIFY_BITS(c, 48);
+    secp256k1_u128_accum_mul(&d, R, secp256k1_u128_to_u64(&c)); secp256k1_u128_rshift(&c, 64);
+    VERIFY_BITS_128(&d, 115);
+    VERIFY_BITS_128(&c, 48);
     /* [(c<<12) 0 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
-    t3 = d & M; d >>= 52;
+    t3 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(t3, 52);
-    VERIFY_BITS(d, 63);
+    VERIFY_BITS_128(&d, 63);
     /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
 
-    d += (uint128_t)a0 * b[4]
-       + (uint128_t)a1 * b[3]
-       + (uint128_t)a2 * b[2]
-       + (uint128_t)a3 * b[1]
-       + (uint128_t)a4 * b[0];
-    VERIFY_BITS(d, 115);
+    secp256k1_u128_accum_mul(&d, a0, b[4]);
+    secp256k1_u128_accum_mul(&d, a1, b[3]);
+    secp256k1_u128_accum_mul(&d, a2, b[2]);
+    secp256k1_u128_accum_mul(&d, a3, b[1]);
+    secp256k1_u128_accum_mul(&d, a4, b[0]);
+    VERIFY_BITS_128(&d, 115);
     /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
-    d += (uint128_t)(R << 12) * (uint64_t)c;
-    VERIFY_BITS(d, 116);
+    secp256k1_u128_accum_mul(&d, R << 12, secp256k1_u128_to_u64(&c));
+    VERIFY_BITS_128(&d, 116);
     /* [d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
-    t4 = d & M; d >>= 52;
+    t4 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(t4, 52);
-    VERIFY_BITS(d, 64);
+    VERIFY_BITS_128(&d, 64);
     /* [d t4 t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
     tx = (t4 >> 48); t4 &= (M >> 4);
     VERIFY_BITS(tx, 4);
     VERIFY_BITS(t4, 48);
     /* [d t4+(tx<<48) t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
 
-    c = (uint128_t)a0 * b[0];
-    VERIFY_BITS(c, 112);
+    secp256k1_u128_mul(&c, a0, b[0]);
+    VERIFY_BITS_128(&c, 112);
     /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 0 p4 p3 0 0 p0] */
-    d += (uint128_t)a1 * b[4]
-       + (uint128_t)a2 * b[3]
-       + (uint128_t)a3 * b[2]
-       + (uint128_t)a4 * b[1];
-    VERIFY_BITS(d, 115);
+    secp256k1_u128_accum_mul(&d, a1, b[4]);
+    secp256k1_u128_accum_mul(&d, a2, b[3]);
+    secp256k1_u128_accum_mul(&d, a3, b[2]);
+    secp256k1_u128_accum_mul(&d, a4, b[1]);
+    VERIFY_BITS_128(&d, 115);
     /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    u0 = d & M; d >>= 52;
+    u0 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(u0, 52);
-    VERIFY_BITS(d, 63);
+    VERIFY_BITS_128(&d, 63);
     /* [d u0 t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
     /* [d 0 t4+(tx<<48)+(u0<<52) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
     u0 = (u0 << 4) | tx;
     VERIFY_BITS(u0, 56);
     /* [d 0 t4+(u0<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    c += (uint128_t)u0 * (R >> 4);
-    VERIFY_BITS(c, 115);
+    secp256k1_u128_accum_mul(&c, u0, R >> 4);
+    VERIFY_BITS_128(&c, 115);
     /* [d 0 t4 t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    r[0] = c & M; c >>= 52;
+    r[0] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[0], 52);
-    VERIFY_BITS(c, 61);
+    VERIFY_BITS_128(&c, 61);
     /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 0 p0] */
 
-    c += (uint128_t)a0 * b[1]
-       + (uint128_t)a1 * b[0];
-    VERIFY_BITS(c, 114);
+    secp256k1_u128_accum_mul(&c, a0, b[1]);
+    secp256k1_u128_accum_mul(&c, a1, b[0]);
+    VERIFY_BITS_128(&c, 114);
     /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 p1 p0] */
-    d += (uint128_t)a2 * b[4]
-       + (uint128_t)a3 * b[3]
-       + (uint128_t)a4 * b[2];
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_accum_mul(&d, a2, b[4]);
+    secp256k1_u128_accum_mul(&d, a3, b[3]);
+    secp256k1_u128_accum_mul(&d, a4, b[2]);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
-    c += (d & M) * R; d >>= 52;
-    VERIFY_BITS(c, 115);
-    VERIFY_BITS(d, 62);
+    secp256k1_u128_accum_mul(&c, secp256k1_u128_to_u64(&d) & M, R); secp256k1_u128_rshift(&d, 52);
+    VERIFY_BITS_128(&c, 115);
+    VERIFY_BITS_128(&d, 62);
     /* [d 0 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
-    r[1] = c & M; c >>= 52;
+    r[1] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[1], 52);
-    VERIFY_BITS(c, 63);
+    VERIFY_BITS_128(&c, 63);
     /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
 
-    c += (uint128_t)a0 * b[2]
-       + (uint128_t)a1 * b[1]
-       + (uint128_t)a2 * b[0];
-    VERIFY_BITS(c, 114);
+    secp256k1_u128_accum_mul(&c, a0, b[2]);
+    secp256k1_u128_accum_mul(&c, a1, b[1]);
+    secp256k1_u128_accum_mul(&c, a2, b[0]);
+    VERIFY_BITS_128(&c, 114);
     /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 p2 p1 p0] */
-    d += (uint128_t)a3 * b[4]
-       + (uint128_t)a4 * b[3];
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_accum_mul(&d, a3, b[4]);
+    secp256k1_u128_accum_mul(&d, a4, b[3]);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 0 t4 t3 c t1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    c += (uint128_t)R * (uint64_t)d; d >>= 64;
-    VERIFY_BITS(c, 115);
-    VERIFY_BITS(d, 50);
+    secp256k1_u128_accum_mul(&c, R, secp256k1_u128_to_u64(&d)); secp256k1_u128_rshift(&d, 64);
+    VERIFY_BITS_128(&c, 115);
+    VERIFY_BITS_128(&d, 50);
     /* [(d<<12) 0 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
 
-    r[2] = c & M; c >>= 52;
+    r[2] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[2], 52);
-    VERIFY_BITS(c, 63);
+    VERIFY_BITS_128(&c, 63);
     /* [(d<<12) 0 0 0 t4 t3+c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    c += (uint128_t)(R << 12) * (uint64_t)d + t3;
-    VERIFY_BITS(c, 100);
+    secp256k1_u128_accum_mul(&c, R << 12, secp256k1_u128_to_u64(&d));
+    secp256k1_u128_accum_u64(&c, t3);
+    VERIFY_BITS_128(&c, 100);
     /* [t4 c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    r[3] = c & M; c >>= 52;
+    r[3] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[3], 52);
-    VERIFY_BITS(c, 48);
+    VERIFY_BITS_128(&c, 48);
     /* [t4+c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    c += t4;
-    VERIFY_BITS(c, 49);
-    /* [c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    r[4] = c;
+    r[4] = secp256k1_u128_to_u64(&c) + t4;
     VERIFY_BITS(r[4], 49);
     /* [r4 r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
 }
 
 SECP256K1_INLINE static void secp256k1_fe_sqr_inner(uint64_t *r, const uint64_t *a) {
-    uint128_t c, d;
+    secp256k1_uint128 c, d;
     uint64_t a0 = a[0], a1 = a[1], a2 = a[2], a3 = a[3], a4 = a[4];
     int64_t t3, t4, tx, u0;
     const uint64_t M = 0xFFFFFFFFFFFFFULL, R = 0x1000003D10ULL;
@@ -170,107 +170,105 @@ SECP256K1_INLINE static void secp256k1_fe_sqr_inner(uint64_t *r, const uint64_t
     * Note that [x 0 0 0 0 0] = [x*R].
     */
 
-    d = (uint128_t)(a0*2) * a3
-      + (uint128_t)(a1*2) * a2;
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_mul(&d, a0*2, a3);
+    secp256k1_u128_accum_mul(&d, a1*2, a2);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 0 0] = [p3 0 0 0] */
-    c = (uint128_t)a4 * a4;
-    VERIFY_BITS(c, 112);
+    secp256k1_u128_mul(&c, a4, a4);
+    VERIFY_BITS_128(&c, 112);
     /* [c 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
-    d += (uint128_t)R * (uint64_t)c; c >>= 64;
-    VERIFY_BITS(d, 115);
-    VERIFY_BITS(c, 48);
+    secp256k1_u128_accum_mul(&d, R, secp256k1_u128_to_u64(&c)); secp256k1_u128_rshift(&c, 64);
+    VERIFY_BITS_128(&d, 115);
+    VERIFY_BITS_128(&c, 48);
     /* [(c<<12) 0 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
-    t3 = d & M; d >>= 52;
+    t3 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(t3, 52);
-    VERIFY_BITS(d, 63);
+    VERIFY_BITS_128(&d, 63);
     /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
 
     a4 *= 2;
-    d += (uint128_t)a0 * a4
-       + (uint128_t)(a1*2) * a3
-       + (uint128_t)a2 * a2;
-    VERIFY_BITS(d, 115);
+    secp256k1_u128_accum_mul(&d, a0, a4);
+    secp256k1_u128_accum_mul(&d, a1*2, a3);
+    secp256k1_u128_accum_mul(&d, a2, a2);
+    VERIFY_BITS_128(&d, 115);
     /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
-    d += (uint128_t)(R << 12) * (uint64_t)c;
-    VERIFY_BITS(d, 116);
+    secp256k1_u128_accum_mul(&d, R << 12, secp256k1_u128_to_u64(&c));
+    VERIFY_BITS_128(&d, 116);
     /* [d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
-    t4 = d & M; d >>= 52;
+    t4 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(t4, 52);
-    VERIFY_BITS(d, 64);
+    VERIFY_BITS_128(&d, 64);
     /* [d t4 t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
     tx = (t4 >> 48); t4 &= (M >> 4);
     VERIFY_BITS(tx, 4);
     VERIFY_BITS(t4, 48);
     /* [d t4+(tx<<48) t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
 
-    c = (uint128_t)a0 * a0;
-    VERIFY_BITS(c, 112);
+    secp256k1_u128_mul(&c, a0, a0);
+    VERIFY_BITS_128(&c, 112);
     /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 0 p4 p3 0 0 p0] */
-    d += (uint128_t)a1 * a4
-       + (uint128_t)(a2*2) * a3;
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_accum_mul(&d, a1, a4);
+    secp256k1_u128_accum_mul(&d, a2*2, a3);
+    VERIFY_BITS_128(&d, 114);
     /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    u0 = d & M; d >>= 52;
+    u0 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
     VERIFY_BITS(u0, 52);
-    VERIFY_BITS(d, 62);
+    VERIFY_BITS_128(&d, 62);
     /* [d u0 t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
     /* [d 0 t4+(tx<<48)+(u0<<52) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
     u0 = (u0 << 4) | tx;
     VERIFY_BITS(u0, 56);
     /* [d 0 t4+(u0<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    c += (uint128_t)u0 * (R >> 4);
-    VERIFY_BITS(c, 113);
+    secp256k1_u128_accum_mul(&c, u0, R >> 4);
+    VERIFY_BITS_128(&c, 113);
     /* [d 0 t4 t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
-    r[0] = c & M; c >>= 52;
+    r[0] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[0], 52);
-    VERIFY_BITS(c, 61);
+    VERIFY_BITS_128(&c, 61);
     /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 0 p0] */
 
     a0 *= 2;
-    c += (uint128_t)a0 * a1;
-    VERIFY_BITS(c, 114);
+    secp256k1_u128_accum_mul(&c, a0, a1);
+    VERIFY_BITS_128(&c, 114);
     /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 p1 p0] */
-    d += (uint128_t)a2 * a4
-       + (uint128_t)a3 * a3;
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_accum_mul(&d, a2, a4);
+    secp256k1_u128_accum_mul(&d, a3, a3);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
-    c += (d & M) * R; d >>= 52;
-    VERIFY_BITS(c, 115);
-    VERIFY_BITS(d, 62);
+    secp256k1_u128_accum_mul(&c, secp256k1_u128_to_u64(&d) & M, R); secp256k1_u128_rshift(&d, 52);
+    VERIFY_BITS_128(&c, 115);
+    VERIFY_BITS_128(&d, 62);
     /* [d 0 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
-    r[1] = c & M; c >>= 52;
+    r[1] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[1], 52);
-    VERIFY_BITS(c, 63);
+    VERIFY_BITS_128(&c, 63);
     /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
 
-    c += (uint128_t)a0 * a2
-       + (uint128_t)a1 * a1;
-    VERIFY_BITS(c, 114);
+    secp256k1_u128_accum_mul(&c, a0, a2);
+    secp256k1_u128_accum_mul(&c, a1, a1);
+    VERIFY_BITS_128(&c, 114);
     /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 p2 p1 p0] */
-    d += (uint128_t)a3 * a4;
-    VERIFY_BITS(d, 114);
+    secp256k1_u128_accum_mul(&d, a3, a4);
+    VERIFY_BITS_128(&d, 114);
     /* [d 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    c += (uint128_t)R * (uint64_t)d; d >>= 64;
-    VERIFY_BITS(c, 115);
-    VERIFY_BITS(d, 50);
+    secp256k1_u128_accum_mul(&c, R, secp256k1_u128_to_u64(&d)); secp256k1_u128_rshift(&d, 64);
+    VERIFY_BITS_128(&c, 115);
+    VERIFY_BITS_128(&d, 50);
     /* [(d<<12) 0 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    r[2] = c & M; c >>= 52;
+    r[2] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[2], 52);
-    VERIFY_BITS(c, 63);
+    VERIFY_BITS_128(&c, 63);
     /* [(d<<12) 0 0 0 t4 t3+c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
 
-    c += (uint128_t)(R << 12) * (uint64_t)d + t3;
-    VERIFY_BITS(c, 100);
+    secp256k1_u128_accum_mul(&c, R << 12, secp256k1_u128_to_u64(&d));
+    secp256k1_u128_accum_u64(&c, t3);
+    VERIFY_BITS_128(&c, 100);
     /* [t4 c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    r[3] = c & M; c >>= 52;
+    r[3] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
     VERIFY_BITS(r[3], 52);
-    VERIFY_BITS(c, 48);
+    VERIFY_BITS_128(&c, 48);
     /* [t4+c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    c += t4;
-    VERIFY_BITS(c, 49);
-    /* [c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
-    r[4] = c;
+    r[4] = secp256k1_u128_to_u64(&c) + t4;
     VERIFY_BITS(r[4], 49);
     /* [r4 r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
 }
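
Read side by side, the rewrite is mechanical: each native-__int128 expression becomes one secp256k1_u128_mul or accum_mul call per product, and each shift or truncation becomes an explicit helper call. As a worked example, taken from the first accumulation step of secp256k1_fe_mul_inner above:

/* Before: native 128-bit arithmetic. */
d = (uint128_t)a0 * b[3]
  + (uint128_t)a1 * b[2]
  + (uint128_t)a2 * b[1]
  + (uint128_t)a3 * b[0];

/* After: the same computation through the opaque type. */
secp256k1_u128_mul(&d, a0, b[3]);        /* d  = a0*b[3] */
secp256k1_u128_accum_mul(&d, a1, b[2]);  /* d += a1*b[2] */
secp256k1_u128_accum_mul(&d, a2, b[1]);  /* d += a2*b[1] */
secp256k1_u128_accum_mul(&d, a3, b[0]);  /* d += a3*b[0] */

Passing the accumulator by pointer is what makes the simulation possible: the call sites stay identical whether secp256k1_uint128 is a native __int128 or a pair of uint64_t limbs.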
