@@ -119,40 +119,60 @@ static void secp256k1_ecmult_gen(const secp256k1_ecmult_gen_context *ctx, secp25
             /* Gather the mask(block)-selected bits of recoded into bits. They're packed
              * together: bit (tooth) of bits = bit
              * ((block*COMB_TEETH + tooth)*COMB_SPACING + comb_off) of recoded. */
-            uint32_t bits = 0, sign, abs, index, tooth;
-            for (tooth = 0; tooth < COMB_TEETH && bit_pos < 256; ++tooth) {
-                uint32_t bit = secp256k1_scalar_get_bits(&recoded, bit_pos, 1);
-                bits |= bit << tooth;
-                bit_pos += COMB_SPACING;
+            uint32_t bits = 0, index, tooth = 0;
+            while (1) {
+                if (EXPECT(tooth == COMB_TEETH, 0)) {
+                    /* All bits have been gathered.
+                     *
+                     * If the top bit of bits is 1, conditionally flip them all (corresponding
+                     * to looking up the negated table value), and remember to negate the
+                     * result in sign. */
+                    uint32_t sign = (bits >> (COMB_TEETH - 1)) & 1;
+                    uint32_t abs = (bits ^ -sign) & (COMB_POINTS - 1);
+                    VERIFY_CHECK(sign == 0 || sign == 1);
+                    VERIFY_CHECK(abs < COMB_POINTS);
+                    /* This uses a conditional move to avoid any secret data in array indexes.
+                     * _Any_ use of secret indexes has been demonstrated to result in timing
+                     * sidechannels, even when the cache-line access patterns are uniform.
+                     * See also:
+                     * - "A word of warning", CHES 2013 Rump Session, by Daniel J. Bernstein and
+                     *   Peter Schwabe (https://cryptojedi.org/peter/data/chesrump-20130822.pdf)
+                     * - "Cache Attacks and Countermeasures: the Case of AES", RSA 2006, by Dag
+                     *   Arne Osvik, Adi Shamir, and Eran Tromer
+                     *   (https://www.tau.ac.il/~tromer/papers/cache.pdf)
+                     */
+                    for (index = 0; index < COMB_POINTS; ++index) {
+                        secp256k1_ge_storage_cmov(&adds, &secp256k1_ecmult_gen_prec_table[block][index], index == abs);
+                    }
+                    /* Set add=adds or add=-adds, in constant time, based on sign. */
+                    secp256k1_ge_from_storage(&add, &adds);
+                    secp256k1_fe_negate(&neg, &add.y, 1);
+                    secp256k1_fe_cmov(&add.y, &neg, sign);
+                    break;
+#if COMB_BITS > 256
+                } else if (EXPECT(bit_pos >= 256, 0)) {
+                    /* Some bit(s) of (mask(block) << comb_off) are outside of [0,256). This means
+                     * we are also done constructing bits, but know its top bit is zero, and no
+                     * flipping/negating is needed. The table lookup can also be done over a
+                     * smaller number of entries. */
+                    VERIFY_CHECK(bits < (1U << tooth));
+                    VERIFY_CHECK(bits < COMB_POINTS);
+                    for (index = 0; (index >> tooth) == 0; ++index) {
+                        secp256k1_ge_storage_cmov(&adds, &secp256k1_ecmult_gen_prec_table[block][index], index == bits);
+                    }
+                    secp256k1_ge_from_storage(&add, &adds);
+                    break;
+#endif
+                } else {
+                    /* Gather another bit. */
+                    uint32_t bit = secp256k1_scalar_get_bits(&recoded, bit_pos, 1);
+                    VERIFY_CHECK(bit_pos < COMB_BITS && bit_pos < 256);
+                    bits |= bit << tooth;
+                    bit_pos += COMB_SPACING;
+                    ++tooth;
+                }
             }
 
-            /* If the top bit of bits is 1, conditionally flip them all (correspoding
-             * to looking up the negated table value), and remember to negate the
-             * result in sign. */
-            sign = (bits >> (COMB_TEETH - 1)) & 1;
-            abs = (bits ^ -sign) & (COMB_POINTS - 1);
-            VERIFY_CHECK(sign == 0 || sign == 1);
-            VERIFY_CHECK(abs < COMB_POINTS);
-
-            /** This uses a conditional move to avoid any secret data in array indexes.
-             * _Any_ use of secret indexes has been demonstrated to result in timing
-             * sidechannels, even when the cache-line access patterns are uniform.
-             * See also:
-             *  "A word of warning", CHES 2013 Rump Session, by Daniel J. Bernstein and Peter Schwabe
-             *   (https://cryptojedi.org/peter/data/chesrump-20130822.pdf) and
-             *  "Cache Attacks and Countermeasures: the Case of AES", RSA 2006,
-             *   by Dag Arne Osvik, Adi Shamir, and Eran Tromer
-             *   (https://www.tau.ac.il/~tromer/papers/cache.pdf)
-             */
-            for (index = 0; index < COMB_POINTS; ++index) {
-                secp256k1_ge_storage_cmov(&adds, &secp256k1_ecmult_gen_prec_table[block][index], index == abs);
-            }
-
-            /* Set add=adds or add=-adds, in constant time, based on sign. */
-            secp256k1_ge_from_storage(&add, &adds);
-            secp256k1_fe_negate(&neg, &add.y, 1);
-            secp256k1_fe_cmov(&add.y, &neg, sign);
-
             /* Add the looked up and conditionally negated value to r. */
             if (EXPECT(first, 0)) {
                 /* If this is the first table lookup, we can skip addition. */
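
Note on the comb indexing in the hunk above: bit `tooth` of `bits` is read from bit `(block*COMB_TEETH + tooth)*COMB_SPACING + comb_off` of `recoded`, with `bit_pos` walking that sequence in steps of `COMB_SPACING`. A rough standalone sketch of just that gathering step follows; it is not code from this change, and the limb layout, helper names, and COMB_* values are invented for illustration:

```c
/* Illustration only (not part of this commit): gather the COMB_TEETH bits
 * that one (block, comb_off) iteration reads from a 256-bit scalar, here
 * represented as eight 32-bit little-endian limbs. COMB_* values and helper
 * names are made up for the example. */
#include <stdint.h>

#define COMB_BLOCKS  11
#define COMB_TEETH    6
#define COMB_SPACING  4   /* 11 * 6 * 4 = 264 >= 256 */

static uint32_t scalar_bit(const uint32_t limbs[8], uint32_t pos) {
    return (limbs[pos >> 5] >> (pos & 31)) & 1;
}

static uint32_t gather_bits(const uint32_t recoded[8], uint32_t block, uint32_t comb_off) {
    uint32_t bits = 0, tooth;
    for (tooth = 0; tooth < COMB_TEETH; ++tooth) {
        /* Bit (tooth) of bits = bit ((block*COMB_TEETH + tooth)*COMB_SPACING + comb_off)
         * of recoded, as the comment at the top of the hunk states. */
        uint32_t pos = (block * COMB_TEETH + tooth) * COMB_SPACING + comb_off;
        if (pos < 256) bits |= scalar_bit(recoded, pos) << tooth;
    }
    return bits;
}
```

Since the formula enumerates each position in [0, COMB_BLOCKS*COMB_TEETH*COMB_SPACING) exactly once across all (block, tooth, comb_off) combinations, positions at or beyond 256 correspond to the `bit_pos >= 256` early exit that the `#if COMB_BITS > 256` branch handles with a smaller table scan.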
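
Similarly, a minimal sketch of the constant-time selection pattern the new code path relies on: every table entry is touched, a conditional move keeps only the entry at the secret index, and the top tooth of `bits` drives a branchless negation of the result. This mirrors the hunk's arithmetic (`sign = (bits >> (COMB_TEETH - 1)) & 1`, `abs = (bits ^ -sign) & (COMB_POINTS - 1)`) but is not the library's implementation; a plain `uint32_t` table stands in for the precomputed group elements, and the TEETH/POINTS values are again illustrative:

```c
/* Illustration only (not part of this commit): branchless select-and-negate,
 * mirroring the sign/abs trick and the cmov scan from the hunk above, on a
 * plain integer table instead of precomputed group elements. */
#include <stdint.h>

#define TEETH  6
#define POINTS (1u << (TEETH - 1))   /* plays the role of COMB_POINTS here */

/* Copy *src into *dst iff flag is 1, without a data-dependent branch. */
static void cmov_u32(uint32_t *dst, const uint32_t *src, uint32_t flag) {
    uint32_t mask = (uint32_t)0 - flag;           /* 0x00000000 or 0xFFFFFFFF */
    *dst = (*dst & ~mask) | (*src & mask);
}

static int64_t select_signed(const uint32_t table[POINTS], uint32_t bits) {
    uint32_t sign = (bits >> (TEETH - 1)) & 1;    /* top tooth selects negation */
    uint32_t abs = (bits ^ -sign) & (POINTS - 1); /* flip the low teeth when sign is set */
    uint32_t value = 0;
    uint32_t index;
    /* Scan every entry so the access pattern is independent of the secret index;
     * only index == abs actually lands in value. */
    for (index = 0; index < POINTS; ++index) {
        cmov_u32(&value, &table[index], index == abs);
    }
    /* Branchless conditional negation: value when sign == 0, -value when sign == 1. */
    return ((int64_t)value ^ -(int64_t)sign) + (int64_t)sign;
}
```

In the diff itself, `secp256k1_ge_storage_cmov` and `secp256k1_fe_cmov` do the same job on group and field elements, with the negation applied only to the looked-up point's y coordinate.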