@@ -18,7 +18,10 @@
 #![deny(rustdoc::private_intra_doc_links)]
 #![cfg_attr(docsrs, feature(doc_auto_cfg))]
 
-use proc_macro::TokenStream;
+extern crate alloc;
+
+use alloc::string::ToString;
+use proc_macro::{Delimiter, Group, TokenStream, TokenTree};
 use quote::quote;
 use syn::spanned::Spanned;
 use syn::{parse, ImplItemFn, Token};
@@ -74,3 +77,220 @@ pub fn maybe_await(expr: TokenStream) -> TokenStream {
 
 	quoted.into()
 }
+
+fn expect_ident(token: &TokenTree, expected_name: Option<&str>) {
+	if let TokenTree::Ident(id) = &token {
+		if let Some(exp) = expected_name {
+			assert_eq!(id.to_string(), exp, "Expected ident {}, got {:?}", exp, token);
+		}
+	} else {
+		panic!("Expected ident {:?}, got {:?}", expected_name, token);
+	}
+}
+
+fn expect_punct(token: &TokenTree, expected: char) {
+	if let TokenTree::Punct(p) = &token {
+		assert_eq!(p.as_char(), expected, "Expected punctuation {}, got {}", expected, p);
+	} else {
+		panic!("Expected punctuation {}, got {:?}", expected, token);
+	}
+}
+
+fn token_to_stream(token: TokenTree) -> proc_macro::TokenStream {
+	proc_macro::TokenStream::from(token)
+}
+
+/// Processes a list of fields in a variant definition (see the docs for [`skip_legacy_fields!`])
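+///
+/// For example (an illustrative sketch, not exact expansion output), a brace-delimited field
+/// list like `{ ref field1: option, ref field2: (legacy, u64), .. }` is rewritten to
+/// `{ ref field1, .. }`: the `legacy` field is dropped and each kept field loses its
+/// `: ty_info` suffix.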
+fn process_fields(group: Group) -> proc_macro::TokenStream {
+	let mut computed_fields = proc_macro::TokenStream::new();
+	if group.delimiter() == Delimiter::Brace {
+		let mut fields_stream = group.stream().into_iter().peekable();
+
+		let mut new_fields = proc_macro::TokenStream::new();
+		loop {
+			// The field list should end with .., at which point we break
+			let next_tok = fields_stream.peek();
+			if let Some(TokenTree::Punct(_)) = next_tok {
+				let dot1 = fields_stream.next().unwrap();
+				expect_punct(&dot1, '.');
+				let dot2 = fields_stream.next().expect("Missing second trailing .");
+				expect_punct(&dot2, '.');
+				let trailing_dots = [dot1, dot2];
+				new_fields.extend(trailing_dots.into_iter().map(token_to_stream));
+				assert!(fields_stream.peek().is_none());
+				break;
+			}
+
+			// Fields should take the form `ref field_name: ty_info` where `ty_info`
+			// may be a single ident or may be a group. We skip the field if `ty_info`
+			// is a group where the first token is the ident `legacy`.
+			let ref_ident = fields_stream.next().unwrap();
+			expect_ident(&ref_ident, Some("ref"));
+			let field_name_ident = fields_stream.next().unwrap();
+			let co = fields_stream.next().unwrap();
+			expect_punct(&co, ':');
+			let ty_info = fields_stream.next().unwrap();
+			let com = fields_stream.next().unwrap();
+			expect_punct(&com, ',');
+
+			if let TokenTree::Group(group) = ty_info {
+				let first_group_tok = group.stream().into_iter().next().unwrap();
+				if let TokenTree::Ident(ident) = first_group_tok {
+					if ident.to_string() == "legacy" {
+						continue;
+					}
+				}
+			}
+
+			let field = [ref_ident, field_name_ident, com];
+			new_fields.extend(field.into_iter().map(token_to_stream));
+		}
+		let fields_group = Group::new(Delimiter::Brace, new_fields);
+		computed_fields.extend(token_to_stream(TokenTree::Group(fields_group)));
+	} else {
+		computed_fields.extend(token_to_stream(TokenTree::Group(group)));
+	}
+	computed_fields
+}
+
+/// Scans a match statement for legacy fields which should be skipped.
+///
+/// This is used internally in LDK's TLV serialization logic and is not expected to be used by
+/// other crates.
+///
+/// Wraps a `match self {..}` statement and scans the fields in the match patterns (in the form
+/// `ref $field_name: $field_ty`) for types marked `legacy`, skipping those fields.
+///
+/// Specifically, it expects input like the following, simply dropping `field3` and the
+/// `: $field_ty` after each field name.
+/// ```ignore
+/// match self {
+/// 	Enum::Variant {
+/// 		ref field1: option,
+/// 		ref field2: (option, explicit_type: u64),
+/// 		ref field3: (legacy, u64, {}, {}), // will be skipped
+/// 		..
+/// 	} => expression
+/// }
+/// ```
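+///
+/// The example above would thus expand to roughly the following (an illustrative sketch, not
+/// byte-for-byte expansion output; note that the trailing comma after the arm's expression is
+/// consumed and not re-emitted):
+/// ```ignore
+/// match self {
+/// 	Enum::Variant { ref field1, ref field2, .. } => expression
+/// }
+/// ```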
+#[proc_macro]
+pub fn skip_legacy_fields(expr: TokenStream) -> TokenStream {
+	let mut stream = expr.into_iter();
+	let mut res = TokenStream::new();
+
+	// First expect `match self` followed by a `{}` group...
+	let match_ident = stream.next().unwrap();
+	expect_ident(&match_ident, Some("match"));
+	res.extend(proc_macro::TokenStream::from(match_ident));
+
+	let self_ident = stream.next().unwrap();
+	expect_ident(&self_ident, Some("self"));
+	res.extend(proc_macro::TokenStream::from(self_ident));
+
+	let arms = stream.next().unwrap();
+	if let TokenTree::Group(group) = arms {
+		let mut new_arms = TokenStream::new();
+
+		let mut arm_stream = group.stream().into_iter().peekable();
+		while arm_stream.peek().is_some() {
+			// Each arm should contain Enum::Variant { fields } => init
+			// We explicitly check the :s, =, and >, as well as an optional trailing ,
+			let enum_ident = arm_stream.next().unwrap();
+			let co1 = arm_stream.next().unwrap();
+			expect_punct(&co1, ':');
+			let co2 = arm_stream.next().unwrap();
+			expect_punct(&co2, ':');
+			let variant_ident = arm_stream.next().unwrap();
+			let fields = arm_stream.next().unwrap();
+			let eq = arm_stream.next().unwrap();
+			expect_punct(&eq, '=');
+			let gt = arm_stream.next().unwrap();
+			expect_punct(&gt, '>');
+			let init = arm_stream.next().unwrap();
+
+			let next_tok = arm_stream.peek();
+			if let Some(TokenTree::Punct(_)) = next_tok {
+				expect_punct(next_tok.unwrap(), ',');
+				arm_stream.next();
+			}
+
+			let computed_fields = if let TokenTree::Group(group) = fields {
+				process_fields(group)
+			} else {
+				panic!("Expected a group for the fields in a match arm");
+			};
+
+			let arm_pfx = [enum_ident, co1, co2, variant_ident];
+			new_arms.extend(arm_pfx.into_iter().map(token_to_stream));
+			new_arms.extend(computed_fields);
+			let arm_sfx = [eq, gt, init];
+			new_arms.extend(arm_sfx.into_iter().map(token_to_stream));
+		}
+
+		let new_arm_group = Group::new(Delimiter::Brace, new_arms);
+		res.extend(token_to_stream(TokenTree::Group(new_arm_group)));
+	} else {
+		panic!("Expected `match self {{..}}` and nothing else");
+	}
+
+	assert!(stream.next().is_none(), "Expected `match self {{..}}` and nothing else");
+
+	res
+}
+
+/// Scans an enum definition for fields initialized with `legacy` types and drops them.
+///
+/// This is used internally in LDK's TLV serialization logic and is not expected to be used by
+/// other crates.
+///
+/// Is expected to wrap a struct expression like
+/// ```ignore
+/// drop_legacy_field_definition!(Self {
+/// 	field1: $crate::_ignore_arg!(field1, option),
+/// 	field2: $crate::_ignore_arg!(field2, (legacy, u64, {})),
+/// })
+/// ```
+/// and will drop fields defined like `field2` with a type starting with `legacy`.
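+///
+/// The example above would thus expand to roughly the following (an illustrative sketch, not
+/// byte-for-byte expansion output):
+/// ```ignore
+/// Self { field1: $crate::_ignore_arg!(field1, option) }
+/// ```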
+#[proc_macro]
+pub fn drop_legacy_field_definition(expr: TokenStream) -> TokenStream {
+	let mut st = if let Ok(parsed) = parse::<syn::Expr>(expr) {
+		if let syn::Expr::Struct(st) = parsed {
+			st
+		} else {
+			return (quote! {
+				compile_error!("drop_legacy_field_definition!() can only be used on struct expressions")
+			})
+			.into();
+		}
+	} else {
+		return (quote! {
+			compile_error!("drop_legacy_field_definition!() can only be used on expressions")
+		})
+		.into();
+	};
+	assert!(st.attrs.is_empty());
+	assert!(st.qself.is_none());
+	assert!(st.dot2_token.is_none());
+	assert!(st.rest.is_none());
+	let mut new_fields = syn::punctuated::Punctuated::new();
+	core::mem::swap(&mut new_fields, &mut st.fields);
+	for field in new_fields {
+		if let syn::Expr::Macro(syn::ExprMacro { mac, .. }) = &field.expr {
+			let macro_name = mac.path.segments.last().unwrap().ident.to_string();
+			let is_init = macro_name == "_ignore_arg";
+			// Skip `field_name` and `,`, giving us just the type's group
+			let ty_tokens = mac.tokens.clone().into_iter().nth(2);
+			if let Some(proc_macro2::TokenTree::Group(group)) = ty_tokens {
+				let first_token = group.stream().into_iter().next();
+				if let Some(proc_macro2::TokenTree::Ident(ident)) = first_token {
+					if is_init && ident == "legacy" {
+						continue;
+					}
+				}
+			}
+		}
+		st.fields.push(field);
+	}
+	let out = syn::Expr::Struct(st);
+	quote! { #out }.into()
+}