@@ -1,4 +1,4 @@
-use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
+use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TrailingToken};
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, AttrsTarget, DelimSpacing};
 use rustc_ast::tokenstream::{DelimSpan, Spacing};
@@ -76,92 +76,6 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
     })
 }
 
-// njn: remove
-// Produces a `TokenStream` on-demand. Using `cursor_snapshot`
-// and `num_calls`, we can reconstruct the `TokenStream` seen
-// by the callback. This allows us to avoid producing a `TokenStream`
-// if it is never needed - for example, a captured `macro_rules!`
-// argument that is never passed to a proc macro.
-// In practice token stream creation happens rarely compared to
-// calls to `collect_tokens` (see some statistics in #78736),
-// so we are doing as little up-front work as possible.
-//
-// This also makes `Parser` very cheap to clone, since
-// there is no intermediate collection buffer to clone.
-struct LazyAttrTokenStreamImpl {
-    start_token: (Token, Spacing),
-    cursor_snapshot: TokenCursor,
-    num_calls: u32,
-    break_last_token: bool,
-    replace_ranges: Box<[ReplaceRange]>,
-}
-
-impl LazyAttrTokenStreamImpl {
-    fn to_attr_token_stream(mut self) -> AttrTokenStream {
-        // The token produced by the final call to `{,inlined_}next` was not
-        // actually consumed by the callback. The combination of chaining the
-        // initial token and using `take` produces the desired result - we
-        // produce an empty `TokenStream` if no calls were made, and omit the
-        // final token otherwise.
-        let tokens = iter::once(FlatToken::Token(self.start_token))
-            .chain(iter::repeat_with(|| FlatToken::Token(self.cursor_snapshot.next())))
-            .take(self.num_calls as usize);
-
-        if self.replace_ranges.is_empty() {
-            make_attr_token_stream(tokens, self.break_last_token)
-        } else {
-            let mut tokens: Vec<_> = tokens.collect();
-            let mut replace_ranges = self.replace_ranges.to_vec();
-            replace_ranges.sort_by_key(|(range, _)| range.start);
-
-            #[cfg(debug_assertions)]
-            {
-                for [(range, tokens), (next_range, next_tokens)] in replace_ranges.array_windows() {
-                    assert!(
-                        range.end <= next_range.start || range.end >= next_range.end,
-                        "Replace ranges should either be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})",
-                        range,
-                        tokens,
-                        next_range,
-                        next_tokens,
-                    );
-                }
-            }
-
-            // Process the replace ranges, starting from the highest start
-            // position and working our way back. If have tokens like:
-            //
-            // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
-            //
-            // Then we will generate replace ranges for both
-            // the `#[cfg(FALSE)] field: bool` and the entire
-            // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
-            //
-            // By starting processing from the replace range with the greatest
-            // start position, we ensure that any replace range which encloses
-            // another replace range will capture the *replaced* tokens for the inner
-            // range, not the original tokens.
-            for (range, target) in replace_ranges.into_iter().rev() {
-                assert!(!range.is_empty(), "Cannot replace an empty range: {range:?}");
-
-                // Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s, plus
-                // enough `FlatToken::Empty`s to fill up the rest of the range. This keeps the
-                // total length of `tokens` constant throughout the replacement process, allowing
-                // us to use all of the `ReplaceRanges` entries without adjusting indices.
-                let target_len = target.is_some() as usize;
-                tokens.splice(
-                    (range.start as usize)..(range.end as usize),
-                    target
-                        .into_iter()
-                        .map(|target| FlatToken::AttrsTarget(target))
-                        .chain(iter::repeat(FlatToken::Empty).take(range.len() - target_len)),
-                );
-            }
-            make_attr_token_stream(tokens.into_iter(), self.break_last_token)
-        }
-    }
-}
-
 impl<'a> Parser<'a> {
     /// Records all tokens consumed by the provided callback,
     /// including the current token. These tokens are collected
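
The removed `LazyAttrTokenStreamImpl::to_attr_token_stream` (and the eager replacement added further down in this diff) both rebuild the captured tokens with the same `iter::once(start_token).chain(iter::repeat_with(|| cursor_snapshot.next())).take(num_calls)` chain described in the comment above. A minimal, self-contained sketch of just that pattern, with hypothetical names and plain integers standing in for tokens (illustrative only, not the rustc types):

    // Sketch only: `start` stands in for `start_token`, `next` for
    // `cursor_snapshot.next()`, and `num_calls` for the recorded bump count.
    fn replay(start: u32, mut next: impl FnMut() -> u32, num_calls: usize) -> Vec<u32> {
        std::iter::once(start)
            .chain(std::iter::repeat_with(|| next()))
            .take(num_calls)
            .collect()
    }

    fn main() {
        let mut stream = 1u32..;
        // Three calls recorded: the capture is the start token plus two more;
        // the token produced by the final call is the parser's new current
        // token and is therefore omitted.
        assert_eq!(replay(0, || stream.next().unwrap(), 3), vec![0, 1, 2]);
        // No calls recorded: the capture is empty and `next` is never
        // invoked, because `take(0)` never advances the chained iterator.
        assert_eq!(replay(9, || 0, 0), Vec::<u32>::new());
    }
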
@@ -207,7 +121,7 @@ impl<'a> Parser<'a> {
         }
 
         let start_token = (self.token.clone(), self.token_spacing);
-        let cursor_snapshot = self.token_cursor.clone();
+        let mut cursor_snapshot = self.token_cursor.clone();
         let start_pos = self.num_bump_calls;
         let has_outer_attrs = !attrs.attrs.is_empty();
         let replace_ranges_start = self.capture_state.replace_ranges.len();
@@ -294,29 +208,78 @@
 
         let num_calls = end_pos - start_pos;
 
+        // The token produced by the final call to `{,inlined_}next` was not
+        // actually consumed by the callback. The combination of chaining the
+        // initial token and using `take` produces the desired result - we
+        // produce an empty `TokenStream` if no calls were made, and omit the
+        // final token otherwise.
+        let tokens_iter = iter::once(FlatToken::Token(start_token))
+            .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
+            .take(num_calls as usize);
+
         // If we have no attributes, then we will never need to
         // use any replace ranges.
-        let replace_ranges: Box<[ReplaceRange]> = if ret.attrs().is_empty() && !self.capture_cfg {
-            Box::new([])
+        let tokens = if ret.attrs().is_empty() && !self.capture_cfg {
+            make_attr_token_stream(tokens_iter, self.break_last_token)
         } else {
+            let mut tokens: Vec<_> = tokens_iter.collect();
+
             // Grab any replace ranges that occur *inside* the current AST node.
             // We will perform the actual replacement below.
-            self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end]
+            let mut replace_ranges: Vec<ReplaceRange> = self.capture_state.replace_ranges
+                [replace_ranges_start..replace_ranges_end]
                 .iter()
                 .cloned()
                 .chain(inner_attr_replace_ranges.iter().cloned())
                 .map(|(range, data)| ((range.start - start_pos)..(range.end - start_pos), data))
-                .collect()
-        };
+                .collect();
+            replace_ranges.sort_by_key(|(range, _)| range.start);
+
+            #[cfg(debug_assertions)]
+            {
+                for [(range, tokens), (next_range, next_tokens)] in replace_ranges.array_windows() {
+                    assert!(
+                        range.end <= next_range.start || range.end >= next_range.end,
+                        "Replace ranges should either be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})",
+                        range,
+                        tokens,
+                        next_range,
+                        next_tokens,
+                    );
+                }
+            }
+
+            // Process the replace ranges, starting from the highest start
+            // position and working our way back. If have tokens like:
+            //
+            // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
+            //
+            // Then we will generate replace ranges for both
+            // the `#[cfg(FALSE)] field: bool` and the entire
+            // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
+            //
+            // By starting processing from the replace range with the greatest
+            // start position, we ensure that any replace range which encloses
+            // another replace range will capture the *replaced* tokens for the inner
+            // range, not the original tokens.
+            for (range, target) in replace_ranges.into_iter().rev() {
+                assert!(!range.is_empty(), "Cannot replace an empty range: {range:?}");
 
-        let tokens = LazyAttrTokenStreamImpl {
-            start_token,
-            num_calls,
-            cursor_snapshot,
-            break_last_token: self.break_last_token,
-            replace_ranges,
+                // Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s, plus
+                // enough `FlatToken::Empty`s to fill up the rest of the range. This keeps the
+                // total length of `tokens` constant throughout the replacement process, allowing
+                // us to use all of the `ReplaceRanges` entries without adjusting indices.
+                let target_len = target.is_some() as usize;
+                tokens.splice(
+                    (range.start as usize)..(range.end as usize),
+                    target
+                        .into_iter()
+                        .map(|target| FlatToken::AttrsTarget(target))
+                        .chain(iter::repeat(FlatToken::Empty).take(range.len() - target_len)),
+                );
+            }
+            make_attr_token_stream(tokens.into_iter(), self.break_last_token)
         };
-        let tokens = tokens.to_attr_token_stream();
 
         // If we support tokens and don't already have them, store the newly captured tokens.
         if let Some(target_tokens @ None) = ret.tokens_mut() {
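
The splice step in this hunk depends on the replacement keeping `tokens` at a constant length: each range collapses to at most one `FlatToken::AttrsTarget` plus `FlatToken::Empty` padding, and ranges are processed from the highest start position backwards so an enclosing range picks up the already-replaced inner tokens. A small self-contained sketch of that idea, using a hypothetical `Flat` enum rather than the real `FlatToken`:

    // Sketch only: each range is replaced by at most one target marker plus
    // enough `Empty` fillers, so the vector's length never changes and the
    // indices of enclosing (outer) ranges stay valid.
    use std::ops::Range;

    #[derive(Debug, Clone, PartialEq)]
    enum Flat {
        Tok(char),
        Target(&'static str),
        Empty,
    }

    fn main() {
        // Four placeholder tokens and two nested replace ranges.
        let mut tokens =
            vec![Flat::Tok('a'), Flat::Tok('b'), Flat::Tok('c'), Flat::Tok('d')];
        let mut ranges: Vec<(Range<u32>, Option<&'static str>)> =
            vec![(0..4, Some("outer")), (1..3, Some("inner"))];
        ranges.sort_by_key(|(range, _)| range.start);

        // Work from the highest start position backwards, so the outer range
        // sees the inner range's *replaced* tokens rather than the originals.
        for (range, target) in ranges.into_iter().rev() {
            let target_len = target.is_some() as usize;
            tokens.splice(
                (range.start as usize)..(range.end as usize),
                target
                    .into_iter()
                    .map(Flat::Target)
                    .chain(std::iter::repeat(Flat::Empty).take(range.len() - target_len)),
            );
        }
        // Still four elements; the outer target now covers everything.
        assert_eq!(
            tokens,
            [Flat::Target("outer"), Flat::Empty, Flat::Empty, Flat::Empty]
        );
    }
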
@@ -429,6 +392,5 @@ mod size_asserts {
     use rustc_data_structures::static_assert_size;
     // tidy-alphabetical-start
     static_assert_size!(AttrWrapper, 16);
-    static_assert_size!(LazyAttrTokenStreamImpl, 96);
     // tidy-alphabetical-end
 }