summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorJonas Schievink <jonasschievink@gmail.com>2020-09-27 01:53:29 +0200
committerGitHub <noreply@github.com>2020-09-27 01:53:29 +0200
commit7a1a87114e586684dd9d184d6a2f30629d7ffe58 (patch)
tree4f20a590c929b8f867062d5829c6fd0967bf67d0
parentRollup merge of #77231 - oli-obk:clippy_const_fn, r=Manishearth (diff)
parentpretty-print-reparse hack: Remove an impossible case (diff)
downloadrust-7a1a87114e586684dd9d184d6a2f30629d7ffe58.tar.gz
rust-7a1a87114e586684dd9d184d6a2f30629d7ffe58.tar.bz2
rust-7a1a87114e586684dd9d184d6a2f30629d7ffe58.tar.xz
Rollup merge of #77235 - petrochenkov:reparse, r=Aaron1011
pretty-print-reparse hack: Rename some variables for clarity. This will also make it easier to make the comparisons asymmetric. Also, one impossible case is removed. r? @Aaron1011
-rw-r--r--compiler/rustc_parse/src/lib.rs42
1 file changed, 22 insertions, 20 deletions
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 21bbdc9..d59dd40 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -7,7 +7,7 @@
7#![feature(or_patterns)] 7#![feature(or_patterns)]
8 8
9use rustc_ast as ast; 9use rustc_ast as ast;
10use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind}; 10use rustc_ast::token::{self, Nonterminal, Token, TokenKind};
11use rustc_ast::tokenstream::{self, Spacing, TokenStream, TokenTree}; 11use rustc_ast::tokenstream::{self, Spacing, TokenStream, TokenTree};
12use rustc_ast_pretty::pprust; 12use rustc_ast_pretty::pprust;
13use rustc_data_structures::sync::Lrc; 13use rustc_data_structures::sync::Lrc;
@@ -299,7 +299,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
299 // FIXME(#43081): Avoid this pretty-print + reparse hack 299 // FIXME(#43081): Avoid this pretty-print + reparse hack
300 let source = pprust::nonterminal_to_string(nt); 300 let source = pprust::nonterminal_to_string(nt);
301 let filename = FileName::macro_expansion_source_code(&source); 301 let filename = FileName::macro_expansion_source_code(&source);
302 let tokens_for_real = parse_stream_from_source_str(filename, source, sess, Some(span)); 302 let reparsed_tokens = parse_stream_from_source_str(filename, source, sess, Some(span));
303 303
304 // During early phases of the compiler the AST could get modified 304 // During early phases of the compiler the AST could get modified
305 // directly (e.g., attributes added or removed) and the internal cache 305 // directly (e.g., attributes added or removed) and the internal cache
@@ -325,7 +325,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
325 // modifications, including adding/removing typically non-semantic 325 // modifications, including adding/removing typically non-semantic
326 // tokens such as extra braces and commas, don't happen. 326 // tokens such as extra braces and commas, don't happen.
327 if let Some(tokens) = tokens { 327 if let Some(tokens) = tokens {
328 if tokenstream_probably_equal_for_proc_macro(&tokens, &tokens_for_real, sess) { 328 if tokenstream_probably_equal_for_proc_macro(&tokens, &reparsed_tokens, sess) {
329 return tokens; 329 return tokens;
330 } 330 }
331 info!( 331 info!(
@@ -333,9 +333,9 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
333 going with stringified version" 333 going with stringified version"
334 ); 334 );
335 info!("cached tokens: {:?}", tokens); 335 info!("cached tokens: {:?}", tokens);
336 info!("reparsed tokens: {:?}", tokens_for_real); 336 info!("reparsed tokens: {:?}", reparsed_tokens);
337 } 337 }
338 tokens_for_real 338 reparsed_tokens
339} 339}
340 340
341// See comments in `Nonterminal::to_tokenstream` for why we care about 341// See comments in `Nonterminal::to_tokenstream` for why we care about
@@ -344,8 +344,8 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
344// This is otherwise the same as `eq_unspanned`, only recursing with a 344// This is otherwise the same as `eq_unspanned`, only recursing with a
345// different method. 345// different method.
346pub fn tokenstream_probably_equal_for_proc_macro( 346pub fn tokenstream_probably_equal_for_proc_macro(
347 first: &TokenStream, 347 tokens: &TokenStream,
348 other: &TokenStream, 348 reparsed_tokens: &TokenStream,
349 sess: &ParseSess, 349 sess: &ParseSess,
350) -> bool { 350) -> bool {
351 // When checking for `probably_eq`, we ignore certain tokens that aren't 351 // When checking for `probably_eq`, we ignore certain tokens that aren't
@@ -359,9 +359,6 @@ pub fn tokenstream_probably_equal_for_proc_macro(
359 // The pretty printer tends to add trailing commas to 359 // The pretty printer tends to add trailing commas to
360 // everything, and in particular, after struct fields. 360 // everything, and in particular, after struct fields.
361 | token::Comma 361 | token::Comma
362 // The pretty printer emits `NoDelim` as whitespace.
363 | token::OpenDelim(DelimToken::NoDelim)
364 | token::CloseDelim(DelimToken::NoDelim)
365 // The pretty printer collapses many semicolons into one. 362 // The pretty printer collapses many semicolons into one.
366 | token::Semi 363 | token::Semi
367 // We don't preserve leading `|` tokens in patterns, so 364 // We don't preserve leading `|` tokens in patterns, so
@@ -460,10 +457,11 @@ pub fn tokenstream_probably_equal_for_proc_macro(
460 457
461 // Break tokens after we expand any nonterminals, so that we break tokens 458 // Break tokens after we expand any nonterminals, so that we break tokens
462 // that are produced as a result of nonterminal expansion. 459 // that are produced as a result of nonterminal expansion.
463 let t1 = first.trees().filter(semantic_tree).flat_map(expand_nt).flat_map(break_tokens); 460 let tokens = tokens.trees().filter(semantic_tree).flat_map(expand_nt).flat_map(break_tokens);
464 let t2 = other.trees().filter(semantic_tree).flat_map(expand_nt).flat_map(break_tokens); 461 let reparsed_tokens =
462 reparsed_tokens.trees().filter(semantic_tree).flat_map(expand_nt).flat_map(break_tokens);
465 463
466 t1.eq_by(t2, |t1, t2| tokentree_probably_equal_for_proc_macro(&t1, &t2, sess)) 464 tokens.eq_by(reparsed_tokens, |t, rt| tokentree_probably_equal_for_proc_macro(&t, &rt, sess))
467} 465}
468 466
469// See comments in `Nonterminal::to_tokenstream` for why we care about 467// See comments in `Nonterminal::to_tokenstream` for why we care about
@@ -472,16 +470,20 @@ pub fn tokenstream_probably_equal_for_proc_macro(
472// This is otherwise the same as `eq_unspanned`, only recursing with a 470// This is otherwise the same as `eq_unspanned`, only recursing with a
473// different method. 471// different method.
474pub fn tokentree_probably_equal_for_proc_macro( 472pub fn tokentree_probably_equal_for_proc_macro(
475 first: &TokenTree, 473 token: &TokenTree,
476 other: &TokenTree, 474 reparsed_token: &TokenTree,
477 sess: &ParseSess, 475 sess: &ParseSess,
478) -> bool { 476) -> bool {
479 match (first, other) { 477 match (token, reparsed_token) {
480 (TokenTree::Token(token), TokenTree::Token(token2)) => { 478 (TokenTree::Token(token), TokenTree::Token(reparsed_token)) => {
481 token_probably_equal_for_proc_macro(token, token2) 479 token_probably_equal_for_proc_macro(token, reparsed_token)
482 } 480 }
483 (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => { 481 (
484 delim == delim2 && tokenstream_probably_equal_for_proc_macro(&tts, &tts2, sess) 482 TokenTree::Delimited(_, delim, tokens),
483 TokenTree::Delimited(_, reparsed_delim, reparsed_tokens),
484 ) => {
485 delim == reparsed_delim
486 && tokenstream_probably_equal_for_proc_macro(tokens, reparsed_tokens, sess)
485 } 487 }
486 _ => false, 488 _ => false,
487 } 489 }