diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index 741feb4ba3dd4..90dd8dde6b3ea 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -1276,8 +1276,8 @@ impl TokenTree {
     }
 
     /// Use this token tree as a matcher to parse given tts.
-    pub fn parse(cx: &base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree])
-                 -> macro_parser::NamedParseResult {
+    pub fn parse<'a>(cx: &'a base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree])
+                     -> macro_parser::NamedParseResult<'a> {
         // `None` is because we're not interpolating
         let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
                                                          None,
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index d391cd0be7b12..ff867c666c90f 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -330,6 +330,10 @@ impl MultiSpan {
         &self.primary_spans
     }
 
+    pub fn primary_spans_mut(&mut self) -> &mut [Span] {
+        &mut self.primary_spans
+    }
+
     /// Returns the strings to highlight. We always ensure that there
     /// is an entry for each of the primary spans -- for each primary
     /// span P, if there is at least one label with span P, we return
diff --git a/src/libsyntax/errors/mod.rs b/src/libsyntax/errors/mod.rs
index f06672fe111bf..d05d14f0314f2 100644
--- a/src/libsyntax/errors/mod.rs
+++ b/src/libsyntax/errors/mod.rs
@@ -184,7 +184,7 @@ pub struct DiagnosticBuilder<'a> {
     level: Level,
     message: String,
     code: Option<String>,
-    span: MultiSpan,
+    pub span: MultiSpan,
     children: Vec<SubDiagnostic>,
 }
 
@@ -302,11 +302,6 @@ impl<'a> DiagnosticBuilder<'a> {
         self
     }
 
-    pub fn set_span<S: Into<MultiSpan>>(&mut self, sp: S) -> &mut Self {
-        self.span = sp.into();
-        self
-    }
-
     pub fn code(&mut self, s: String) -> &mut Self {
         self.code = Some(s);
         self
@@ -421,7 +416,7 @@ impl Handler {
                                                     msg: &str)
                                                     -> DiagnosticBuilder<'a> {
         let mut result = DiagnosticBuilder::new(self, Level::Warning, msg);
-        result.set_span(sp);
+        result.span = sp.into();
         if !self.can_emit_warnings {
             result.cancel();
         }
@@ -433,7 +428,7 @@ impl Handler {
                                                               code: &str)
                                                               -> DiagnosticBuilder<'a> {
         let mut result = DiagnosticBuilder::new(self, Level::Warning, msg);
-        result.set_span(sp);
+        result.span = sp.into();
         result.code(code.to_owned());
         if !self.can_emit_warnings {
             result.cancel();
@@ -453,7 +448,7 @@
                                                    -> DiagnosticBuilder<'a> {
         self.bump_err_count();
         let mut result = DiagnosticBuilder::new(self, Level::Error, msg);
-        result.set_span(sp);
+        result.span = sp.into();
         result
     }
     pub fn struct_span_err_with_code<'a, S: Into<MultiSpan>>(&'a self,
@@ -463,7 +458,7 @@
                                                              -> DiagnosticBuilder<'a> {
         self.bump_err_count();
         let mut result = DiagnosticBuilder::new(self, Level::Error, msg);
-        result.set_span(sp);
+        result.span = sp.into();
         result.code(code.to_owned());
         result
     }
@@ -477,7 +472,7 @@
                                                      -> DiagnosticBuilder<'a> {
         self.bump_err_count();
         let mut result = DiagnosticBuilder::new(self, Level::Fatal, msg);
-        result.set_span(sp);
+        result.span = sp.into();
         result
     }
     pub fn struct_span_fatal_with_code<'a, S: Into<MultiSpan>>(&'a self,
@@ -487,7 +482,7 @@
                                                                -> DiagnosticBuilder<'a> {
         self.bump_err_count();
         let mut result = DiagnosticBuilder::new(self, Level::Fatal, msg);
-        result.set_span(sp);
+        result.span = sp.into();
         result.code(code.to_owned());
         result
     }
@@ -496,10 +491,10 @@ impl Handler {
         DiagnosticBuilder::new(self, Level::Fatal, msg)
     }
 
-    pub fn cancel(&mut self, err: &mut DiagnosticBuilder) {
+    pub fn cancel(&self, err: &mut DiagnosticBuilder) {
         if err.level == Level::Error || err.level == Level::Fatal {
             assert!(self.has_errors());
-            self.err_count.set(self.err_count.get() + 1);
+            self.err_count.set(self.err_count.get() - 1);
         }
         err.cancel();
     }
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index ca5eb8f8003bb..c0935334a2e35 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -82,9 +82,9 @@ use ast;
 use ast::{TokenTree, Name, Ident};
 use codemap::{BytePos, mk_sp, Span, Spanned};
 use codemap;
-use errors::FatalError;
+use errors::DiagnosticBuilder;
 use parse::lexer::*; //resolve bug?
-use parse::ParseSess;
+use parse::{ParseSess, PResult};
 use parse::parser::{PathStyle, Parser};
 use parse::token::{DocComment, MatchNt, SubstNt};
 use parse::token::{Token, Nonterminal};
@@ -200,8 +200,8 @@ pub enum NamedMatch {
     MatchedNonterminal(Nonterminal)
 }
 
-pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
-               -> ParseResult<HashMap<Name, Rc<NamedMatch>>> {
+pub fn nameize<'a>(p_s: &'a ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
+                   -> ParseResult<'a, HashMap<Name, Rc<NamedMatch>>> {
     fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
              ret_val: &mut HashMap<Name, Rc<NamedMatch>>, idx: &mut usize)
              -> Result<(), (codemap::Span, String)> {
@@ -248,16 +248,16 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
     Success(ret_val)
 }
 
-pub enum ParseResult<T> {
+pub enum ParseResult<'a, T> {
     Success(T),
     /// Arm failed to match
-    Failure(codemap::Span, String),
+    Failure(DiagnosticBuilder<'a>),
     /// Fatal error (malformed macro?). Abort compilation.
     Error(codemap::Span, String)
 }
 
-pub type NamedParseResult = ParseResult<HashMap<Name, Rc<NamedMatch>>>;
-pub type PositionalParseResult = ParseResult<Vec<Rc<NamedMatch>>>;
+pub type NamedParseResult<'a> = ParseResult<'a, HashMap<Name, Rc<NamedMatch>>>;
+pub type PositionalParseResult<'a> = ParseResult<'a, Vec<Rc<NamedMatch>>>;
 
 /// Perform a token equality check, ignoring syntax context (that is, an
 /// unhygienic comparison)
@@ -270,11 +270,11 @@ pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
     }
 }
 
-pub fn parse(sess: &ParseSess,
-             cfg: ast::CrateConfig,
-             mut rdr: TtReader,
-             ms: &[TokenTree])
-             -> NamedParseResult {
+pub fn parse<'a>(sess: &'a ParseSess,
+                 cfg: ast::CrateConfig,
+                 mut rdr: TtReader<'a>,
+                 ms: &[TokenTree])
+                 -> NamedParseResult<'a> {
     let mut cur_eis = Vec::new();
     cur_eis.push(initial_matcher_pos(Rc::new(ms.iter()
                                                .cloned()
@@ -445,7 +445,9 @@ pub fn parse(sess: &ParseSess,
             } else if eof_eis.len() > 1 {
                 return Error(sp, "ambiguity: multiple successful parses".to_string());
             } else {
-                return Failure(sp, "unexpected end of macro invocation".to_string());
+                return Failure(sess.span_diagnostic.struct_span_err(
+                    sp, "unexpected end of macro invocation"
+                ));
             }
         } else {
             if (!bb_eis.is_empty() && !next_eis.is_empty())
@@ -466,8 +468,10 @@ pub fn parse(sess: &ParseSess,
                     }
                 ))
             } else if bb_eis.is_empty() && next_eis.is_empty() {
-                return Failure(sp, format!("no rules expected the token `{}`",
-                                           pprust::token_to_string(&tok)));
+                return Failure(sess.span_diagnostic.struct_span_err(
+                    sp, &format!("no rules expected the token `{}`",
+                                 pprust::token_to_string(&tok))
+                ));
             } else if !next_eis.is_empty() {
                 /* Now process the next token */
                 while !next_eis.is_empty() {
@@ -481,8 +485,12 @@ pub fn parse(sess: &ParseSess,
                 match ei.top_elts.get_tt(ei.idx) {
                     TokenTree::Token(span, MatchNt(_, ident)) => {
                         let match_cur = ei.match_cur;
-                        (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
-                            parse_nt(&mut rust_parser, span, &ident.name.as_str()))));
+                        let nt = match parse_nt(&mut rust_parser, span,
+                                                &ident.name.as_str()) {
+                            Ok(nt) => Rc::new(MatchedNonterminal(nt)),
+                            Err(diag) => return Failure(diag)
+                        };
+                        (&mut ei.matches[match_cur]).push(nt);
                         ei.idx += 1;
                         ei.match_cur += 1;
                     }
@@ -500,55 +508,45 @@ pub fn parse(sess: &ParseSess,
     }
 }
 
-pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
+pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> PResult<'a, Nonterminal> {
     match name {
         "tt" => {
             p.quote_depth += 1; //but in theory, non-quoted tts might be useful
             let res: ::parse::PResult<'a, _> = p.parse_token_tree();
-            let res = token::NtTT(P(panictry!(res)));
+            let res = token::NtTT(P(res?));
             p.quote_depth -= 1;
-            return res;
+            return Ok(res);
         }
         _ => {}
     }
     // check at the beginning and the parser checks after each bump
     p.check_unknown_macro_variable();
     match name {
-        "item" => match panictry!(p.parse_item()) {
-            Some(i) => token::NtItem(i),
-            None => {
-                p.fatal("expected an item keyword").emit();
-                panic!(FatalError);
-            }
+        "item" => match p.parse_item()? {
+            Some(i) => Ok(token::NtItem(i)),
+            None => Err(p.fatal("expected an item keyword"))
         },
-        "block" => token::NtBlock(panictry!(p.parse_block())),
-        "stmt" => match panictry!(p.parse_stmt()) {
-            Some(s) => token::NtStmt(P(s)),
-            None => {
-                p.fatal("expected a statement").emit();
-                panic!(FatalError);
-            }
+        "block" => Ok(token::NtBlock(p.parse_block()?)),
+        "stmt" => match p.parse_stmt()? {
+            Some(s) => Ok(token::NtStmt(P(s))),
+            None => Err(p.fatal("expected a statement"))
         },
-        "pat" => token::NtPat(panictry!(p.parse_pat())),
-        "expr" => token::NtExpr(panictry!(p.parse_expr())),
-        "ty" => token::NtTy(panictry!(p.parse_ty())),
+        "pat" => Ok(token::NtPat(p.parse_pat()?)),
+        "expr" => Ok(token::NtExpr(p.parse_expr()?)),
+        "ty" => Ok(token::NtTy(p.parse_ty()?)),
         // this could be handled like a token, since it is one
         "ident" => match p.token {
             token::Ident(sn) => {
                 p.bump();
-                token::NtIdent(Box::new(Spanned::<Ident>{node: sn, span: p.span}))
+                Ok(token::NtIdent(Box::new(Spanned::<Ident>{node: sn, span: p.span})))
             }
             _ => {
                 let token_str = pprust::token_to_string(&p.token);
-                p.fatal(&format!("expected ident, found {}",
-                                 &token_str[..])).emit();
-                panic!(FatalError)
+                Err(p.fatal(&format!("expected ident, found {}", &token_str[..])))
             }
         },
-        "path" => {
-            token::NtPath(Box::new(panictry!(p.parse_path(PathStyle::Type))))
-        },
-        "meta" => token::NtMeta(panictry!(p.parse_meta_item())),
+        "path" => Ok(token::NtPath(Box::new(p.parse_path(PathStyle::Type)?))),
+        "meta" => Ok(token::NtMeta(p.parse_meta_item()?)),
         // this is not supposed to happen, since it has been checked
         // when compiling the macro.
         _ => p.span_bug(sp, "invalid fragment specifier")
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 3522c8863cf52..97aae15f7e250 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -10,6 +10,7 @@
 
 use ast::{self, TokenTree};
 use codemap::{Span, DUMMY_SP};
+use errors::FatalError;
 use ext::base::{DummyResult, ExtCtxt, MacResult, SyntaxExtension};
 use ext::base::{NormalTT, TTMacroExpander};
 use ext::tt::macro_parser::{Success, Error, Failure};
@@ -158,7 +159,7 @@ impl TTMacroExpander for MacroRulesMacroExpander {
 }
 
 /// Given `lhses` and `rhses`, this is the new macro we create
-fn generic_extension<'cx>(cx: &'cx ExtCtxt,
+fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
                           sp: Span,
                           name: ast::Ident,
                           imported_from: Option<ast::Ident>,
@@ -174,7 +175,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
 
     // Which arm's failure should we report? (the one furthest along)
     let mut best_fail_spot = DUMMY_SP;
-    let mut best_fail_msg = "internal error: ran no matchers".to_string();
+    let mut best_fail_diag = None;
 
     for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
         let lhs_tt = match *lhs {
@@ -184,6 +185,8 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
 
         match TokenTree::parse(cx, lhs_tt, arg) {
             Success(named_matches) => {
+                best_fail_diag.map(|mut d| cx.parse_sess.span_diagnostic.cancel(&mut d));
+
                 let rhs = match rhses[i] {
                     // ignore delimiters
                     TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(),
@@ -214,17 +217,34 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                     macro_ident: name
                 })
             }
-            Failure(sp, ref msg) => if sp.lo >= best_fail_spot.lo {
-                best_fail_spot = sp;
-                best_fail_msg = (*msg).clone();
-            },
+            Failure(diag) => {
+                let sp = diag.span.primary_span();
+                let mut new_diag = Some(diag);
+                if let Some(sp) = sp {
+                    if sp.lo >= best_fail_spot.lo {
+                        best_fail_spot = sp;
+                        ::std::mem::swap(&mut best_fail_diag, &mut new_diag);
+                    }
+                }
+                // remove the previous diag if we swapped, or the new one if we didn't.
+                new_diag.map(|mut diag| cx.parse_sess.span_diagnostic.cancel(&mut diag));
+            }
             Error(err_sp, ref msg) => {
                 cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..])
             }
         }
     }
 
-    cx.span_fatal(best_fail_spot.substitute_dummy(sp), &best_fail_msg[..]);
+    match best_fail_diag {
+        None => cx.span_bug(sp, "internal error: ran no matchers"),
+        Some(mut diag) => {
+            for span in diag.span.primary_spans_mut() {
+                *span = span.substitute_dummy(sp);
+            }
+            diag.emit();
+            panic!(FatalError);
+        }
+    }
 }
 
 // Note that macro-by-example's input is also matched against a token tree:
@@ -279,7 +299,14 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
                                    arg_reader,
                                    &argument_gram) {
         Success(m) => m,
-        Failure(sp, str) | Error(sp, str) => {
+        Failure(mut diag) => {
+            for span in diag.span.primary_spans_mut() {
+                *span = span.substitute_dummy(def.span);
+            }
+            diag.emit();
+            panic!(FatalError);
+        }
+        Error(sp, str) => {
             panic!(cx.parse_sess().span_diagnostic
                      .span_fatal(sp.substitute_dummy(def.span), &str[..]));
         }
diff --git a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs
index 713a7d1e811a2..aa39cefceb440 100644
--- a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs
+++ b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs
@@ -56,8 +56,12 @@ fn expand_mbe_matches(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
                 _ => unreachable!()
             }
         }
-        Failure(_, s) | Error(_, s) => {
-            panic!("expected Success, but got Error/Failure: {}", s);
+        Failure(diag) => {
+            diag.emit();
+            panic!("expected Success, but got Failure");
+        }
+        Error(_, s) => {
+            panic!("expected Success, but got Error: {}", s);
         }
     };
 
diff --git a/src/test/run-pass/issue-27832.rs b/src/test/run-pass/issue-27832.rs
new file mode 100644
index 0000000000000..3d8dfd9bae008
--- /dev/null
+++ b/src/test/run-pass/issue-27832.rs
@@ -0,0 +1,20 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+macro_rules! m {
+    ( $i:ident ) => ();
+    ( $t:tt $j:tt ) => ();
+}
+
+fn main() {
+    m!(c);
+    m!(t 9);
+    m!(0 9);
+}
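
For readers skimming the patch: the core change is that `ParseResult::Failure` now carries a `DiagnosticBuilder` instead of a `(Span, String)`, so callers must either emit or cancel every diagnostic they are handed, keeping only the failure that got furthest into the input. Below is a small standalone sketch of that bookkeeping. It is my own illustration, not code from this diff and not libsyntax's real API; `Diag`, `best_failure`, and `pos` are invented stand-ins for `DiagnosticBuilder`, the arm loop in `generic_extension`, and `Span.lo`.

```rust
// Toy stand-in for DiagnosticBuilder: must be either emitted or cancelled.
#[derive(Debug)]
struct Diag {
    msg: String,
    pos: usize, // stand-in for the failure span's `lo`
}

impl Diag {
    fn cancel(self) { /* a real DiagnosticBuilder downgrades itself here */ }
    fn emit(self) {
        println!("error: {}", self.msg);
    }
}

/// Keep the failure that reached the furthest position; cancel every other one.
fn best_failure(failures: Vec<Diag>) -> Option<Diag> {
    let mut best: Option<Diag> = None;
    for diag in failures {
        let further = best.as_ref().map_or(true, |b| diag.pos >= b.pos);
        if further {
            // Swap the new diagnostic in and cancel the one it replaces.
            if let Some(old) = best.replace(diag) {
                old.cancel();
            }
        } else {
            diag.cancel();
        }
    }
    best
}

fn main() {
    let failures = vec![
        Diag { msg: "no rules expected the token `9`".into(), pos: 3 },
        Diag { msg: "unexpected end of macro invocation".into(), pos: 7 },
    ];
    // The second failure got furthest through the input, so it is the one reported.
    match best_failure(failures) {
        Some(diag) => diag.emit(),
        None => println!("internal error: ran no matchers"),
    }
}
```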