diff --git a/src/Cargo.lock b/src/Cargo.lock index 2d42903ad0a7d..dc94ee27e930d 100644 --- a/src/Cargo.lock +++ b/src/Cargo.lock @@ -883,14 +883,6 @@ name = "proc_macro" version = "0.0.0" dependencies = [ "syntax 0.0.0", -] - -[[package]] -name = "proc_macro_plugin" -version = "0.0.0" -dependencies = [ - "rustc_plugin 0.0.0", - "syntax 0.0.0", "syntax_pos 0.0.0", ] @@ -1203,7 +1195,6 @@ dependencies = [ "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "graphviz 0.0.0", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", - "proc_macro_plugin 0.0.0", "rustc 0.0.0", "rustc_back 0.0.0", "rustc_borrowck 0.0.0", diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs index 69b0c4a2756a6..5a3785b1ed634 100644 --- a/src/bootstrap/lib.rs +++ b/src/bootstrap/lib.rs @@ -482,7 +482,9 @@ impl Build { } } - if self.config.extended && compiler.is_final_stage(self) { + if mode == Mode::Libstd && + self.config.extended && + compiler.is_final_stage(self) { cargo.env("RUSTC_SAVE_ANALYSIS", "api".to_string()); } diff --git a/src/doc/unstable-book/src/library-features/proc-macro.md b/src/doc/unstable-book/src/library-features/proc-macro.md new file mode 100644 index 0000000000000..19e7f663c7ac3 --- /dev/null +++ b/src/doc/unstable-book/src/library-features/proc-macro.md @@ -0,0 +1,7 @@ +# `proc_macro` + +The tracking issue for this feature is: [#38356] + +[#38356]: https://github.com/rust-lang/rust/issues/38356 + +------------------------ diff --git a/src/libproc_macro/Cargo.toml b/src/libproc_macro/Cargo.toml index 7ce65d0fe4dbc..1b5141773a967 100644 --- a/src/libproc_macro/Cargo.toml +++ b/src/libproc_macro/Cargo.toml @@ -9,3 +9,4 @@ crate-type = ["dylib"] [dependencies] syntax = { path = "../libsyntax" } +syntax_pos = { path = "../libsyntax_pos" } diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs index f3d0521a2af6c..06f9634d70613 100644 --- a/src/libproc_macro/lib.rs +++ b/src/libproc_macro/lib.rs @@ -37,18 +37,26 @@ test(no_crate_inject, attr(deny(warnings))), test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))))] +#![feature(i128_type)] #![feature(rustc_private)] #![feature(staged_api)] #![feature(lang_items)] +#[macro_use] extern crate syntax; +extern crate syntax_pos; -use std::fmt; +use std::{ascii, fmt, iter}; use std::str::FromStr; +use syntax::ast; use syntax::errors::DiagnosticBuilder; -use syntax::parse; -use syntax::tokenstream::TokenStream as TokenStream_; +use syntax::parse::{self, token, parse_stream_from_source_str}; +use syntax::print::pprust; +use syntax::symbol::Symbol; +use syntax::tokenstream; +use syntax_pos::DUMMY_SP; +use syntax_pos::SyntaxContext; /// The main type provided by this crate, representing an abstract stream of /// tokens. @@ -60,17 +68,508 @@ use syntax::tokenstream::TokenStream as TokenStream_; /// The API of this type is intentionally bare-bones, but it'll be expanded over /// time! #[stable(feature = "proc_macro_lib", since = "1.15.0")] -pub struct TokenStream { - inner: TokenStream_, -} +#[derive(Clone, Debug)] +pub struct TokenStream(tokenstream::TokenStream); /// Error returned from `TokenStream::from_str`. 
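// Usage sketch (not part of this patch): `TokenStream` implements `FromStr`
// below, so source text can be lexed with `str::parse`. This only works while
// a procedural macro is expanding, because the impl reads the expansion's
// thread-local parse session and panics otherwise:
//
//     let stream: TokenStream = "a + b".parse().expect("valid tokens");
//     // Malformed input returns the `LexError` declared next instead.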
-#[derive(Debug)]
 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
+#[derive(Debug)]
 pub struct LexError {
     _inner: (),
 }
 
+#[stable(feature = "proc_macro_lib", since = "1.15.0")]
+impl FromStr for TokenStream {
+    type Err = LexError;
+
+    fn from_str(src: &str) -> Result<TokenStream, LexError> {
+        __internal::with_sess(|(sess, mark)| {
+            let src = src.to_string();
+            let name = "<proc-macro source code>".to_string();
+            let call_site = mark.expn_info().unwrap().call_site;
+            let stream = parse::parse_stream_from_source_str(name, src, sess, Some(call_site));
+            Ok(__internal::token_stream_wrap(stream))
+        })
+    }
+}
+
+#[stable(feature = "proc_macro_lib", since = "1.15.0")]
+impl fmt::Display for TokenStream {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.0.fmt(f)
+    }
+}
+
+/// `quote!(..)` accepts arbitrary tokens and expands into a `TokenStream` describing the input.
+/// For example, `quote!(a + b)` will produce an expression that, when evaluated, constructs
+/// the `TokenStream` `[Term("a"), Op('+', Alone), Term("b")]`.
+///
+/// Unquoting is done with `$`, and works by taking the single next ident as the unquoted term.
+/// To quote `$` itself, use `$$`.
+#[unstable(feature = "proc_macro", issue = "38356")]
+#[macro_export]
+macro_rules! quote { () => {} }
+
+#[unstable(feature = "proc_macro_internals", issue = "27812")]
+#[doc(hidden)]
+mod quote;
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl From<TokenTree> for TokenStream {
+    fn from(tree: TokenTree) -> TokenStream {
+        TokenStream(tree.to_internal())
+    }
+}
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl From<TokenNode> for TokenStream {
+    fn from(kind: TokenNode) -> TokenStream {
+        TokenTree::from(kind).into()
+    }
+}
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = T>>(streams: I) -> Self {
+        let mut builder = tokenstream::TokenStreamBuilder::new();
+        for stream in streams {
+            builder.push(stream.into().0);
+        }
+        TokenStream(builder.build())
+    }
+}
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl IntoIterator for TokenStream {
+    type Item = TokenTree;
+    type IntoIter = TokenTreeIter;
+
+    fn into_iter(self) -> TokenTreeIter {
+        TokenTreeIter { cursor: self.0.trees(), next: None }
+    }
+}
+
+impl TokenStream {
+    /// Returns an empty `TokenStream`.
+    #[unstable(feature = "proc_macro", issue = "38356")]
+    pub fn empty() -> TokenStream {
+        TokenStream(tokenstream::TokenStream::empty())
+    }
+
+    /// Checks if this `TokenStream` is empty.
+    #[unstable(feature = "proc_macro", issue = "38356")]
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+}
+
+/// A region of source code, along with macro expansion information.
+#[unstable(feature = "proc_macro", issue = "38356")]
+#[derive(Copy, Clone, Debug)]
+pub struct Span(syntax_pos::Span);
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl Default for Span {
+    fn default() -> Span {
+        ::__internal::with_sess(|(_, mark)| Span(syntax_pos::Span {
+            ctxt: SyntaxContext::empty().apply_mark(mark),
+            ..mark.expn_info().unwrap().call_site
+        }))
+    }
+}
+
+/// Quote a `Span` into a `TokenStream`.
+/// This is needed to implement a custom quoter.
+#[unstable(feature = "proc_macro", issue = "38356")]
+pub fn quote_span(span: Span) -> TokenStream {
+    TokenStream(quote::Quote::quote(&span.0))
+}
+
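// Usage sketch (not part of this patch): the `double` example from the
// deleted libproc_macro_plugin docs further down, ported to this API. The
// function name is illustrative; it assumes a proc-macro crate with
// `#![feature(proc_macro)]` and this crate's `quote!` in scope.
fn double(tmp: TokenStream) -> TokenStream {
    // `$tmp` unquotes the captured stream; write `$$` for a literal `$`.
    quote!($tmp * 2)
}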
+impl Span {
+    /// The span of the invocation of the current procedural macro.
+    #[unstable(feature = "proc_macro", issue = "38356")]
+    pub fn call_site() -> Span {
+        ::__internal::with_sess(|(_, mark)| Span(mark.expn_info().unwrap().call_site))
+    }
+}
+
+/// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
+#[unstable(feature = "proc_macro", issue = "38356")]
+#[derive(Clone, Debug)]
+pub struct TokenTree {
+    /// The `TokenTree`'s span.
+    pub span: Span,
+    /// Description of the `TokenTree`.
+    pub kind: TokenNode,
+}
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl From<TokenNode> for TokenTree {
+    fn from(kind: TokenNode) -> TokenTree {
+        TokenTree { span: Span::default(), kind: kind }
+    }
+}
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl fmt::Display for TokenTree {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        TokenStream::from(self.clone()).fmt(f)
+    }
+}
+
+/// Description of a `TokenTree`.
+#[derive(Clone, Debug)]
+#[unstable(feature = "proc_macro", issue = "38356")]
+pub enum TokenNode {
+    /// A delimited tokenstream.
+    Group(Delimiter, TokenStream),
+    /// A Unicode identifier.
+    Term(Term),
+    /// A punctuation character (`+`, `,`, `$`, etc.).
+    Op(char, Spacing),
+    /// A literal character (`'a'`), string (`"hello"`), or number (`2.3`).
+    Literal(Literal),
+}
+
+/// Describes how a sequence of token trees is delimited.
+#[derive(Copy, Clone, Debug)]
+#[unstable(feature = "proc_macro", issue = "38356")]
+pub enum Delimiter {
+    /// `( ... )`
+    Parenthesis,
+    /// `{ ... }`
+    Brace,
+    /// `[ ... ]`
+    Bracket,
+    /// An implicit delimiter, e.g. `$var`, where `$var` is `...`.
+    None,
+}
+
+/// An interned string.
+#[derive(Copy, Clone, Debug)]
+#[unstable(feature = "proc_macro", issue = "38356")]
+pub struct Term(Symbol);
+
+impl Term {
+    /// Intern a string into a `Term`.
+    #[unstable(feature = "proc_macro", issue = "38356")]
+    pub fn intern(string: &str) -> Term {
+        Term(Symbol::intern(string))
+    }
+
+    /// Get a reference to the interned string.
+    #[unstable(feature = "proc_macro", issue = "38356")]
+    pub fn as_str(&self) -> &str {
+        unsafe { &*(&*self.0.as_str() as *const str) }
+    }
+}
+
+/// Whether an `Op` is followed immediately by another `Op` or by whitespace.
+#[derive(Copy, Clone, Debug)]
+#[unstable(feature = "proc_macro", issue = "38356")]
+pub enum Spacing {
+    /// e.g. `+` is `Alone` in `+ =`.
+    Alone,
+    /// e.g. `+` is `Joint` in `+=`.
+    Joint,
+}
+
+/// A literal character (`'a'`), string (`"hello"`), or number (`2.3`).
+#[derive(Clone, Debug)]
+#[unstable(feature = "proc_macro", issue = "38356")]
+pub struct Literal(token::Token);
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl fmt::Display for Literal {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        TokenTree { kind: TokenNode::Literal(self.clone()), span: Span(DUMMY_SP) }.fmt(f)
+    }
+}
+
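// Usage sketch (not part of this patch): constructing literal tokens with the
// constructors defined below, e.g. to splice constants into macro output.
fn literal_examples() -> Vec<TokenNode> {
    vec![
        TokenNode::Literal(Literal::u8(5)),           // suffixed `5u8`, via int_literals!
        TokenNode::Literal(Literal::float(2.3)),      // unsuffixed `2.3`
        TokenNode::Literal(Literal::string("hello")), // `"hello"`
    ]
}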
+macro_rules! int_literals {
+    ($($int_kind:ident),*) => {$(
+        /// Integer literal.
+        #[unstable(feature = "proc_macro", issue = "38356")]
+        pub fn $int_kind(n: $int_kind) -> Literal {
+            Literal::typed_integer(n as i128, stringify!($int_kind))
+        }
+    )*}
+}
+
+impl Literal {
+    /// Integer literal.
+    #[unstable(feature = "proc_macro", issue = "38356")]
+    pub fn integer(n: i128) -> Literal {
+        Literal(token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())), None))
+    }
+
+    int_literals!(u8, i8, u16, i16, u32, i32, u64, i64);
+    fn typed_integer(n: i128, kind: &'static str) -> Literal {
+        Literal(token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())),
+                               Some(Symbol::intern(kind))))
+    }
+
+    /// Unsuffixed floating point literal.
+    #[unstable(feature = "proc_macro", issue = "38356")]
+    pub fn float(n: f64) -> Literal {
+        if !n.is_finite() {
+            panic!("Invalid float literal {}", n);
+        }
+        Literal(token::Literal(token::Lit::Float(Symbol::intern(&n.to_string())), None))
+    }
+
+    /// `f32` floating point literal.
+    #[unstable(feature = "proc_macro", issue = "38356")]
+    pub fn f32(n: f32) -> Literal {
+        if !n.is_finite() {
+            panic!("Invalid f32 literal {}", n);
+        }
+        Literal(token::Literal(token::Lit::Float(Symbol::intern(&n.to_string())),
+                               Some(Symbol::intern("f32"))))
+    }
+
+    /// `f64` floating point literal.
+    #[unstable(feature = "proc_macro", issue = "38356")]
+    pub fn f64(n: f64) -> Literal {
+        if !n.is_finite() {
+            panic!("Invalid f64 literal {}", n);
+        }
+        Literal(token::Literal(token::Lit::Float(Symbol::intern(&n.to_string())),
+                               Some(Symbol::intern("f64"))))
+    }
+
+    /// String literal.
+    #[unstable(feature = "proc_macro", issue = "38356")]
+    pub fn string(string: &str) -> Literal {
+        let mut escaped = String::new();
+        for ch in string.chars() {
+            escaped.extend(ch.escape_unicode());
+        }
+        Literal(token::Literal(token::Lit::Str_(Symbol::intern(&escaped)), None))
+    }
+
+    /// Character literal.
+    #[unstable(feature = "proc_macro", issue = "38356")]
+    pub fn character(ch: char) -> Literal {
+        let mut escaped = String::new();
+        escaped.extend(ch.escape_unicode());
+        Literal(token::Literal(token::Lit::Char(Symbol::intern(&escaped)), None))
+    }
+
+    /// Byte string literal.
+    #[unstable(feature = "proc_macro", issue = "38356")]
+    pub fn byte_string(bytes: &[u8]) -> Literal {
+        let string = bytes.iter().cloned().flat_map(ascii::escape_default)
+            .map(Into::<char>::into).collect::<String>();
+        Literal(token::Literal(token::Lit::ByteStr(Symbol::intern(&string)), None))
+    }
+}
+
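// Usage sketch (not part of this patch): consuming a `TokenStream` through the
// iterator defined below. `next` yields top-level trees, transparently
// flattening dummy-span `Delimiter::None` groups (interpolated tokens).
fn count_idents(stream: TokenStream) -> usize {
    stream.into_iter()
        .filter(|tree| match tree.kind {
            TokenNode::Term(_) => true, // identifiers and lifetimes
            _ => false,
        })
        .count()
}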
+/// An iterator over `TokenTree`s.
+#[unstable(feature = "proc_macro", issue = "38356")]
+pub struct TokenTreeIter {
+    cursor: tokenstream::Cursor,
+    next: Option<tokenstream::TokenStream>,
+}
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl Iterator for TokenTreeIter {
+    type Item = TokenTree;
+
+    fn next(&mut self) -> Option<TokenTree> {
+        loop {
+            let next =
+                unwrap_or!(self.next.take().or_else(|| self.cursor.next_as_stream()), return None);
+            let tree = TokenTree::from_internal(next, &mut self.next);
+            if tree.span.0 == DUMMY_SP {
+                if let TokenNode::Group(Delimiter::None, stream) = tree.kind {
+                    self.cursor.insert(stream.0);
+                    continue
+                }
+            }
+            return Some(tree);
+        }
+    }
+}
+
+impl Delimiter {
+    fn from_internal(delim: token::DelimToken) -> Delimiter {
+        match delim {
+            token::Paren => Delimiter::Parenthesis,
+            token::Brace => Delimiter::Brace,
+            token::Bracket => Delimiter::Bracket,
+            token::NoDelim => Delimiter::None,
+        }
+    }
+
+    fn to_internal(self) -> token::DelimToken {
+        match self {
+            Delimiter::Parenthesis => token::Paren,
+            Delimiter::Brace => token::Brace,
+            Delimiter::Bracket => token::Bracket,
+            Delimiter::None => token::NoDelim,
+        }
+    }
+}
+
+impl TokenTree {
+    fn from_internal(stream: tokenstream::TokenStream,
+                     next: &mut Option<tokenstream::TokenStream>)
+                     -> TokenTree {
+        use syntax::parse::token::*;
+
+        let (tree, is_joint) = stream.as_tree();
+        let (mut span, token) = match tree {
+            tokenstream::TokenTree::Token(span, token) => (span, token),
+            tokenstream::TokenTree::Delimited(span, delimed) => {
+                let delimiter = Delimiter::from_internal(delimed.delim);
+                return TokenTree {
+                    span: Span(span),
+                    kind: TokenNode::Group(delimiter, TokenStream(delimed.tts.into())),
+                };
+            }
+        };
+
+        let op_kind = if is_joint { Spacing::Joint } else { Spacing::Alone };
+        macro_rules! op {
+            ($op:expr) => { TokenNode::Op($op, op_kind) }
+        }
+
+        macro_rules!
joint { + ($first:expr, $rest:expr) => { joint($first, $rest, is_joint, &mut span, next) } + } + + fn joint(first: char, rest: Token, is_joint: bool, span: &mut syntax_pos::Span, + next: &mut Option) + -> TokenNode { + let (first_span, rest_span) = (*span, *span); + *span = first_span; + let tree = tokenstream::TokenTree::Token(rest_span, rest); + *next = Some(if is_joint { tree.joint() } else { tree.into() }); + TokenNode::Op(first, Spacing::Joint) + } + + let kind = match token { + Eq => op!('='), + Lt => op!('<'), + Le => joint!('<', Eq), + EqEq => joint!('=', Eq), + Ne => joint!('!', Eq), + Ge => joint!('>', Eq), + Gt => op!('>'), + AndAnd => joint!('&', BinOp(And)), + OrOr => joint!('|', BinOp(Or)), + Not => op!('!'), + Tilde => op!('~'), + BinOp(Plus) => op!('+'), + BinOp(Minus) => op!('-'), + BinOp(Star) => op!('*'), + BinOp(Slash) => op!('/'), + BinOp(Percent) => op!('%'), + BinOp(Caret) => op!('^'), + BinOp(And) => op!('&'), + BinOp(Or) => op!('|'), + BinOp(Shl) => joint!('<', Lt), + BinOp(Shr) => joint!('>', Gt), + BinOpEq(Plus) => joint!('+', Eq), + BinOpEq(Minus) => joint!('-', Eq), + BinOpEq(Star) => joint!('*', Eq), + BinOpEq(Slash) => joint!('/', Eq), + BinOpEq(Percent) => joint!('%', Eq), + BinOpEq(Caret) => joint!('^', Eq), + BinOpEq(And) => joint!('&', Eq), + BinOpEq(Or) => joint!('|', Eq), + BinOpEq(Shl) => joint!('<', Le), + BinOpEq(Shr) => joint!('>', Ge), + At => op!('@'), + Dot => op!('.'), + DotDot => joint!('.', Dot), + DotDotDot => joint!('.', DotDot), + Comma => op!(','), + Semi => op!(';'), + Colon => op!(':'), + ModSep => joint!(':', Colon), + RArrow => joint!('-', Gt), + LArrow => joint!('<', BinOp(Minus)), + FatArrow => joint!('=', Gt), + Pound => op!('#'), + Dollar => op!('$'), + Question => op!('?'), + Underscore => op!('_'), + + Ident(ident) | Lifetime(ident) => TokenNode::Term(Term(ident.name)), + Literal(..) | DocComment(..) => TokenNode::Literal(self::Literal(token)), + + Interpolated(ref nt) => __internal::with_sess(|(sess, _)| { + TokenNode::Group(Delimiter::None, TokenStream(nt.1.force(|| { + // FIXME(jseyfried): Avoid this pretty-print + reparse hack + let name = "".to_owned(); + let source = pprust::token_to_string(&token); + parse_stream_from_source_str(name, source, sess, Some(span)) + }))) + }), + + OpenDelim(..) | CloseDelim(..) => unreachable!(), + Whitespace | Comment | Shebang(..) | Eof => unreachable!(), + }; + + TokenTree { span: Span(span), kind: kind } + } + + fn to_internal(self) -> tokenstream::TokenStream { + use syntax::parse::token::*; + use syntax::tokenstream::{TokenTree, Delimited}; + + let (op, kind) = match self.kind { + TokenNode::Op(op, kind) => (op, kind), + TokenNode::Group(delimiter, tokens) => { + return TokenTree::Delimited(self.span.0, Delimited { + delim: delimiter.to_internal(), + tts: tokens.0.into(), + }).into(); + }, + TokenNode::Term(symbol) => { + let ident = ast::Ident { name: symbol.0, ctxt: self.span.0.ctxt }; + let token = + if symbol.0.as_str().starts_with("'") { Lifetime(ident) } else { Ident(ident) }; + return TokenTree::Token(self.span.0, token).into(); + } + TokenNode::Literal(token) => return TokenTree::Token(self.span.0, token.0).into(), + }; + + let token = match op { + '=' => Eq, + '<' => Lt, + '>' => Gt, + '!' => Not, + '~' => Tilde, + '+' => BinOp(Plus), + '-' => BinOp(Minus), + '*' => BinOp(Star), + '/' => BinOp(Slash), + '%' => BinOp(Percent), + '^' => BinOp(Caret), + '&' => BinOp(And), + '|' => BinOp(Or), + '@' => At, + '.' 
=> Dot, + ',' => Comma, + ';' => Semi, + ':' => Colon, + '#' => Pound, + '$' => Dollar, + '?' => Question, + '_' => Underscore, + _ => panic!("unsupported character {}", op), + }; + + let tree = TokenTree::Token(self.span.0, token); + match kind { + Spacing::Alone => tree.into(), + Spacing::Joint => tree.joint(), + } + } +} + /// Permanently unstable internal implementation details of this crate. This /// should not be used. /// @@ -83,32 +582,33 @@ pub struct LexError { #[unstable(feature = "proc_macro_internals", issue = "27812")] #[doc(hidden)] pub mod __internal { + pub use quote::{Quoter, __rt}; + use std::cell::Cell; - use std::rc::Rc; use syntax::ast; + use syntax::ext::base::ExtCtxt; + use syntax::ext::hygiene::Mark; use syntax::ptr::P; - use syntax::parse::{self, token, ParseSess}; - use syntax::tokenstream::{TokenTree, TokenStream as TokenStream_}; + use syntax::parse::{self, ParseSess}; + use syntax::parse::token::{self, Token}; + use syntax::tokenstream; + use syntax_pos::DUMMY_SP; use super::{TokenStream, LexError}; pub fn new_token_stream(item: P) -> TokenStream { - TokenStream { - inner: TokenTree::Token(item.span, token::Interpolated(Rc::new(token::NtItem(item)))) - .into() - } + let token = Token::interpolated(token::NtItem(item)); + TokenStream(tokenstream::TokenTree::Token(DUMMY_SP, token).into()) } - pub fn token_stream_wrap(inner: TokenStream_) -> TokenStream { - TokenStream { - inner: inner - } + pub fn token_stream_wrap(inner: tokenstream::TokenStream) -> TokenStream { + TokenStream(inner) } pub fn token_stream_parse_items(stream: TokenStream) -> Result>, LexError> { - with_parse_sess(move |sess| { - let mut parser = parse::stream_to_parser(sess, stream.inner); + with_sess(move |(sess, _)| { + let mut parser = parse::stream_to_parser(sess, stream.0); let mut items = Vec::new(); while let Some(item) = try!(parser.parse_item().map_err(super::parse_to_lex_err)) { @@ -119,8 +619,8 @@ pub mod __internal { }) } - pub fn token_stream_inner(stream: TokenStream) -> TokenStream_ { - stream.inner + pub fn token_stream_inner(stream: TokenStream) -> tokenstream::TokenStream { + stream.0 } pub trait Registry { @@ -140,13 +640,14 @@ pub mod __internal { // Emulate scoped_thread_local!() here essentially thread_local! 
{ - static CURRENT_SESS: Cell<*const ParseSess> = Cell::new(0 as *const _); + static CURRENT_SESS: Cell<(*const ParseSess, Mark)> = + Cell::new((0 as *const _, Mark::root())); } - pub fn set_parse_sess(sess: &ParseSess, f: F) -> R + pub fn set_sess(cx: &ExtCtxt, f: F) -> R where F: FnOnce() -> R { - struct Reset { prev: *const ParseSess } + struct Reset { prev: (*const ParseSess, Mark) } impl Drop for Reset { fn drop(&mut self) { @@ -156,18 +657,18 @@ pub mod __internal { CURRENT_SESS.with(|p| { let _reset = Reset { prev: p.get() }; - p.set(sess); + p.set((cx.parse_sess, cx.current_expansion.mark)); f() }) } - pub fn with_parse_sess(f: F) -> R - where F: FnOnce(&ParseSess) -> R + pub fn with_sess(f: F) -> R + where F: FnOnce((&ParseSess, Mark)) -> R { let p = CURRENT_SESS.with(|p| p.get()); - assert!(!p.is_null(), "proc_macro::__internal::with_parse_sess() called \ - before set_parse_sess()!"); - f(unsafe { &*p }) + assert!(!p.0.is_null(), "proc_macro::__internal::with_sess() called \ + before set_parse_sess()!"); + f(unsafe { (&*p.0, p.1) }) } } @@ -175,24 +676,3 @@ fn parse_to_lex_err(mut err: DiagnosticBuilder) -> LexError { err.cancel(); LexError { _inner: () } } - -#[stable(feature = "proc_macro_lib", since = "1.15.0")] -impl FromStr for TokenStream { - type Err = LexError; - - fn from_str(src: &str) -> Result { - __internal::with_parse_sess(|sess| { - let src = src.to_string(); - let name = "".to_string(); - let stream = parse::parse_stream_from_source_str(name, src, sess); - Ok(__internal::token_stream_wrap(stream)) - }) - } -} - -#[stable(feature = "proc_macro_lib", since = "1.15.0")] -impl fmt::Display for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.inner.fmt(f) - } -} diff --git a/src/libproc_macro/quote.rs b/src/libproc_macro/quote.rs new file mode 100644 index 0000000000000..bee2c1e0eb6b6 --- /dev/null +++ b/src/libproc_macro/quote.rs @@ -0,0 +1,263 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! # Quasiquoter +//! This file contains the implementation internals of the quasiquoter provided by `quote!`. + +//! This quasiquoter uses macros 2.0 hygiene to reliably use items from `__rt`, +//! including re-exported API `libsyntax`, to build a `syntax::tokenstream::TokenStream` +//! and wrap it into a `proc_macro::TokenStream`. + +use syntax::ast::Ident; +use syntax::ext::base::{ExtCtxt, ProcMacro}; +use syntax::parse::token::{self, Token, Lit}; +use syntax::symbol::Symbol; +use syntax::tokenstream::{Delimited, TokenTree, TokenStream, TokenStreamBuilder}; +use syntax_pos::{DUMMY_SP, Span}; +use syntax_pos::hygiene::SyntaxContext; + +pub struct Quoter; + +pub mod __rt { + pub use syntax::ast::Ident; + pub use syntax::parse::token; + pub use syntax::symbol::Symbol; + pub use syntax::tokenstream::{TokenStream, TokenStreamBuilder, TokenTree, Delimited}; + pub use super::{ctxt, span}; + + pub fn unquote + Clone>(tokens: &T) -> TokenStream { + T::into(tokens.clone()).0 + } +} + +pub fn ctxt() -> SyntaxContext { + ::__internal::with_sess(|(_, mark)| SyntaxContext::empty().apply_mark(mark)) +} + +pub fn span() -> Span { + ::Span::default().0 +} + +pub trait Quote { + fn quote(&self) -> TokenStream; +} + +macro_rules! 
quote_tok { + (,) => { Token::Comma }; + (.) => { Token::Dot }; + (:) => { Token::Colon }; + (::) => { Token::ModSep }; + (!) => { Token::Not }; + (<) => { Token::Lt }; + (>) => { Token::Gt }; + (_) => { Token::Underscore }; + (0) => { Token::Literal(token::Lit::Integer(Symbol::intern("0")), None) }; + (&) => { Token::BinOp(token::And) }; + ($i:ident) => { Token::Ident(Ident { name: Symbol::intern(stringify!($i)), ctxt: ctxt() }) }; +} + +macro_rules! quote_tree { + ((unquote $($t:tt)*)) => { TokenStream::from($($t)*) }; + ((quote $($t:tt)*)) => { ($($t)*).quote() }; + (($($t:tt)*)) => { delimit(token::Paren, quote!($($t)*)) }; + ([$($t:tt)*]) => { delimit(token::Bracket, quote!($($t)*)) }; + ({$($t:tt)*}) => { delimit(token::Brace, quote!($($t)*)) }; + (rt) => { quote!(::__internal::__rt) }; + ($t:tt) => { TokenStream::from(TokenTree::Token(span(), quote_tok!($t))) }; +} + +fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream { + TokenTree::Delimited(span(), Delimited { delim: delim, tts: stream.into() }).into() +} + +macro_rules! quote { + () => { TokenStream::empty() }; + ($($t:tt)*) => { [ $( quote_tree!($t), )* ].iter().cloned().collect::() }; +} + +impl ProcMacro for Quoter { + fn expand<'cx>(&self, cx: &'cx mut ExtCtxt, _: Span, stream: TokenStream) -> TokenStream { + let mut info = cx.current_expansion.mark.expn_info().unwrap(); + info.callee.allow_internal_unstable = true; + cx.current_expansion.mark.set_expn_info(info); + ::__internal::set_sess(cx, || quote!(::TokenStream((quote stream)))) + } +} + +impl Quote for Option { + fn quote(&self) -> TokenStream { + match *self { + Some(ref t) => quote!(Some((quote t))), + None => quote!(None), + } + } +} + +impl Quote for TokenStream { + fn quote(&self) -> TokenStream { + let mut builder = TokenStreamBuilder::new(); + builder.push(quote!(rt::TokenStreamBuilder::new())); + + let mut trees = self.trees(); + loop { + let (mut tree, mut is_joint) = match trees.next_as_stream() { + Some(next) => next.as_tree(), + None => return builder.add(quote!(.build())).build(), + }; + if let TokenTree::Token(_, Token::Dollar) = tree { + let (next_tree, next_is_joint) = match trees.next_as_stream() { + Some(next) => next.as_tree(), + None => panic!("unexpected trailing `$` in `quote!`"), + }; + match next_tree { + TokenTree::Token(_, Token::Ident(..)) => { + builder.push(quote!(.add(rt::unquote(&(unquote next_tree))))); + continue + } + TokenTree::Token(_, Token::Dollar) => { + tree = next_tree; + is_joint = next_is_joint; + } + _ => panic!("`$` must be followed by an ident or `$` in `quote!`"), + } + } + + builder.push(match is_joint { + true => quote!(.add((quote tree).joint())), + false => quote!(.add(rt::TokenStream::from((quote tree)))), + }); + } + } +} + +impl Quote for TokenTree { + fn quote(&self) -> TokenStream { + match *self { + TokenTree::Token(span, ref token) => quote! { + rt::TokenTree::Token((quote span), (quote token)) + }, + TokenTree::Delimited(span, ref delimited) => quote! 
{ + rt::TokenTree::Delimited((quote span), (quote delimited)) + }, + } + } +} + +impl Quote for Delimited { + fn quote(&self) -> TokenStream { + quote!(rt::Delimited { delim: (quote self.delim), tts: (quote self.stream()).into() }) + } +} + +impl<'a> Quote for &'a str { + fn quote(&self) -> TokenStream { + TokenTree::Token(span(), Token::Literal(token::Lit::Str_(Symbol::intern(self)), None)) + .into() + } +} + +impl Quote for usize { + fn quote(&self) -> TokenStream { + let integer_symbol = Symbol::intern(&self.to_string()); + TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Integer(integer_symbol), None)) + .into() + } +} + +impl Quote for Ident { + fn quote(&self) -> TokenStream { + quote!(rt::Ident { name: (quote self.name), ctxt: rt::ctxt() }) + } +} + +impl Quote for Symbol { + fn quote(&self) -> TokenStream { + quote!(rt::Symbol::intern((quote &*self.as_str()))) + } +} + +impl Quote for Span { + fn quote(&self) -> TokenStream { + quote!(rt::span()) + } +} + +impl Quote for Token { + fn quote(&self) -> TokenStream { + macro_rules! gen_match { + ($($i:ident),*; $($t:tt)*) => { + match *self { + $( Token::$i => quote!(rt::token::$i), )* + $( $t )* + } + } + } + + gen_match! { + Eq, Lt, Le, EqEq, Ne, Ge, Gt, AndAnd, OrOr, Not, Tilde, At, Dot, DotDot, DotDotDot, + Comma, Semi, Colon, ModSep, RArrow, LArrow, FatArrow, Pound, Dollar, Question, + Underscore; + + Token::OpenDelim(delim) => quote!(rt::token::OpenDelim((quote delim))), + Token::CloseDelim(delim) => quote!(rt::token::CloseDelim((quote delim))), + Token::BinOp(tok) => quote!(rt::token::BinOp((quote tok))), + Token::BinOpEq(tok) => quote!(rt::token::BinOpEq((quote tok))), + Token::Ident(ident) => quote!(rt::token::Ident((quote ident))), + Token::Lifetime(ident) => quote!(rt::token::Lifetime((quote ident))), + Token::Literal(lit, sfx) => quote!(rt::token::Literal((quote lit), (quote sfx))), + _ => panic!("Unhandled case!"), + } + } +} + +impl Quote for token::BinOpToken { + fn quote(&self) -> TokenStream { + macro_rules! gen_match { + ($($i:ident),*) => { + match *self { + $( token::BinOpToken::$i => quote!(rt::token::BinOpToken::$i), )* + } + } + } + + gen_match!(Plus, Minus, Star, Slash, Percent, Caret, And, Or, Shl, Shr) + } +} + +impl Quote for Lit { + fn quote(&self) -> TokenStream { + macro_rules! gen_match { + ($($i:ident),*; $($raw:ident),*) => { + match *self { + $( Lit::$i(lit) => quote!(rt::token::Lit::$i((quote lit))), )* + $( Lit::$raw(lit, n) => { + quote!(::syntax::parse::token::Lit::$raw((quote lit), (quote n))) + })* + } + } + } + + gen_match!(Byte, Char, Float, Str_, Integer, ByteStr; StrRaw, ByteStrRaw) + } +} + +impl Quote for token::DelimToken { + fn quote(&self) -> TokenStream { + macro_rules! 
gen_match { + ($($i:ident),*) => { + match *self { + $(token::DelimToken::$i => { quote!(rt::token::DelimToken::$i) })* + } + } + } + + gen_match!(Paren, Bracket, Brace, NoDelim) + } +} diff --git a/src/libproc_macro_plugin/Cargo.toml b/src/libproc_macro_plugin/Cargo.toml deleted file mode 100644 index 146a66cdf01cb..0000000000000 --- a/src/libproc_macro_plugin/Cargo.toml +++ /dev/null @@ -1,13 +0,0 @@ -[package] -authors = ["The Rust Project Developers"] -name = "proc_macro_plugin" -version = "0.0.0" - -[lib] -path = "lib.rs" -crate-type = ["dylib"] - -[dependencies] -rustc_plugin = { path = "../librustc_plugin" } -syntax = { path = "../libsyntax" } -syntax_pos = { path = "../libsyntax_pos" } diff --git a/src/libproc_macro_plugin/lib.rs b/src/libproc_macro_plugin/lib.rs deleted file mode 100644 index d1bc0966eb567..0000000000000 --- a/src/libproc_macro_plugin/lib.rs +++ /dev/null @@ -1,103 +0,0 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! # Proc_Macro -//! -//! A library for procedural macro writers. -//! -//! ## Usage -//! This crate provides the `quote!` macro for syntax creation. -//! -//! The `quote!` macro uses the crate `syntax`, so users must declare `extern crate syntax;` -//! at the crate root. This is a temporary solution until we have better hygiene. -//! -//! ## Quasiquotation -//! -//! The quasiquoter creates output that, when run, constructs the tokenstream specified as -//! input. For example, `quote!(5 + 5)` will produce a program, that, when run, will -//! construct the TokenStream `5 | + | 5`. -//! -//! ### Unquoting -//! -//! Unquoting is done with `$`, and works by taking the single next ident as the unquoted term. -//! To quote `$` itself, use `$$`. -//! -//! A simple example is: -//! -//!``` -//!fn double(tmp: TokenStream) -> TokenStream { -//! quote!($tmp * 2) -//!} -//!``` -//! -//! ### Large example: Scheme's `cond` -//! -//! Below is an example implementation of Scheme's `cond`. -//! -//! ``` -//! fn cond(input: TokenStream) -> TokenStream { -//! let mut conds = Vec::new(); -//! let mut input = input.trees().peekable(); -//! while let Some(tree) = input.next() { -//! let mut cond = match tree { -//! TokenTree::Delimited(_, ref delimited) => delimited.stream(), -//! _ => panic!("Invalid input"), -//! }; -//! let mut trees = cond.trees(); -//! let test = trees.next(); -//! let rhs = trees.collect::(); -//! if rhs.is_empty() { -//! panic!("Invalid macro usage in cond: {}", cond); -//! } -//! let is_else = match test { -//! Some(TokenTree::Token(_, Token::Ident(ident))) if ident.name == "else" => true, -//! _ => false, -//! }; -//! conds.push(if is_else || input.peek().is_none() { -//! quote!({ $rhs }) -//! } else { -//! let test = test.unwrap(); -//! quote!(if $test { $rhs } else) -//! }); -//! } -//! -//! conds.into_iter().collect() -//! } -//! 
``` -#![crate_name = "proc_macro_plugin"] -#![feature(plugin_registrar)] -#![crate_type = "dylib"] -#![crate_type = "rlib"] -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] -#![deny(warnings)] - -#![feature(rustc_diagnostic_macros)] - -extern crate rustc_plugin; -extern crate syntax; -extern crate syntax_pos; - -mod quote; -use quote::quote; - -use rustc_plugin::Registry; -use syntax::ext::base::SyntaxExtension; -use syntax::symbol::Symbol; - -// ____________________________________________________________________________________________ -// Main macro definition - -#[plugin_registrar] -pub fn plugin_registrar(reg: &mut Registry) { - reg.register_syntax_extension(Symbol::intern("quote"), - SyntaxExtension::ProcMacro(Box::new(quote))); -} diff --git a/src/libproc_macro_plugin/quote.rs b/src/libproc_macro_plugin/quote.rs deleted file mode 100644 index 09675564291a2..0000000000000 --- a/src/libproc_macro_plugin/quote.rs +++ /dev/null @@ -1,230 +0,0 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! # Quasiquoter -//! This file contains the implementation internals of the quasiquoter provided by `qquote!`. - -use syntax::ast::Ident; -use syntax::parse::token::{self, Token, Lit}; -use syntax::symbol::Symbol; -use syntax::tokenstream::{self, Delimited, TokenTree, TokenStream}; -use syntax_pos::DUMMY_SP; - -use std::iter; - -pub fn quote<'cx>(stream: TokenStream) -> TokenStream { - stream.quote() -} - -trait Quote { - fn quote(&self) -> TokenStream; -} - -macro_rules! quote_tok { - (,) => { Token::Comma }; - (.) => { Token::Dot }; - (:) => { Token::Colon }; - (::) => { Token::ModSep }; - (!) => { Token::Not }; - (<) => { Token::Lt }; - (>) => { Token::Gt }; - (_) => { Token::Underscore }; - ($i:ident) => { Token::Ident(Ident::from_str(stringify!($i))) }; -} - -macro_rules! quote_tree { - ((unquote $($t:tt)*)) => { $($t)* }; - ((quote $($t:tt)*)) => { ($($t)*).quote() }; - (($($t:tt)*)) => { delimit(token::Paren, quote!($($t)*)) }; - ([$($t:tt)*]) => { delimit(token::Bracket, quote!($($t)*)) }; - ({$($t:tt)*}) => { delimit(token::Brace, quote!($($t)*)) }; - ($t:tt) => { TokenStream::from(TokenTree::Token(DUMMY_SP, quote_tok!($t))) }; -} - -fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream { - TokenTree::Delimited(DUMMY_SP, Delimited { delim: delim, tts: stream.into() }).into() -} - -macro_rules! 
quote { - () => { TokenStream::empty() }; - ($($t:tt)*) => { [ $( quote_tree!($t), )* ].iter().cloned().collect::() }; -} - -impl Quote for Option { - fn quote(&self) -> TokenStream { - match *self { - Some(ref t) => quote!(::std::option::Option::Some((quote t))), - None => quote!(::std::option::Option::None), - } - } -} - -impl Quote for TokenStream { - fn quote(&self) -> TokenStream { - if self.is_empty() { - return quote!(::syntax::tokenstream::TokenStream::empty()); - } - - struct Quoter(iter::Peekable); - - impl Iterator for Quoter { - type Item = TokenStream; - - fn next(&mut self) -> Option { - let quoted_tree = if let Some(&TokenTree::Token(_, Token::Dollar)) = self.0.peek() { - self.0.next(); - match self.0.next() { - Some(tree @ TokenTree::Token(_, Token::Ident(..))) => Some(tree.into()), - Some(tree @ TokenTree::Token(_, Token::Dollar)) => Some(tree.quote()), - // FIXME(jseyfried): improve these diagnostics - Some(..) => panic!("`$` must be followed by an ident or `$` in `quote!`"), - None => panic!("unexpected trailing `$` in `quote!`"), - } - } else { - self.0.next().as_ref().map(Quote::quote) - }; - - quoted_tree.map(|quoted_tree| { - quote!(::syntax::tokenstream::TokenStream::from((unquote quoted_tree)),) - }) - } - } - - let quoted = Quoter(self.trees().peekable()).collect::(); - quote!([(unquote quoted)].iter().cloned().collect::<::syntax::tokenstream::TokenStream>()) - } -} - -impl Quote for TokenTree { - fn quote(&self) -> TokenStream { - match *self { - TokenTree::Token(_, ref token) => quote! { - ::syntax::tokenstream::TokenTree::Token(::syntax::ext::quote::rt::DUMMY_SP, - (quote token)) - }, - TokenTree::Delimited(_, ref delimited) => quote! { - ::syntax::tokenstream::TokenTree::Delimited(::syntax::ext::quote::rt::DUMMY_SP, - (quote delimited)) - }, - } - } -} - -impl Quote for Delimited { - fn quote(&self) -> TokenStream { - quote!(::syntax::tokenstream::Delimited { - delim: (quote self.delim), - tts: (quote self.stream()).into(), - }) - } -} - -impl<'a> Quote for &'a str { - fn quote(&self) -> TokenStream { - TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Str_(Symbol::intern(self)), None)) - .into() - } -} - -impl Quote for usize { - fn quote(&self) -> TokenStream { - let integer_symbol = Symbol::intern(&self.to_string()); - TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Integer(integer_symbol), None)) - .into() - } -} - -impl Quote for Ident { - fn quote(&self) -> TokenStream { - // FIXME(jseyfried) quote hygiene - quote!(::syntax::ast::Ident::from_str((quote &*self.name.as_str()))) - } -} - -impl Quote for Symbol { - fn quote(&self) -> TokenStream { - quote!(::syntax::symbol::Symbol::intern((quote &*self.as_str()))) - } -} - -impl Quote for Token { - fn quote(&self) -> TokenStream { - macro_rules! gen_match { - ($($i:ident),*; $($t:tt)*) => { - match *self { - $( Token::$i => quote!(::syntax::parse::token::$i), )* - $( $t )* - } - } - } - - gen_match! 
{ - Eq, Lt, Le, EqEq, Ne, Ge, Gt, AndAnd, OrOr, Not, Tilde, At, Dot, DotDot, DotDotDot, - Comma, Semi, Colon, ModSep, RArrow, LArrow, FatArrow, Pound, Dollar, Question, - Underscore; - - Token::OpenDelim(delim) => quote!(::syntax::parse::token::OpenDelim((quote delim))), - Token::CloseDelim(delim) => quote!(::syntax::parse::token::CloseDelim((quote delim))), - Token::BinOp(tok) => quote!(::syntax::parse::token::BinOp((quote tok))), - Token::BinOpEq(tok) => quote!(::syntax::parse::token::BinOpEq((quote tok))), - Token::Ident(ident) => quote!(::syntax::parse::token::Ident((quote ident))), - Token::Lifetime(ident) => quote!(::syntax::parse::token::Lifetime((quote ident))), - Token::Literal(lit, sfx) => quote! { - ::syntax::parse::token::Literal((quote lit), (quote sfx)) - }, - _ => panic!("Unhandled case!"), - } - } -} - -impl Quote for token::BinOpToken { - fn quote(&self) -> TokenStream { - macro_rules! gen_match { - ($($i:ident),*) => { - match *self { - $( token::BinOpToken::$i => quote!(::syntax::parse::token::BinOpToken::$i), )* - } - } - } - - gen_match!(Plus, Minus, Star, Slash, Percent, Caret, And, Or, Shl, Shr) - } -} - -impl Quote for Lit { - fn quote(&self) -> TokenStream { - macro_rules! gen_match { - ($($i:ident),*; $($raw:ident),*) => { - match *self { - $( Lit::$i(lit) => quote!(::syntax::parse::token::Lit::$i((quote lit))), )* - $( Lit::$raw(lit, n) => { - quote!(::syntax::parse::token::Lit::$raw((quote lit), (quote n))) - })* - } - } - } - - gen_match!(Byte, Char, Float, Str_, Integer, ByteStr; StrRaw, ByteStrRaw) - } -} - -impl Quote for token::DelimToken { - fn quote(&self) -> TokenStream { - macro_rules! gen_match { - ($($i:ident),*) => { - match *self { - $(token::DelimToken::$i => { quote!(::syntax::parse::token::DelimToken::$i) })* - } - } - } - - gen_match!(Paren, Bracket, Brace, NoDelim) - } -} diff --git a/src/librustc/ich/impls_syntax.rs b/src/librustc/ich/impls_syntax.rs index b9cc3b5fb937f..b827284271ed2 100644 --- a/src/librustc/ich/impls_syntax.rs +++ b/src/librustc/ich/impls_syntax.rs @@ -283,8 +283,7 @@ fn hash_token<'a, 'gcx, 'tcx, W: StableHasherResult>(token: &token::Token, } token::Token::Ident(ident) | - token::Token::Lifetime(ident) | - token::Token::SubstNt(ident) => ident.name.hash_stable(hcx, hasher), + token::Token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher), token::Token::Interpolated(ref non_terminal) => { // FIXME(mw): This could be implemented properly. It's just a diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index e27990c29cf9e..e6dc5da969a88 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -728,6 +728,7 @@ pub fn check_unused_or_stable_features<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let ref declared_lib_features = sess.features.borrow().declared_lib_features; let mut remaining_lib_features: FxHashMap = declared_lib_features.clone().into_iter().collect(); + remaining_lib_features.remove(&Symbol::intern("proc_macro")); fn format_stable_since_msg(version: &str) -> String { format!("this feature has been stable since {}. 
Attribute no longer needed", version) diff --git a/src/librustc_driver/Cargo.toml b/src/librustc_driver/Cargo.toml index 2e949f48c175e..0b950787e3b91 100644 --- a/src/librustc_driver/Cargo.toml +++ b/src/librustc_driver/Cargo.toml @@ -13,7 +13,6 @@ arena = { path = "../libarena" } graphviz = { path = "../libgraphviz" } log = { version = "0.3", features = ["release_max_level_info"] } env_logger = { version = "0.4", default-features = false } -proc_macro_plugin = { path = "../libproc_macro_plugin" } rustc = { path = "../librustc" } rustc_back = { path = "../librustc_back" } rustc_borrowck = { path = "../librustc_borrowck" } diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index 502eab44dac52..54138e2e3b028 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -33,6 +33,7 @@ use std::rc::Rc; use syntax::ast; use syntax::attr; +use syntax::ext::base::SyntaxExtension; use syntax::parse::filemap_to_stream; use syntax::symbol::Symbol; use syntax_pos::{Span, NO_EXPANSION}; @@ -365,6 +366,10 @@ impl CrateStore for cstore::CStore { let data = self.get_crate_data(id.krate); if let Some(ref proc_macros) = data.proc_macros { return LoadedMacro::ProcMacro(proc_macros[id.index.as_usize() - 1].1.clone()); + } else if data.name == "proc_macro" && + self.get_crate_data(id.krate).item_name(id.index) == "quote" { + let ext = SyntaxExtension::ProcMacro(Box::new(::proc_macro::__internal::Quoter)); + return LoadedMacro::ProcMacro(Rc::new(ext)); } let (name, def) = data.get_macro(id.index); @@ -372,7 +377,7 @@ impl CrateStore for cstore::CStore { let filemap = sess.parse_sess.codemap().new_filemap(source_name, def.body); let local_span = Span { lo: filemap.start_pos, hi: filemap.end_pos, ctxt: NO_EXPANSION }; - let body = filemap_to_stream(&sess.parse_sess, filemap); + let body = filemap_to_stream(&sess.parse_sess, filemap, None); // Mark the attrs as used let attrs = data.get_item_attrs(id.index, &self.dep_graph); diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 3e6d06ec86ff8..ad3a9dd9fefaf 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -472,7 +472,7 @@ impl<'a, 'tcx> CrateMetadata { } } - fn item_name(&self, item_index: DefIndex) -> ast::Name { + pub fn item_name(&self, item_index: DefIndex) -> ast::Name { self.def_key(item_index) .disambiguated_data .data diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index e9701b95002d4..f74aac255a039 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -1129,6 +1129,7 @@ impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> { /// Serialize the text of exported macros fn encode_info_for_macro_def(&mut self, macro_def: &hir::MacroDef) -> Entry<'tcx> { use syntax::print::pprust; + let def_id = self.tcx.hir.local_def_id(macro_def.id); Entry { kind: EntryKind::MacroDef(self.lazy(&MacroDef { body: pprust::tts_to_string(¯o_def.body.trees().collect::>()), @@ -1136,11 +1137,11 @@ impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> { })), visibility: self.lazy(&ty::Visibility::Public), span: self.lazy(¯o_def.span), - attributes: self.encode_attributes(¯o_def.attrs), + stability: self.encode_stability(def_id), + deprecation: self.encode_deprecation(def_id), + children: LazySeq::empty(), - stability: None, - deprecation: None, ty: None, inherent_impls: LazySeq::empty(), variances: LazySeq::empty(), diff --git a/src/librustdoc/html/highlight.rs 
b/src/librustdoc/html/highlight.rs index de8749c43d95c..89a40b0db9662 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -319,7 +319,7 @@ impl<'a> Classifier<'a> { token::Lifetime(..) => Class::Lifetime, token::Underscore | token::Eof | token::Interpolated(..) | - token::SubstNt(..) | token::Tilde | token::At => Class::None, + token::Tilde | token::At => Class::None, }; // Anything that didn't return above is the simple case where we the diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index ecab801d40853..d00e29d954fc6 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -99,7 +99,7 @@ impl Path { pub fn default_to_global(mut self) -> Path { if !self.is_global() && !::parse::token::Ident(self.segments[0].identifier).is_path_segment_keyword() { - self.segments.insert(0, PathSegment::crate_root()); + self.segments.insert(0, PathSegment::crate_root(self.span)); } self } @@ -133,10 +133,10 @@ impl PathSegment { pub fn from_ident(ident: Ident, span: Span) -> Self { PathSegment { identifier: ident, span: span, parameters: None } } - pub fn crate_root() -> Self { + pub fn crate_root(span: Span) -> Self { PathSegment { - identifier: keywords::CrateRoot.ident(), - span: DUMMY_SP, + identifier: Ident { ctxt: span.ctxt, ..keywords::CrateRoot.ident() }, + span: span, parameters: None, } } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 8e63e219c42c1..f0fc849c0c596 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -1057,7 +1057,7 @@ impl MetaItem { { let (mut span, name) = match tokens.next() { Some(TokenTree::Token(span, Token::Ident(ident))) => (span, ident.name), - Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match **nt { + Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match nt.0 { token::Nonterminal::NtIdent(ident) => (ident.span, ident.node.name), token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()), _ => return None, @@ -1229,7 +1229,7 @@ impl LitKind { match token { Token::Ident(ident) if ident.name == "true" => Some(LitKind::Bool(true)), Token::Ident(ident) if ident.name == "false" => Some(LitKind::Bool(false)), - Token::Interpolated(ref nt) => match **nt { + Token::Interpolated(ref nt) => match nt.0 { token::NtExpr(ref v) => match v.node { ExprKind::Lit(ref lit) => Some(lit.node.clone()), _ => None, diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 8089fad5f36d8..4881170c1d13a 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -215,7 +215,7 @@ impl TTMacroExpander for F impl Folder for AvoidInterpolatedIdents { fn fold_tt(&mut self, tt: tokenstream::TokenTree) -> tokenstream::TokenTree { if let tokenstream::TokenTree::Token(_, token::Interpolated(ref nt)) = tt { - if let token::NtIdent(ident) = **nt { + if let token::NtIdent(ident) = nt.0 { return tokenstream::TokenTree::Token(ident.span, token::Ident(ident.node)); } } @@ -578,7 +578,10 @@ impl SyntaxExtension { pub fn is_modern(&self) -> bool { match *self { - SyntaxExtension::DeclMacro(..) => true, + SyntaxExtension::DeclMacro(..) | + SyntaxExtension::ProcMacro(..) | + SyntaxExtension::AttrProcMacro(..) | + SyntaxExtension::ProcMacroDerive(..) 
=> true, _ => false, } } @@ -903,17 +906,3 @@ pub fn get_exprs_from_tts(cx: &mut ExtCtxt, } Some(es) } - -pub struct ChangeSpan { - pub span: Span -} - -impl Folder for ChangeSpan { - fn new_span(&mut self, _sp: Span) -> Span { - self.span - } - - fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { - fold::noop_fold_mac(mac, self) - } -} diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 412a34932087d..1eb749623d8ae 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -320,7 +320,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { let last_identifier = idents.pop().unwrap(); let mut segments: Vec = Vec::new(); if global { - segments.push(ast::PathSegment::crate_root()); + segments.push(ast::PathSegment::crate_root(sp)); } segments.extend(idents.into_iter().map(|i| ast::PathSegment::from_ident(i, sp))); diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index f8a26287bd47b..d2e51c9cb4868 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -16,20 +16,20 @@ use config::{is_test_or_bench, StripUnconfigured}; use errors::FatalError; use ext::base::*; use ext::derive::{add_derived_markers, collect_derives}; -use ext::hygiene::Mark; +use ext::hygiene::{Mark, SyntaxContext}; use ext::placeholders::{placeholder, PlaceholderExpander}; use feature_gate::{self, Features, is_builtin_attr}; use fold; use fold::*; -use parse::{filemap_to_stream, ParseSess, DirectoryOwnership, PResult, token}; +use parse::{DirectoryOwnership, PResult}; +use parse::token::{self, Token}; use parse::parser::Parser; -use print::pprust; use ptr::P; use std_inject; use symbol::Symbol; use symbol::keywords; use syntax_pos::{Span, DUMMY_SP}; -use tokenstream::TokenStream; +use tokenstream::{TokenStream, TokenTree}; use util::small_vector::SmallVector; use visit::Visitor; @@ -427,11 +427,13 @@ impl<'a, 'b> MacroExpander<'a, 'b> { kind.expect_from_annotatables(items) } SyntaxExtension::AttrProcMacro(ref mac) => { - let item_toks = stream_for_item(&item, self.cx.parse_sess); - - let span = Span { ctxt: self.cx.backtrace(), ..attr.span }; - let tok_result = mac.expand(self.cx, attr.span, attr.tokens, item_toks); - self.parse_expansion(tok_result, kind, &attr.path, span) + let item_tok = TokenTree::Token(DUMMY_SP, Token::interpolated(match item { + Annotatable::Item(item) => token::NtItem(item), + Annotatable::TraitItem(item) => token::NtTraitItem(item.unwrap()), + Annotatable::ImplItem(item) => token::NtImplItem(item.unwrap()), + })).into(); + let tok_result = mac.expand(self.cx, attr.span, attr.tokens, item_tok); + self.parse_expansion(tok_result, kind, &attr.path, attr.span) } SyntaxExtension::ProcMacroDerive(..) | SyntaxExtension::BuiltinDerive(..) 
=> { self.cx.span_err(attr.span, &format!("`{}` is a derive mode", attr.path)); @@ -470,7 +472,6 @@ impl<'a, 'b> MacroExpander<'a, 'b> { Ok(()) }; - let marked_tts = noop_fold_tts(mac.node.stream(), &mut Marker(mark)); let opt_expanded = match *ext { SyntaxExtension::DeclMacro(ref expand, def_span) => { if let Err(msg) = validate_and_set_expn_info(def_span.map(|(_, s)| s), @@ -478,7 +479,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { self.cx.span_err(path.span, &msg); return kind.dummy(span); } - kind.make_from(expand.expand(self.cx, span, marked_tts)) + kind.make_from(expand.expand(self.cx, span, mac.node.stream())) } NormalTT(ref expandfun, def_info, allow_internal_unstable) => { @@ -487,7 +488,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { self.cx.span_err(path.span, &msg); return kind.dummy(span); } - kind.make_from(expandfun.expand(self.cx, span, marked_tts)) + kind.make_from(expandfun.expand(self.cx, span, mac.node.stream())) } IdentTT(ref expander, tt_span, allow_internal_unstable) => { @@ -506,7 +507,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } }); - let input: Vec<_> = marked_tts.into_trees().collect(); + let input: Vec<_> = mac.node.stream().into_trees().collect(); kind.make_from(expander.expand(self.cx, span, ident, input)) } @@ -541,21 +542,17 @@ impl<'a, 'b> MacroExpander<'a, 'b> { }, }); - let tok_result = expandfun.expand(self.cx, span, marked_tts); + let tok_result = expandfun.expand(self.cx, span, mac.node.stream()); Some(self.parse_expansion(tok_result, kind, path, span)) } }; - let expanded = if let Some(expanded) = opt_expanded { - expanded - } else { + unwrap_or!(opt_expanded, { let msg = format!("non-{kind} macro in {kind} position: {name}", name = path.segments[0].identifier.name, kind = kind.name()); self.cx.span_err(path.span, &msg); - return kind.dummy(span); - }; - - expanded.fold_with(&mut Marker(mark)) + kind.dummy(span) + }) } /// Expand a derive invocation. Returns the result of expansion. @@ -621,8 +618,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } }; parser.ensure_complete_parse(path, kind.name(), span); - // FIXME better span info - expansion.fold_with(&mut ChangeSpan { span: span }) + expansion } } @@ -673,7 +669,9 @@ impl<'a> Parser<'a> { if self.token != token::Eof { let msg = format!("macro expansion ignores token `{}` and any following", self.this_token_to_string()); - let mut err = self.diagnostic().struct_span_err(self.span, &msg); + let mut def_site_span = self.span; + def_site_span.ctxt = SyntaxContext::empty(); // Avoid emitting backtrace info twice. + let mut err = self.diagnostic().struct_span_err(def_site_span, &msg); let msg = format!("caused by the macro expansion here; the usage \ of `{}!` is likely invalid in {} context", macro_path, kind_name); @@ -773,28 +771,6 @@ pub fn find_attr_invoc(attrs: &mut Vec) -> Option TokenStream { - let text = match *item { - Annotatable::Item(ref i) => pprust::item_to_string(i), - Annotatable::TraitItem(ref ti) => pprust::trait_item_to_string(ti), - Annotatable::ImplItem(ref ii) => pprust::impl_item_to_string(ii), - }; - string_to_stream(text, parse_sess) -} - -fn string_to_stream(text: String, parse_sess: &ParseSess) -> TokenStream { - let filename = String::from(""); - filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, text)) -} - impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { fn fold_expr(&mut self, expr: P) -> P { let mut expr = self.cfg.configure_expr(expr).unwrap(); @@ -1070,7 +1046,7 @@ impl<'feat> ExpansionConfig<'feat> { } // A Marker adds the given mark to the syntax context. 
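// (Sketch, not part of this patch: a `Marker` is applied with the fold calls
// this patch removes, e.g. `noop_fold_tts(stream, &mut Marker(mark))`, where
// `stream` is a `TokenStream` and `mark` a `Mark`.)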
-struct Marker(Mark); +pub struct Marker(pub Mark); impl Folder for Marker { fn fold_ident(&mut self, mut ident: Ident) -> Ident { diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index f8fac847a053e..9907dfe341e75 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -30,9 +30,9 @@ pub mod rt { use ast; use codemap::Spanned; use ext::base::ExtCtxt; - use parse::{self, token, classify}; + use parse::{self, classify}; + use parse::token::{self, Token}; use ptr::P; - use std::rc::Rc; use symbol::Symbol; use tokenstream::{self, TokenTree, TokenStream}; @@ -82,70 +82,70 @@ pub mod rt { impl ToTokens for ast::Path { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtPath(self.clone()); - vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))] } } impl ToTokens for ast::Ty { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtTy(P(self.clone())); - vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for ast::Block { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtBlock(P(self.clone())); - vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for ast::Generics { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtGenerics(self.clone()); - vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))] } } impl ToTokens for ast::WhereClause { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtWhereClause(self.clone()); - vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))] } } impl ToTokens for P { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtItem(self.clone()); - vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for ast::ImplItem { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtImplItem(self.clone()); - vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for P { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtImplItem((**self).clone()); - vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for ast::TraitItem { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtTraitItem(self.clone()); - vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for ast::Stmt { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtStmt(self.clone()); - let mut tts = vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]; + let mut tts = vec![TokenTree::Token(self.span, Token::interpolated(nt))]; // Some statements require a trailing semicolon. 
if classify::stmt_ends_with_semi(&self.node) { @@ -159,35 +159,35 @@ pub mod rt { impl ToTokens for P { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtExpr(self.clone()); - vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for P { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtPat(self.clone()); - vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for ast::Arm { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtArm(self.clone()); - vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))] } } impl ToTokens for ast::Arg { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtArg(self.clone()); - vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))] } } impl ToTokens for P { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtBlock(self.clone()); - vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))] } } @@ -215,7 +215,7 @@ pub mod rt { impl ToTokens for ast::MetaItem { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtMeta(self.clone()); - vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))] } } @@ -364,7 +364,7 @@ pub mod rt { fn parse_tts(&self, s: String) -> Vec { let source_name = "".to_owned(); - parse::parse_stream_from_source_str(source_name, s, self.parse_sess()) + parse::parse_stream_from_source_str(source_name, s, self.parse_sess(), None) .into_trees().collect() } } @@ -700,7 +700,7 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P { token::Underscore => "Underscore", token::Eof => "Eof", - token::Whitespace | token::SubstNt(_) | token::Comment | token::Shebang(_) => { + token::Whitespace | token::Comment | token::Shebang(_) => { panic!("unhandled token in quote!"); } }; diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 61d8fc2941afb..e877f1fedd409 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -158,15 +158,10 @@ pub type NamedParseResult = ParseResult>>; pub fn count_names(ms: &[TokenTree]) -> usize { ms.iter().fold(0, |count, elt| { count + match *elt { - TokenTree::Sequence(_, ref seq) => { - seq.num_captures - } - TokenTree::Delimited(_, ref delim) => { - count_names(&delim.tts) - } - TokenTree::MetaVarDecl(..) => { - 1 - } + TokenTree::Sequence(_, ref seq) => seq.num_captures, + TokenTree::Delimited(_, ref delim) => count_names(&delim.tts), + TokenTree::MetaVar(..) => 0, + TokenTree::MetaVarDecl(..) => 1, TokenTree::Token(..) => 0, } }) @@ -244,7 +239,7 @@ fn nameize>(sess: &ParseSess, ms: &[TokenTree], mut } } } - TokenTree::Token(..) => (), + TokenTree::MetaVar(..) | TokenTree::Token(..) => (), } Ok(()) @@ -409,12 +404,11 @@ fn inner_parse_loop(sess: &ParseSess, ei.idx = 0; cur_eis.push(ei); } - TokenTree::Token(_, ref t) => { - if token_name_eq(t, token) { - ei.idx += 1; - next_eis.push(ei); - } + TokenTree::Token(_, ref t) if token_name_eq(t, token) => { + ei.idx += 1; + next_eis.push(ei); } + TokenTree::Token(..) | TokenTree::MetaVar(..) 
=> {} } } } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 9c728c9f2ebf0..b732f47ce6a93 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -120,7 +120,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt, _ => cx.span_bug(sp, "malformed macro rhs"), }; // rhs has holes (`$id` and `$(...)`) that need to be filled - let tts = transcribe(&cx.parse_sess.span_diagnostic, Some(named_matches), rhs); + let tts = transcribe(cx, Some(named_matches), rhs); if cx.trace_macros() { trace_macros_note(cx, sp, format!("to `{}`", tts)); @@ -292,7 +292,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool { use self::quoted::TokenTree; for tt in tts { match *tt { - TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => (), + TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => (), TokenTree::Delimited(_, ref del) => if !check_lhs_no_empty_seq(sess, &del.tts) { return false; }, @@ -372,7 +372,7 @@ impl FirstSets { let mut first = TokenSet::empty(); for tt in tts.iter().rev() { match *tt { - TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => { + TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => { first.replace_with(tt.clone()); } TokenTree::Delimited(span, ref delimited) => { @@ -432,7 +432,7 @@ impl FirstSets { for tt in tts.iter() { assert!(first.maybe_empty); match *tt { - TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => { + TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => { first.add_one(tt.clone()); return first; } @@ -602,7 +602,7 @@ fn check_matcher_core(sess: &ParseSess, // First, update `last` so that it corresponds to the set // of NT tokens that might end the sequence `... token`. match *token { - TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => { + TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => { let can_be_followed_by_any; if let Err(bad_frag) = has_legal_fragment_specifier(sess, features, token) { let msg = format!("invalid fragment specifier `{}`", bad_frag); @@ -872,6 +872,7 @@ fn is_legal_fragment_specifier(sess: &ParseSess, fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String { match *tt { quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok), + quoted::TokenTree::MetaVar(_, name) => format!("${}", name), quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind), _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \ in follow set checker"), diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index c094a23cefc4a..4e9e30857b1e8 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -78,9 +78,11 @@ pub enum TokenTree { Token(Span, token::Token), Delimited(Span, Rc<Delimited>), - /// A kleene-style repetition sequence with a span + /// A kleene-style repetition sequence Sequence(Span, Rc<SequenceRepetition>), - /// Matches a nonterminal. This is only used in the left hand side of MBE macros. + /// E.g. `$var` + MetaVar(Span, ast::Ident), + /// E.g. `$var:expr`. This is only used in the left hand side of MBE macros.
MetaVarDecl(Span, ast::Ident /* name to bind */, ast::Ident /* kind of nonterminal */), } @@ -130,6 +132,7 @@ impl TokenTree { pub fn span(&self) -> Span { match *self { TokenTree::Token(sp, _) | + TokenTree::MetaVar(sp, _) | TokenTree::MetaVarDecl(sp, _, _) | TokenTree::Delimited(sp, _) | TokenTree::Sequence(sp, _) => sp, @@ -144,7 +147,7 @@ pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &Pars while let Some(tree) = trees.next() { let tree = parse_tree(tree, &mut trees, expect_matchers, sess); match tree { - TokenTree::Token(start_sp, token::SubstNt(ident)) if expect_matchers => { + TokenTree::MetaVar(start_sp, ident) if expect_matchers => { let span = match trees.next() { Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() { Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() { @@ -199,13 +202,13 @@ fn parse_tree(tree: tokenstream::TokenTree, let ident = ast::Ident { name: keywords::DollarCrate.name(), ..ident }; TokenTree::Token(span, token::Ident(ident)) } else { - TokenTree::Token(span, token::SubstNt(ident)) + TokenTree::MetaVar(span, ident) } } Some(tokenstream::TokenTree::Token(span, tok)) => { let msg = format!("expected identifier, found `{}`", pprust::token_to_string(&tok)); sess.span_diagnostic.span_err(span, &msg); - TokenTree::Token(span, token::SubstNt(keywords::Invalid.ident())) + TokenTree::MetaVar(span, keywords::Invalid.ident()) } None => TokenTree::Token(span, token::Dollar), }, diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 78e755e73fa30..fe3dd83f9d5c0 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -9,10 +9,12 @@ // except according to those terms. use ast::Ident; -use errors::Handler; +use ext::base::ExtCtxt; +use ext::expand::Marker; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; use ext::tt::quoted; -use parse::token::{self, SubstNt, Token, NtTT}; +use fold::noop_fold_tt; +use parse::token::{self, Token, NtTT}; use syntax_pos::{Span, DUMMY_SP}; use tokenstream::{TokenStream, TokenTree, Delimited}; use util::small_vector::SmallVector; @@ -61,9 +63,9 @@ impl Iterator for Frame { } /// This can do Macro-By-Example transcription. On the other hand, if -/// `src` contains no `TokenTree::{Sequence, Match}`s, or `SubstNt`s, `interp` can +/// `src` contains no `TokenTree::{Sequence, MetaVar, MetaVarDecl}`s, `interp` can /// (and should) be None. 
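Before the signature change below, the job of `transcribe` in toy form (a hedged sketch; the `Tree` type and `bindings` map are simplified assumptions, not the real `NamedMatch` machinery): walk the macro RHS and splice matched fragments in place of metavariables, keeping unbound `$name`s as literal tokens, which is the behavior this patch adopts.

    use std::collections::HashMap;

    #[derive(Clone, Debug, PartialEq)]
    enum Tree {
        Token(String),
        MetaVar(String),
    }

    fn transcribe(bindings: &HashMap<String, Vec<Tree>>, rhs: &[Tree]) -> Vec<Tree> {
        let mut out = Vec::new();
        for tree in rhs {
            match *tree {
                Tree::MetaVar(ref name) => match bindings.get(name) {
                    // A bound metavariable is replaced by its matched fragment.
                    Some(fragment) => out.extend(fragment.iter().cloned()),
                    // An unbound `$name` is kept as literal tokens.
                    None => out.push(Tree::Token(format!("${}", name))),
                },
                ref other => out.push(other.clone()),
            }
        }
        out
    }

    fn main() {
        let mut bindings = HashMap::new();
        bindings.insert("e".to_string(),
                        vec![Tree::Token("1".into()), Tree::Token("+".into()),
                             Tree::Token("2".into())]);
        let rhs = vec![Tree::Token("let x =".into()), Tree::MetaVar("e".into())];
        assert_eq!(transcribe(&bindings, &rhs),
                   vec![Tree::Token("let x =".into()), Tree::Token("1".into()),
                        Tree::Token("+".into()), Tree::Token("2".into())]);
    }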
-pub fn transcribe(sp_diag: &Handler, +pub fn transcribe(cx: &ExtCtxt, interp: Option>>, src: Vec) -> TokenStream { @@ -120,22 +122,20 @@ pub fn transcribe(sp_diag: &Handler, &interpolations, &repeats) { LockstepIterSize::Unconstrained => { - panic!(sp_diag.span_fatal( - sp, /* blame macro writer */ + cx.span_fatal(sp, /* blame macro writer */ "attempted to repeat an expression \ containing no syntax \ - variables matched as repeating at this depth")); + variables matched as repeating at this depth"); } LockstepIterSize::Contradiction(ref msg) => { // FIXME #2887 blame macro invoker instead - panic!(sp_diag.span_fatal(sp, &msg[..])); + cx.span_fatal(sp, &msg[..]); } LockstepIterSize::Constraint(len, _) => { if len == 0 { if seq.op == quoted::KleeneOp::OneOrMore { // FIXME #2887 blame invoker - panic!(sp_diag.span_fatal(sp, - "this must repeat at least once")); + cx.span_fatal(sp, "this must repeat at least once"); } } else { repeats.push((0, len)); @@ -149,29 +149,37 @@ pub fn transcribe(sp_diag: &Handler, } } // FIXME #2887: think about span stuff here - quoted::TokenTree::Token(sp, SubstNt(ident)) => { - match lookup_cur_matched(ident, &interpolations, &repeats) { - None => result.push(TokenTree::Token(sp, SubstNt(ident)).into()), - Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched { - match **nt { - NtTT(ref tt) => result.push(tt.clone().into()), - _ => { - let token = TokenTree::Token(sp, token::Interpolated(nt.clone())); - result.push(token.into()); - } + quoted::TokenTree::MetaVar(mut sp, ident) => { + if let Some(cur_matched) = lookup_cur_matched(ident, &interpolations, &repeats) { + if let MatchedNonterminal(ref nt) = *cur_matched { + if let NtTT(ref tt) = **nt { + result.push(tt.clone().into()); + } else { + sp.ctxt = sp.ctxt.apply_mark(cx.current_expansion.mark); + let token = TokenTree::Token(sp, Token::interpolated((**nt).clone())); + result.push(token.into()); } } else { - panic!(sp_diag.span_fatal( - sp, /* blame the macro writer */ - &format!("variable '{}' is still repeating at this depth", ident))); + cx.span_fatal(sp, /* blame the macro writer */ + &format!("variable '{}' is still repeating at this depth", ident)); } + } else { + let ident = + Ident { ctxt: ident.ctxt.apply_mark(cx.current_expansion.mark), ..ident }; + sp.ctxt = sp.ctxt.apply_mark(cx.current_expansion.mark); + result.push(TokenTree::Token(sp, token::Dollar).into()); + result.push(TokenTree::Token(sp, token::Ident(ident)).into()); } } - quoted::TokenTree::Delimited(span, delimited) => { + quoted::TokenTree::Delimited(mut span, delimited) => { + span.ctxt = span.ctxt.apply_mark(cx.current_expansion.mark); stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span }); result_stack.push(mem::replace(&mut result, Vec::new())); } - quoted::TokenTree::Token(span, tok) => result.push(TokenTree::Token(span, tok).into()), + quoted::TokenTree::Token(sp, tok) => { + let mut marker = Marker(cx.current_expansion.mark); + result.push(noop_fold_tt(TokenTree::Token(sp, tok), &mut marker).into()) + } quoted::TokenTree::MetaVarDecl(..) 
=> panic!("unexpected `TokenTree::MetaVarDecl"), } } @@ -240,7 +248,7 @@ fn lockstep_iter_size(tree: "ed::TokenTree, size + lockstep_iter_size(tt, interpolations, repeats) }) }, - TokenTree::Token(_, SubstNt(name)) | TokenTree::MetaVarDecl(_, name, _) => + TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl(_, name, _) => match lookup_cur_matched(name, interpolations, repeats) { Some(matched) => match *matched { MatchedNonterminal(_) => LockstepIterSize::Unconstrained, diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index df8ee189d21b3..4b0ec1a20e331 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -38,12 +38,19 @@ use symbol::Symbol; use std::ascii::AsciiExt; use std::env; -macro_rules! setter { +macro_rules! set { + (proc_macro) => {{ + fn f(features: &mut Features, span: Span) { + features.declared_lib_features.push((Symbol::intern("proc_macro"), span)); + features.proc_macro = true; + } + f as fn(&mut Features, Span) + }}; ($field: ident) => {{ - fn f(features: &mut Features) -> &mut bool { - &mut features.$field + fn f(features: &mut Features, _: Span) { + features.$field = true; } - f as fn(&mut Features) -> &mut bool + f as fn(&mut Features, Span) }} } @@ -51,10 +58,9 @@ macro_rules! declare_features { ($((active, $feature: ident, $ver: expr, $issue: expr),)+) => { /// Represents active features that are currently being implemented or /// currently being considered for addition/removal. - const ACTIVE_FEATURES: &'static [(&'static str, &'static str, - Option, fn(&mut Features) -> &mut bool)] = &[ - $((stringify!($feature), $ver, $issue, setter!($feature))),+ - ]; + const ACTIVE_FEATURES: + &'static [(&'static str, &'static str, Option, fn(&mut Features, Span))] = + &[$((stringify!($feature), $ver, $issue, set!($feature))),+]; /// A set of features to be used by later passes. pub struct Features { @@ -1478,9 +1484,9 @@ pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute]) -> F continue }; - if let Some(&(_, _, _, setter)) = ACTIVE_FEATURES.iter() + if let Some(&(_, _, _, set)) = ACTIVE_FEATURES.iter() .find(|& &(n, _, _, _)| name == n) { - *(setter(&mut features)) = true; + set(&mut features, mi.span); feature_checker.collect(&features, mi.span); } else if let Some(&(_, _, _)) = REMOVED_FEATURES.iter() @@ -1514,7 +1520,7 @@ struct MutexFeatureChecker { impl MutexFeatureChecker { // If this method turns out to be a hotspot due to branching, - // the branching can be eliminated by modifying `setter!()` to set these spans + // the branching can be eliminated by modifying `set!()` to set these spans // only for the features that need to be checked for mutual exclusion. 
fn collect(&mut self, features: &Features, span: Span) { if features.proc_macro { diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 4c6cf49a8db43..1fc670ec9f7fb 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -22,7 +22,7 @@ use ast::*; use ast; use syntax_pos::Span; use codemap::{Spanned, respan}; -use parse::token; +use parse::token::{self, Token}; use ptr::P; use symbol::keywords; use tokenstream::*; @@ -573,7 +573,7 @@ pub fn noop_fold_tt(tt: TokenTree, fld: &mut T) -> TokenTree { } pub fn noop_fold_tts(tts: TokenStream, fld: &mut T) -> TokenStream { - tts.trees().map(|tt| fld.fold_tt(tt)).collect() + tts.map(|tt| fld.fold_tt(tt)) } // apply ident folder if it's an ident, apply other folds to interpolated nodes @@ -586,9 +586,8 @@ pub fn noop_fold_token(t: token::Token, fld: &mut T) -> token::Token Ok(nt) => nt, Err(nt) => (*nt).clone(), }; - token::Interpolated(Rc::new(fld.fold_interpolated(nt))) + Token::interpolated(fld.fold_interpolated(nt.0)) } - token::SubstNt(ident) => token::SubstNt(fld.fold_ident(ident)), _ => t } } diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 082930777e598..c99a09ab24e6b 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -151,7 +151,7 @@ impl<'a> Parser<'a> { pub fn parse_path_and_tokens(&mut self) -> PResult<'a, (ast::Path, TokenStream)> { let meta = match self.token { - token::Interpolated(ref nt) => match **nt { + token::Interpolated(ref nt) => match nt.0 { Nonterminal::NtMeta(ref meta) => Some(meta.clone()), _ => None, }, @@ -223,7 +223,7 @@ impl<'a> Parser<'a> { /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ; pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> { let nt_meta = match self.token { - token::Interpolated(ref nt) => match **nt { + token::Interpolated(ref nt) => match nt.0 { token::NtMeta(ref e) => Some(e.clone()), _ => None, }, diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index a35b278a4b064..09cdf26bf1fff 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -66,14 +66,15 @@ pub struct StringReader<'a> { token: token::Token, span: Span, open_braces: Vec<(token::DelimToken, Span)>, -} - -fn mk_sp(lo: BytePos, hi: BytePos) -> Span { - Span { lo: lo, hi: hi, ctxt: NO_EXPANSION } + pub override_span: Option, } impl<'a> StringReader<'a> { - fn next_token(&mut self) -> TokenAndSpan { + fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span { + unwrap_or!(self.override_span, Span { lo: lo, hi: hi, ctxt: NO_EXPANSION}) + } + + fn next_token(&mut self) -> TokenAndSpan where Self: Sized { let res = self.try_next_token(); self.unwrap_or_abort(res) } @@ -175,6 +176,7 @@ impl<'a> StringReader<'a> { token: token::Eof, span: syntax_pos::DUMMY_SP, open_braces: Vec::new(), + override_span: None, } } @@ -229,12 +231,12 @@ impl<'a> StringReader<'a> { /// Report a fatal error spanning [`from_pos`, `to_pos`). fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> FatalError { - self.fatal_span(mk_sp(from_pos, to_pos), m) + self.fatal_span(self.mk_sp(from_pos, to_pos), m) } /// Report a lexical error spanning [`from_pos`, `to_pos`). 
fn err_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) { - self.err_span(mk_sp(from_pos, to_pos), m) + self.err_span(self.mk_sp(from_pos, to_pos), m) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -258,7 +260,7 @@ impl<'a> StringReader<'a> { for c in c.escape_default() { m.push(c) } - self.sess.span_diagnostic.struct_span_fatal(mk_sp(from_pos, to_pos), &m[..]) + self.sess.span_diagnostic.struct_span_fatal(self.mk_sp(from_pos, to_pos), &m[..]) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -282,7 +284,7 @@ impl<'a> StringReader<'a> { for c in c.escape_default() { m.push(c) } - self.sess.span_diagnostic.struct_span_err(mk_sp(from_pos, to_pos), &m[..]) + self.sess.span_diagnostic.struct_span_err(self.mk_sp(from_pos, to_pos), &m[..]) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the @@ -306,11 +308,11 @@ impl<'a> StringReader<'a> { None => { if self.is_eof() { self.peek_tok = token::Eof; - self.peek_span = mk_sp(self.filemap.end_pos, self.filemap.end_pos); + self.peek_span = self.mk_sp(self.filemap.end_pos, self.filemap.end_pos); } else { let start_bytepos = self.pos; self.peek_tok = self.next_token_inner()?; - self.peek_span = mk_sp(start_bytepos, self.pos); + self.peek_span = self.mk_sp(start_bytepos, self.pos); }; } } @@ -481,7 +483,7 @@ impl<'a> StringReader<'a> { self.with_str_from(start, |string| { if string == "_" { self.sess.span_diagnostic - .struct_span_warn(mk_sp(start, self.pos), + .struct_span_warn(self.mk_sp(start, self.pos), "underscore literal suffix is not allowed") .warn("this was previously accepted by the compiler but is \ being phased out; it will become a hard error in \ @@ -502,7 +504,7 @@ impl<'a> StringReader<'a> { if let Some(c) = self.ch { if c.is_whitespace() { let msg = "called consume_any_line_comment, but there was whitespace"; - self.sess.span_diagnostic.span_err(mk_sp(self.pos, self.pos), msg); + self.sess.span_diagnostic.span_err(self.mk_sp(self.pos, self.pos), msg); } } @@ -545,13 +547,13 @@ impl<'a> StringReader<'a> { Some(TokenAndSpan { tok: tok, - sp: mk_sp(start_bpos, self.pos), + sp: self.mk_sp(start_bpos, self.pos), }) }) } else { Some(TokenAndSpan { tok: token::Comment, - sp: mk_sp(start_bpos, self.pos), + sp: self.mk_sp(start_bpos, self.pos), }) } } @@ -584,7 +586,7 @@ impl<'a> StringReader<'a> { } return Some(TokenAndSpan { tok: token::Shebang(self.name_from(start)), - sp: mk_sp(start, self.pos), + sp: self.mk_sp(start, self.pos), }); } } @@ -612,7 +614,7 @@ impl<'a> StringReader<'a> { } let c = Some(TokenAndSpan { tok: token::Whitespace, - sp: mk_sp(start_bpos, self.pos), + sp: self.mk_sp(start_bpos, self.pos), }); debug!("scanning whitespace: {:?}", c); c @@ -674,7 +676,7 @@ impl<'a> StringReader<'a> { Some(TokenAndSpan { tok: tok, - sp: mk_sp(start_bpos, self.pos), + sp: self.mk_sp(start_bpos, self.pos), }) }) } @@ -869,7 +871,7 @@ impl<'a> StringReader<'a> { let valid = if self.ch_is('{') { self.scan_unicode_escape(delim) && !ascii_only } else { - let span = mk_sp(start, self.pos); + let span = self.mk_sp(start, self.pos); self.sess.span_diagnostic .struct_span_err(span, "incorrect unicode escape sequence") .span_help(span, @@ -907,13 +909,13 @@ impl<'a> StringReader<'a> { }, c); if e == '\r' { - err.span_help(mk_sp(escaped_pos, pos), + err.span_help(self.mk_sp(escaped_pos, pos), "this is an isolated carriage return; consider \ checking your editor and version control \ settings"); } if (e == '{' || e == '}') && !ascii_only { - 
err.span_help(mk_sp(escaped_pos, pos), + err.span_help(self.mk_sp(escaped_pos, pos), "if used in a formatting string, curly braces \ are escaped with `{{` and `}}`"); } diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs index 554a1fcfc71a6..63a396c14db85 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -19,7 +19,9 @@ impl<'a> StringReader<'a> { pub fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> { let mut tts = Vec::new(); while self.token != token::Eof { - tts.push(self.parse_token_tree()?.into()); + let tree = self.parse_token_tree()?; + let is_joint = tree.span().hi == self.span.lo && token::is_op(&self.token); + tts.push(if is_joint { tree.joint() } else { tree.into() }); } Ok(TokenStream::concat(tts)) } @@ -31,13 +33,15 @@ impl<'a> StringReader<'a> { if let token::CloseDelim(..) = self.token { return TokenStream::concat(tts); } - match self.parse_token_tree() { - Ok(tt) => tts.push(tt.into()), + let tree = match self.parse_token_tree() { + Ok(tree) => tree, Err(mut e) => { e.emit(); return TokenStream::concat(tts); } - } + }; + let is_joint = tree.span().hi == self.span.lo && token::is_op(&self.token); + tts.push(if is_joint { tree.joint() } else { tree.into() }); } } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 3a68a6ba7646c..bd9a621c00c00 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -141,9 +141,10 @@ pub fn parse_stmt_from_source_str(name: String, source: String, sess: &ParseSess new_parser_from_source_str(sess, name, source).parse_stmt() } -pub fn parse_stream_from_source_str(name: String, source: String, sess: &ParseSess) - -> TokenStream { - filemap_to_stream(sess, sess.codemap().new_filemap(name, source)) +pub fn parse_stream_from_source_str(name: String, source: String, sess: &ParseSess, + override_span: Option) + -> TokenStream { + filemap_to_stream(sess, sess.codemap().new_filemap(name, source), override_span) } // Create a new parser from a source string @@ -177,7 +178,7 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess, /// Given a filemap and config, return a parser pub fn filemap_to_parser(sess: & ParseSess, filemap: Rc, ) -> Parser { let end_pos = filemap.end_pos; - let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap)); + let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap, None)); if parser.token == token::Eof && parser.span == syntax_pos::DUMMY_SP { parser.span = Span { lo: end_pos, hi: end_pos, ctxt: NO_EXPANSION }; @@ -212,8 +213,10 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option) } /// Given a filemap, produce a sequence of token-trees -pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc) -> TokenStream { +pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc, override_span: Option) + -> TokenStream { let mut srdr = lexer::StringReader::new(sess, filemap); + srdr.override_span = override_span; srdr.real_token(); panictry!(srdr.parse_all_token_trees()) } @@ -684,7 +687,7 @@ mod tests { id: ast::DUMMY_NODE_ID, node: ast::ExprKind::Path(None, ast::Path { span: sp(0, 6), - segments: vec![ast::PathSegment::crate_root(), + segments: vec![ast::PathSegment::crate_root(sp(0, 2)), str2seg("a", 2, 3), str2seg("b", 5, 6)] }), diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 64506c4af4691..c248e20b608fc 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -107,7 +107,7 @@ pub 
enum BlockMode { macro_rules! maybe_whole_expr { ($p:expr) => { if let token::Interpolated(nt) = $p.token.clone() { - match *nt { + match nt.0 { token::NtExpr(ref e) => { $p.bump(); return Ok((*e).clone()); @@ -134,7 +134,7 @@ macro_rules! maybe_whole_expr { macro_rules! maybe_whole { ($p:expr, $constructor:ident, |$x:ident| $e:expr) => { if let token::Interpolated(nt) = $p.token.clone() { - if let token::$constructor($x) = (*nt).clone() { + if let token::$constructor($x) = nt.0.clone() { $p.bump(); return Ok($e); } @@ -1602,7 +1602,7 @@ impl<'a> Parser<'a> { /// Matches token_lit = LIT_INTEGER | ... pub fn parse_lit_token(&mut self) -> PResult<'a, LitKind> { let out = match self.token { - token::Interpolated(ref nt) => match **nt { + token::Interpolated(ref nt) => match nt.0 { token::NtExpr(ref v) => match v.node { ExprKind::Lit(ref lit) => { lit.node.clone() } _ => { return self.unexpected_last(&self.token); } @@ -1761,7 +1761,7 @@ impl<'a> Parser<'a> { }; if is_global { - segments.insert(0, PathSegment::crate_root()); + segments.insert(0, PathSegment::crate_root(lo)); } // Assemble the result. @@ -1775,7 +1775,7 @@ impl<'a> Parser<'a> { /// This is used when parsing derive macro paths in `#[derive]` attributes. pub fn parse_path_allowing_meta(&mut self, mode: PathStyle) -> PResult<'a, ast::Path> { let meta_ident = match self.token { - token::Interpolated(ref nt) => match **nt { + token::Interpolated(ref nt) => match nt.0 { token::NtMeta(ref meta) => match meta.node { ast::MetaItemKind::Word => Some(ast::Ident::with_empty_ctxt(meta.name)), _ => None, @@ -2610,13 +2610,16 @@ impl<'a> Parser<'a> { pub fn process_potential_macro_variable(&mut self) { let ident = match self.token { - token::SubstNt(name) => { + token::Dollar if self.span.ctxt != syntax_pos::hygiene::SyntaxContext::empty() && + self.look_ahead(1, |t| t.is_ident()) => { + self.bump(); + let name = match self.token { token::Ident(ident) => ident, _ => unreachable!() }; self.fatal(&format!("unknown macro variable `{}`", name)).emit(); return } token::Interpolated(ref nt) => { self.meta_var_span = Some(self.span); - match **nt { + match nt.0 { token::NtIdent(ident) => ident, _ => return, } @@ -6168,7 +6171,7 @@ impl<'a> Parser<'a> { // `{foo, bar}`, `::{foo, bar}`, `*`, or `::*`. self.eat(&token::ModSep); let prefix = ast::Path { - segments: vec![PathSegment::crate_root()], + segments: vec![PathSegment::crate_root(lo)], span: lo.to(self.span), }; let view_path_kind = if self.eat(&token::BinOp(token::Star)) { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 75969cf2eb85b..834ac38af9870 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -16,10 +16,12 @@ pub use self::Token::*; use ast::{self}; use ptr::P; +use serialize::{Decodable, Decoder, Encodable, Encoder}; use symbol::keywords; -use tokenstream::TokenTree; +use tokenstream::{TokenStream, TokenTree}; -use std::fmt; +use std::cell::Cell; +use std::{cmp, fmt}; use std::rc::Rc; #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)] @@ -167,14 +169,12 @@ pub enum Token { Underscore, Lifetime(ast::Ident), - /* For interpolation */ - Interpolated(Rc), + // The `LazyTokenStream` is a pure function of the `Nonterminal`, + // and so the `LazyTokenStream` can be ignored by Eq, Hash, etc. + Interpolated(Rc<(Nonterminal, LazyTokenStream)>), // Can be expanded into several tokens. 
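The `LazyTokenStream` paired with each `Nonterminal` here is a memoizing cell; its `force` method appears further down in this file. A minimal sketch of the same pattern, with `String` standing in for `TokenStream`: `Cell<Option<T>>` avoids `RefCell`'s borrow flags at the cost of the take/set dance below.

    use std::cell::Cell;

    struct Lazy(Cell<Option<String>>);

    impl Lazy {
        fn new() -> Self {
            Lazy(Cell::new(None))
        }

        fn force<F: FnOnce() -> String>(&self, f: F) -> String {
            let mut cached = self.0.take();   // temporarily move the value out
            if cached.is_none() {
                cached = Some(f());           // first access: compute
            }
            self.0.set(cached.clone());       // put it back for next time
            cached.unwrap()
        }
    }

    fn main() {
        let lazy = Lazy::new();
        assert_eq!(lazy.force(|| "a + b".to_string()), "a + b");
        // Second access reuses the cached value; the closure is not consulted.
        assert_eq!(lazy.force(|| unreachable!()), "a + b");
    }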
/// Doc comment DocComment(ast::Name), - // In right-hand-sides of MBE macros: - /// A syntactic variable that will be filled in by macro expansion. - SubstNt(ast::Ident), // Junk. These carry no data because we don't really care about the data // they *would* carry, and don't really want to allocate a new ident for @@ -190,6 +190,10 @@ pub enum Token { } impl Token { + pub fn interpolated(nt: Nonterminal) -> Token { + Token::Interpolated(Rc::new((nt, LazyTokenStream::new()))) + } + /// Returns `true` if the token starts with '>'. pub fn is_like_gt(&self) -> bool { match *self { @@ -214,7 +218,7 @@ impl Token { Lt | BinOp(Shl) | // associated path ModSep | // global path Pound => true, // expression attributes - Interpolated(ref nt) => match **nt { + Interpolated(ref nt) => match nt.0 { NtIdent(..) | NtExpr(..) | NtBlock(..) | NtPath(..) => true, _ => false, }, @@ -237,7 +241,7 @@ impl Token { Lifetime(..) | // lifetime bound in trait object Lt | BinOp(Shl) | // associated path ModSep => true, // global path - Interpolated(ref nt) => match **nt { + Interpolated(ref nt) => match nt.0 { NtIdent(..) | NtTy(..) | NtPath(..) => true, _ => false, }, @@ -256,7 +260,7 @@ impl Token { pub fn ident(&self) -> Option { match *self { Ident(ident) => Some(ident), - Interpolated(ref nt) => match **nt { + Interpolated(ref nt) => match nt.0 { NtIdent(ident) => Some(ident.node), _ => None, }, @@ -288,7 +292,7 @@ impl Token { /// Returns `true` if the token is an interpolated path. pub fn is_path(&self) -> bool { if let Interpolated(ref nt) = *self { - if let NtPath(..) = **nt { + if let NtPath(..) = nt.0 { return true; } } @@ -358,6 +362,60 @@ impl Token { } } + pub fn glue(self, joint: Token) -> Option { + Some(match self { + Eq => match joint { + Eq => EqEq, + Gt => FatArrow, + _ => return None, + }, + Lt => match joint { + Eq => Le, + Lt => BinOp(Shl), + Le => BinOpEq(Shl), + BinOp(Minus) => LArrow, + _ => return None, + }, + Gt => match joint { + Eq => Ge, + Gt => BinOp(Shr), + Ge => BinOpEq(Shr), + _ => return None, + }, + Not => match joint { + Eq => Ne, + _ => return None, + }, + BinOp(op) => match joint { + Eq => BinOpEq(op), + BinOp(And) if op == And => AndAnd, + BinOp(Or) if op == Or => OrOr, + Gt if op == Minus => RArrow, + _ => return None, + }, + Dot => match joint { + Dot => DotDot, + DotDot => DotDotDot, + _ => return None, + }, + DotDot => match joint { + Dot => DotDotDot, + _ => return None, + }, + Colon => match joint { + Colon => ModSep, + _ => return None, + }, + + Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot | Comma | + Semi | ModSep | RArrow | LArrow | FatArrow | Pound | Dollar | Question | + OpenDelim(..) | CloseDelim(..) | Underscore => return None, + + Literal(..) | Ident(..) | Lifetime(..) | Interpolated(..) | DocComment(..) | + Whitespace | Comment | Shebang(..) | Eof => return None, + }) + } + /// Returns `true` if the token is either a special identifier or a keyword. pub fn is_reserved_ident(&self) -> bool { self.is_special_ident() || self.is_used_keyword() || self.is_unused_keyword() @@ -411,3 +469,66 @@ impl fmt::Debug for Nonterminal { } } } + +pub fn is_op(tok: &Token) -> bool { + match *tok { + OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) | + Ident(..) | Underscore | Lifetime(..) | Interpolated(..) | + Whitespace | Comment | Shebang(..) 
| Eof => false, + _ => true, + } +} + +pub struct LazyTokenStream(Cell>); + +impl Clone for LazyTokenStream { + fn clone(&self) -> Self { + let opt_stream = self.0.take(); + self.0.set(opt_stream.clone()); + LazyTokenStream(Cell::new(opt_stream)) + } +} + +impl cmp::Eq for LazyTokenStream {} +impl PartialEq for LazyTokenStream { + fn eq(&self, _other: &LazyTokenStream) -> bool { + true + } +} + +impl fmt::Debug for LazyTokenStream { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Debug::fmt(&self.clone().0.into_inner(), f) + } +} + +impl LazyTokenStream { + pub fn new() -> Self { + LazyTokenStream(Cell::new(None)) + } + + pub fn force TokenStream>(&self, f: F) -> TokenStream { + let mut opt_stream = self.0.take(); + if opt_stream.is_none() { + opt_stream = Some(f()); + } + self.0.set(opt_stream.clone()); + opt_stream.clone().unwrap() + } +} + +impl Encodable for LazyTokenStream { + fn encode(&self, _: &mut S) -> Result<(), S::Error> { + Ok(()) + } +} + +impl Decodable for LazyTokenStream { + fn decode(_: &mut D) -> Result { + Ok(LazyTokenStream::new()) + } +} + +impl ::std::hash::Hash for LazyTokenStream { + fn hash(&self, _hasher: &mut H) {} +} diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 6c00e0b9efd7e..d449e412d6cc3 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -270,13 +270,12 @@ pub fn token_to_string(tok: &Token) -> String { /* Other */ token::DocComment(s) => s.to_string(), - token::SubstNt(s) => format!("${}", s), token::Eof => "".to_string(), token::Whitespace => " ".to_string(), token::Comment => "/* */".to_string(), token::Shebang(s) => format!("/* shebang: {}*/", s), - token::Interpolated(ref nt) => match **nt { + token::Interpolated(ref nt) => match nt.0 { token::NtExpr(ref e) => expr_to_string(e), token::NtMeta(ref e) => meta_item_to_string(e), token::NtTy(ref e) => ty_to_string(e), diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index 963482fc223f1..8eee25405df6b 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -138,6 +138,10 @@ impl TokenTree { _ => false, } } + + pub fn joint(self) -> TokenStream { + TokenStream { kind: TokenStreamKind::JointTree(self) } + } } /// # Token Streams @@ -155,6 +159,7 @@ pub struct TokenStream { enum TokenStreamKind { Empty, Tree(TokenTree), + JointTree(TokenTree), Stream(RcSlice), } @@ -199,7 +204,7 @@ impl TokenStream { pub fn concat(mut streams: Vec) -> TokenStream { match streams.len() { 0 => TokenStream::empty(), - 1 => TokenStream::from(streams.pop().unwrap()), + 1 => streams.pop().unwrap(), _ => TokenStream::concat_rc_slice(RcSlice::new(streams)), } } @@ -225,6 +230,105 @@ impl TokenStream { } true } + + /// Precondition: `self` consists of a single token tree. + /// Returns true if the token tree is a joint operation w.r.t. `proc_macro::TokenNode`. + pub fn as_tree(self) -> (TokenTree, bool /* joint? 
*/) { + match self.kind { + TokenStreamKind::Tree(tree) => (tree, false), + TokenStreamKind::JointTree(tree) => (tree, true), + _ => unreachable!(), + } + } + + pub fn map TokenTree>(self, mut f: F) -> TokenStream { + let mut trees = self.into_trees(); + let mut result = Vec::new(); + while let Some(stream) = trees.next_as_stream() { + result.push(match stream.kind { + TokenStreamKind::Tree(tree) => f(tree).into(), + TokenStreamKind::JointTree(tree) => f(tree).joint(), + _ => unreachable!() + }); + } + TokenStream::concat(result) + } + + fn first_tree(&self) -> Option { + match self.kind { + TokenStreamKind::Empty => None, + TokenStreamKind::Tree(ref tree) | + TokenStreamKind::JointTree(ref tree) => Some(tree.clone()), + TokenStreamKind::Stream(ref stream) => stream.first().unwrap().first_tree(), + } + } + + fn last_tree_if_joint(&self) -> Option { + match self.kind { + TokenStreamKind::Empty | TokenStreamKind::Tree(..) => None, + TokenStreamKind::JointTree(ref tree) => Some(tree.clone()), + TokenStreamKind::Stream(ref stream) => stream.last().unwrap().last_tree_if_joint(), + } + } +} + +pub struct TokenStreamBuilder(Vec); + +impl TokenStreamBuilder { + pub fn new() -> TokenStreamBuilder { + TokenStreamBuilder(Vec::new()) + } + + pub fn push>(&mut self, stream: T) { + let stream = stream.into(); + let last_tree_if_joint = self.0.last().and_then(TokenStream::last_tree_if_joint); + if let Some(TokenTree::Token(last_span, last_tok)) = last_tree_if_joint { + if let Some(TokenTree::Token(span, tok)) = stream.first_tree() { + if let Some(glued_tok) = last_tok.glue(tok) { + let last_stream = self.0.pop().unwrap(); + self.push_all_but_last_tree(&last_stream); + let glued_span = last_span.to(span); + self.0.push(TokenTree::Token(glued_span, glued_tok).into()); + self.push_all_but_first_tree(&stream); + return + } + } + } + self.0.push(stream); + } + + pub fn add>(mut self, stream: T) -> Self { + self.push(stream); + self + } + + pub fn build(self) -> TokenStream { + TokenStream::concat(self.0) + } + + fn push_all_but_last_tree(&mut self, stream: &TokenStream) { + if let TokenStreamKind::Stream(ref streams) = stream.kind { + let len = streams.len(); + match len { + 1 => {} + 2 => self.0.push(streams[0].clone().into()), + _ => self.0.push(TokenStream::concat_rc_slice(streams.sub_slice(0 .. len - 1))), + } + self.push_all_but_last_tree(&streams[len - 1]) + } + } + + fn push_all_but_first_tree(&mut self, stream: &TokenStream) { + if let TokenStreamKind::Stream(ref streams) = stream.kind { + let len = streams.len(); + match len { + 1 => {} + 2 => self.0.push(streams[1].clone().into()), + _ => self.0.push(TokenStream::concat_rc_slice(streams.sub_slice(1 .. len))), + } + self.push_all_but_first_tree(&streams[0]) + } + } } #[derive(Clone)] @@ -234,6 +338,7 @@ pub struct Cursor(CursorKind); enum CursorKind { Empty, Tree(TokenTree, bool /* consumed? */), + JointTree(TokenTree, bool /* consumed? 
*/), Stream(StreamCursor), } @@ -244,42 +349,45 @@ struct StreamCursor { stack: Vec<(RcSlice, usize)>, } -impl Iterator for Cursor { - type Item = TokenTree; - - fn next(&mut self) -> Option { - let cursor = match self.0 { - CursorKind::Stream(ref mut cursor) => cursor, - CursorKind::Tree(ref tree, ref mut consumed @ false) => { - *consumed = true; - return Some(tree.clone()); - } - _ => return None, - }; +impl StreamCursor { + fn new(stream: RcSlice) -> Self { + StreamCursor { stream: stream, index: 0, stack: Vec::new() } + } + fn next_as_stream(&mut self) -> Option { loop { - if cursor.index < cursor.stream.len() { - match cursor.stream[cursor.index].kind.clone() { - TokenStreamKind::Tree(tree) => { - cursor.index += 1; - return Some(tree); - } - TokenStreamKind::Stream(stream) => { - cursor.stack.push((mem::replace(&mut cursor.stream, stream), - mem::replace(&mut cursor.index, 0) + 1)); - } - TokenStreamKind::Empty => { - cursor.index += 1; - } + if self.index < self.stream.len() { + self.index += 1; + let next = self.stream[self.index - 1].clone(); + match next.kind { + TokenStreamKind::Tree(..) | TokenStreamKind::JointTree(..) => return Some(next), + TokenStreamKind::Stream(stream) => self.insert(stream), + TokenStreamKind::Empty => {} } - } else if let Some((stream, index)) = cursor.stack.pop() { - cursor.stream = stream; - cursor.index = index; + } else if let Some((stream, index)) = self.stack.pop() { + self.stream = stream; + self.index = index; } else { return None; } } } + + fn insert(&mut self, stream: RcSlice) { + self.stack.push((mem::replace(&mut self.stream, stream), + mem::replace(&mut self.index, 0))); + } +} + +impl Iterator for Cursor { + type Item = TokenTree; + + fn next(&mut self) -> Option { + self.next_as_stream().map(|stream| match stream.kind { + TokenStreamKind::Tree(tree) | TokenStreamKind::JointTree(tree) => tree, + _ => unreachable!() + }) + } } impl Cursor { @@ -287,18 +395,49 @@ impl Cursor { Cursor(match stream.kind { TokenStreamKind::Empty => CursorKind::Empty, TokenStreamKind::Tree(tree) => CursorKind::Tree(tree, false), - TokenStreamKind::Stream(stream) => { - CursorKind::Stream(StreamCursor { stream: stream, index: 0, stack: Vec::new() }) - } + TokenStreamKind::JointTree(tree) => CursorKind::JointTree(tree, false), + TokenStreamKind::Stream(stream) => CursorKind::Stream(StreamCursor::new(stream)), }) } - pub fn original_stream(self) -> TokenStream { + pub fn next_as_stream(&mut self) -> Option { + let (stream, consumed) = match self.0 { + CursorKind::Tree(ref tree, ref mut consumed @ false) => + (tree.clone().into(), consumed), + CursorKind::JointTree(ref tree, ref mut consumed @ false) => + (tree.clone().joint(), consumed), + CursorKind::Stream(ref mut cursor) => return cursor.next_as_stream(), + _ => return None, + }; + + *consumed = true; + Some(stream) + } + + pub fn insert(&mut self, stream: TokenStream) { + match self.0 { + _ if stream.is_empty() => return, + CursorKind::Empty => *self = stream.trees(), + CursorKind::Tree(_, consumed) | CursorKind::JointTree(_, consumed) => { + *self = TokenStream::concat(vec![self.original_stream(), stream]).trees(); + if consumed { + self.next(); + } + } + CursorKind::Stream(ref mut cursor) => { + cursor.insert(ThinTokenStream::from(stream).0.unwrap()); + } + } + } + + pub fn original_stream(&self) -> TokenStream { match self.0 { CursorKind::Empty => TokenStream::empty(), - CursorKind::Tree(tree, _) => tree.into(), - CursorKind::Stream(cursor) => TokenStream::concat_rc_slice({ - 
cursor.stack.get(0).cloned().map(|(stream, _)| stream).unwrap_or(cursor.stream) + CursorKind::Tree(ref tree, _) => tree.clone().into(), + CursorKind::JointTree(ref tree, _) => tree.clone().joint(), + CursorKind::Stream(ref cursor) => TokenStream::concat_rc_slice({ + cursor.stack.get(0).cloned().map(|(stream, _)| stream) + .unwrap_or(cursor.stream.clone()) }), } } @@ -307,8 +446,9 @@ impl Cursor { fn look_ahead(streams: &[TokenStream], mut n: usize) -> Result { for stream in streams { n = match stream.kind { - TokenStreamKind::Tree(ref tree) if n == 0 => return Ok(tree.clone()), - TokenStreamKind::Tree(..) => n - 1, + TokenStreamKind::Tree(ref tree) | TokenStreamKind::JointTree(ref tree) + if n == 0 => return Ok(tree.clone()), + TokenStreamKind::Tree(..) | TokenStreamKind::JointTree(..) => n - 1, TokenStreamKind::Stream(ref stream) => match look_ahead(stream, n) { Ok(tree) => return Ok(tree), Err(n) => n, @@ -316,13 +456,15 @@ impl Cursor { _ => n, }; } - Err(n) } match self.0 { - CursorKind::Empty | CursorKind::Tree(_, true) => Err(n), - CursorKind::Tree(ref tree, false) => look_ahead(&[tree.clone().into()], n), + CursorKind::Empty | + CursorKind::Tree(_, true) | + CursorKind::JointTree(_, true) => Err(n), + CursorKind::Tree(ref tree, false) | + CursorKind::JointTree(ref tree, false) => look_ahead(&[tree.clone().into()], n), CursorKind::Stream(ref cursor) => { look_ahead(&cursor.stream[cursor.index ..], n).or_else(|mut n| { for &(ref stream, index) in cursor.stack.iter().rev() { @@ -350,6 +492,7 @@ impl From for ThinTokenStream { ThinTokenStream(match stream.kind { TokenStreamKind::Empty => None, TokenStreamKind::Tree(tree) => Some(RcSlice::new(vec![tree.into()])), + TokenStreamKind::JointTree(tree) => Some(RcSlice::new(vec![tree.joint()])), TokenStreamKind::Stream(stream) => Some(stream), }) } diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs index 2727ab79ebf76..d993ba14a4ab5 100644 --- a/src/libsyntax/util/parser_testing.rs +++ b/src/libsyntax/util/parser_testing.rs @@ -20,7 +20,7 @@ use std::iter::Peekable; /// Map a string to tts, using a made-up filename: pub fn string_to_stream(source_str: String) -> TokenStream { let ps = ParseSess::new(FilePathMapping::empty()); - filemap_to_stream(&ps, ps.codemap().new_filemap("bogofile".to_string(), source_str)) + filemap_to_stream(&ps, ps.codemap().new_filemap("bogofile".to_string(), source_str), None) } /// Map string to parser (via tts) diff --git a/src/libsyntax/util/rc_slice.rs b/src/libsyntax/util/rc_slice.rs index 2d9fd7aa87553..d6939d71129e4 100644 --- a/src/libsyntax/util/rc_slice.rs +++ b/src/libsyntax/util/rc_slice.rs @@ -9,7 +9,7 @@ // except according to those terms. 
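The `TokenStreamBuilder::push` logic above is easier to see on toy data. A hedged sketch (the `Tok` enum and joint flags are simplified assumptions): each token records whether it abuts the next one, and the builder glues across that seam, which is how `<` written flush against `=` becomes `<=`. The `RcSlice::sub_slice` added below exists so the builder can drop one tree from a shared slice without copying the rest.

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Tok { Eq, EqEq, Lt, Le, Shl }

    // Merge two adjacent operator tokens into one compound token, as
    // `Token::glue` does for the full operator set.
    fn glue(a: Tok, b: Tok) -> Option<Tok> {
        match (a, b) {
            (Tok::Eq, Tok::Eq) => Some(Tok::EqEq),
            (Tok::Lt, Tok::Eq) => Some(Tok::Le),
            (Tok::Lt, Tok::Lt) => Some(Tok::Shl),
            _ => None,
        }
    }

    // Each token carries a "joint" flag meaning "no space before the next
    // token"; a builder in the style of `TokenStreamBuilder::push` glues
    // across that boundary.
    fn build(tokens: &[(Tok, bool)]) -> Vec<Tok> {
        let mut out: Vec<(Tok, bool)> = Vec::new();
        for &(tok, joint) in tokens {
            if let Some(&(prev, true)) = out.last() {
                if let Some(glued) = glue(prev, tok) {
                    out.pop();
                    out.push((glued, joint));
                    continue;
                }
            }
            out.push((tok, joint));
        }
        out.into_iter().map(|(tok, _)| tok).collect()
    }

    fn main() {
        // `<=` glues when joint; with a space in between it stays split.
        assert_eq!(build(&[(Tok::Lt, true), (Tok::Eq, false)]), vec![Tok::Le]);
        assert_eq!(build(&[(Tok::Lt, false), (Tok::Eq, false)]),
                   vec![Tok::Lt, Tok::Eq]);
    }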
use std::fmt; -use std::ops::Deref; +use std::ops::{Deref, Range}; use std::rc::Rc; use rustc_data_structures::stable_hasher::{StableHasher, StableHasherResult, @@ -30,6 +30,14 @@ impl RcSlice { data: Rc::new(vec.into_boxed_slice()), } } + + pub fn sub_slice(&self, range: Range) -> Self { + RcSlice { + data: self.data.clone(), + offset: self.offset + range.start as u32, + len: (range.end - range.start) as u32, + } + } } impl Deref for RcSlice { diff --git a/src/libsyntax_ext/concat_idents.rs b/src/libsyntax_ext/concat_idents.rs index dc4b8eb24cd0a..6f4c112acb6c6 100644 --- a/src/libsyntax_ext/concat_idents.rs +++ b/src/libsyntax_ext/concat_idents.rs @@ -15,6 +15,8 @@ use syntax::feature_gate; use syntax::parse::token; use syntax::ptr::P; use syntax_pos::Span; +use syntax_pos::symbol::Symbol; +use syntax_pos::hygiene::SyntaxContext; use syntax::tokenstream::TokenTree; pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt, @@ -50,7 +52,10 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt, } } } - let res = ast::Ident::from_str(&res_str); + let res = ast::Ident { + name: Symbol::intern(&res_str), + ctxt: SyntaxContext::empty().apply_mark(cx.current_expansion.mark), + }; struct Result { ident: ast::Ident, diff --git a/src/libsyntax_ext/deriving/custom.rs b/src/libsyntax_ext/deriving/custom.rs index b01ef65e5fe5e..fa5537b5d8fe3 100644 --- a/src/libsyntax_ext/deriving/custom.rs +++ b/src/libsyntax_ext/deriving/custom.rs @@ -16,7 +16,6 @@ use syntax::ast::{self, ItemKind, Attribute, Mac}; use syntax::attr::{mark_used, mark_known}; use syntax::codemap::Span; use syntax::ext::base::*; -use syntax::fold::Folder; use syntax::visit::Visitor; struct MarkAttrs<'a>(&'a [ast::Name]); @@ -75,7 +74,7 @@ impl MultiItemModifier for ProcMacroDerive { MarkAttrs(&self.attrs).visit_item(&item); let input = __internal::new_token_stream(ecx.resolver.eliminate_crate_var(item.clone())); - let res = __internal::set_parse_sess(&ecx.parse_sess, || { + let res = __internal::set_sess(ecx, || { let inner = self.inner; panic::catch_unwind(panic::AssertUnwindSafe(|| inner(input))) }); @@ -97,9 +96,9 @@ impl MultiItemModifier for ProcMacroDerive { } }; - let new_items = __internal::set_parse_sess(&ecx.parse_sess, || { + __internal::set_sess(ecx, || { match __internal::token_stream_parse_items(stream) { - Ok(new_items) => new_items, + Ok(new_items) => new_items.into_iter().map(Annotatable::Item).collect(), Err(_) => { // FIXME: handle this better let msg = "proc-macro derive produced unparseable tokens"; @@ -107,12 +106,6 @@ impl MultiItemModifier for ProcMacroDerive { panic!(FatalError); } } - }); - - // Reassign spans of all expanded items to the input `item` - // for better errors here. - new_items.into_iter().map(|item| { - Annotatable::Item(ChangeSpan { span: span }.fold_item(item).expect_one("")) - }).collect() + }) } } diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index a6768c07fe13b..144d1930df90b 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -20,7 +20,7 @@ use syntax::ext::build::AstBuilder; use syntax::parse::token; use syntax::ptr::P; use syntax::symbol::{Symbol, keywords}; -use syntax_pos::{Span, DUMMY_SP}; +use syntax_pos::Span; use syntax::tokenstream; use std::collections::{HashMap, HashSet}; @@ -558,7 +558,9 @@ impl<'a, 'b> Context<'a, 'b> { // passed to this function. 
for (i, e) in self.args.into_iter().enumerate() { let name = self.ecx.ident_of(&format!("__arg{}", i)); - pats.push(self.ecx.pat_ident(DUMMY_SP, name)); + let span = + Span { ctxt: e.span.ctxt.apply_mark(self.ecx.current_expansion.mark), ..e.span }; + pats.push(self.ecx.pat_ident(span, name)); for ref arg_ty in self.arg_unique_types[i].iter() { locals.push(Context::format_arg(self.ecx, self.macsp, e.span, arg_ty, name)); } @@ -672,10 +674,10 @@ impl<'a, 'b> Context<'a, 'b> { } pub fn expand_format_args<'cx>(ecx: &'cx mut ExtCtxt, - sp: Span, + mut sp: Span, tts: &[tokenstream::TokenTree]) -> Box { - + sp.ctxt = sp.ctxt.apply_mark(ecx.current_expansion.mark); match parse_args(ecx, sp, tts) { Some((efmt, args, names)) => { MacEager::expr(expand_preparsed_format_args(ecx, sp, efmt, args, names)) @@ -696,7 +698,8 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, // `ArgumentType` does not derive `Clone`. let arg_types: Vec<_> = (0..args.len()).map(|_| Vec::new()).collect(); let arg_unique_types: Vec<_> = (0..args.len()).map(|_| Vec::new()).collect(); - let macsp = ecx.call_site(); + let mut macsp = ecx.call_site(); + macsp.ctxt = macsp.ctxt.apply_mark(ecx.current_expansion.mark); let msg = "format argument must be a string literal."; let fmt = match expr_to_spanned_string(ecx, efmt, msg) { Some(fmt) => fmt, diff --git a/src/libsyntax_ext/proc_macro_impl.rs b/src/libsyntax_ext/proc_macro_impl.rs index f60e5824db962..5fcedbf50c60f 100644 --- a/src/libsyntax_ext/proc_macro_impl.rs +++ b/src/libsyntax_ext/proc_macro_impl.rs @@ -34,7 +34,7 @@ impl base::AttrProcMacro for AttrProcMacro { let annotation = __internal::token_stream_wrap(annotation); let annotated = __internal::token_stream_wrap(annotated); - let res = __internal::set_parse_sess(&ecx.parse_sess, || { + let res = __internal::set_sess(ecx, || { panic::catch_unwind(panic::AssertUnwindSafe(|| (self.inner)(annotation, annotated))) }); @@ -69,7 +69,7 @@ impl base::ProcMacro for BangProcMacro { -> TokenStream { let input = __internal::token_stream_wrap(input); - let res = __internal::set_parse_sess(&ecx.parse_sess, || { + let res = __internal::set_sess(ecx, || { panic::catch_unwind(panic::AssertUnwindSafe(|| (self.inner)(input))) }); diff --git a/src/libsyntax_pos/hygiene.rs b/src/libsyntax_pos/hygiene.rs index f2ccc3f051e92..804b91ab09e3c 100644 --- a/src/libsyntax_pos/hygiene.rs +++ b/src/libsyntax_pos/hygiene.rs @@ -144,24 +144,18 @@ impl SyntaxContext { pub fn apply_mark(self, mark: Mark) -> SyntaxContext { HygieneData::with(|data| { let syntax_contexts = &mut data.syntax_contexts; - let ctxt_data = syntax_contexts[self.0 as usize]; - if mark == ctxt_data.outer_mark { - return ctxt_data.prev_ctxt; - } - - let modern = if data.marks[mark.0 as usize].modern { - *data.markings.entry((ctxt_data.modern, mark)).or_insert_with(|| { - let modern = SyntaxContext(syntax_contexts.len() as u32); + let mut modern = syntax_contexts[self.0 as usize].modern; + if data.marks[mark.0 as usize].modern { + modern = *data.markings.entry((modern, mark)).or_insert_with(|| { + let len = syntax_contexts.len() as u32; syntax_contexts.push(SyntaxContextData { outer_mark: mark, - prev_ctxt: ctxt_data.modern, - modern: modern, + prev_ctxt: modern, + modern: SyntaxContext(len), }); - modern - }) - } else { - ctxt_data.modern - }; + SyntaxContext(len) + }); + } *data.markings.entry((self, mark)).or_insert_with(|| { syntax_contexts.push(SyntaxContextData { diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index bb62efd376a0b..a7c247689cce8 100644 
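Stepping back from the `apply_mark` rewrite above: a toy model of the hygiene table (all types are simplified assumptions), showing the memoized (context, mark) -> context mapping that the real `HygieneData::markings` provides, so that re-applying a mark never allocates a new context.

    use std::collections::HashMap;

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct Mark(u32);

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct SyntaxContext(u32);

    #[derive(Default)]
    struct HygieneData {
        contexts: Vec<(Mark, SyntaxContext)>, // (outer_mark, prev_ctxt) per context
        markings: HashMap<(SyntaxContext, Mark), SyntaxContext>,
    }

    impl HygieneData {
        fn new() -> Self {
            let mut data = HygieneData::default();
            data.contexts.push((Mark(0), SyntaxContext(0))); // the empty context
            data
        }

        // Memoized: applying the same mark to the same context always
        // returns the same interned context index.
        fn apply_mark(&mut self, ctxt: SyntaxContext, mark: Mark) -> SyntaxContext {
            if let Some(&existing) = self.markings.get(&(ctxt, mark)) {
                return existing;
            }
            let new = SyntaxContext(self.contexts.len() as u32);
            self.contexts.push((mark, ctxt));
            self.markings.insert((ctxt, mark), new);
            new
        }
    }

    fn main() {
        let mut data = HygieneData::new();
        let empty = SyntaxContext(0);
        let a = data.apply_mark(empty, Mark(1));
        let b = data.apply_mark(empty, Mark(1));
        assert_eq!(a, b);
        assert_ne!(a, empty);
    }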
--- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -186,7 +186,7 @@ impl Span { pub fn to(self, end: Span) -> Span { // FIXME(jseyfried): self.ctxt should always equal end.ctxt here (c.f. issue #23480) - if end.ctxt == SyntaxContext::empty() { + if self.ctxt == SyntaxContext::empty() { Span { lo: self.lo, ..end } } else { Span { hi: end.hi, ..self } diff --git a/src/test/compile-fail/asm-out-assign-imm.rs b/src/test/compile-fail/asm-out-assign-imm.rs index 3c4a5dcb7b038..f95e4410381d9 100644 --- a/src/test/compile-fail/asm-out-assign-imm.rs +++ b/src/test/compile-fail/asm-out-assign-imm.rs @@ -28,7 +28,6 @@ pub fn main() { asm!("mov $1, $0" : "=r"(x) : "r"(5)); //~^ ERROR re-assignment of immutable variable `x` //~| NOTE re-assignment of immutable - //~| NOTE in this expansion of asm! } foo(x); } diff --git a/src/test/compile-fail/macro-context.rs b/src/test/compile-fail/macro-context.rs index 80802e19f8401..cc714a6e43141 100644 --- a/src/test/compile-fail/macro-context.rs +++ b/src/test/compile-fail/macro-context.rs @@ -23,5 +23,5 @@ fn main() { m!() => {} //~ NOTE the usage of `m!` is likely invalid in pattern context } - m!(); + m!(); //~ NOTE in this expansion } diff --git a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs index 0433b95865ef8..e2c68a626f91e 100644 --- a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs +++ b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs @@ -8,50 +8,37 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![allow(unused_parens)] -#![feature(plugin)] -#![feature(plugin_registrar)] -#![feature(rustc_private)] -#![plugin(proc_macro_plugin)] +// no-prefer-dynamic -extern crate rustc_plugin; -extern crate syntax; +#![crate_type = "proc-macro"] +#![feature(proc_macro)] -use rustc_plugin::Registry; +extern crate proc_macro; -use syntax::ext::base::SyntaxExtension; -use syntax::parse::token::Token; -use syntax::symbol::Symbol; -use syntax::tokenstream::{TokenTree, TokenStream}; +use proc_macro::{TokenStream, TokenNode, quote}; -#[plugin_registrar] -pub fn plugin_registrar(reg: &mut Registry) { - reg.register_syntax_extension(Symbol::intern("cond"), - SyntaxExtension::ProcMacro(Box::new(cond))); -} - -fn cond(input: TokenStream) -> TokenStream { +#[proc_macro] +pub fn cond(input: TokenStream) -> TokenStream { let mut conds = Vec::new(); - let mut input = input.trees().peekable(); + let mut input = input.into_iter().peekable(); while let Some(tree) = input.next() { - let mut cond = match tree { - TokenTree::Delimited(_, ref delimited) => delimited.stream(), + let cond = match tree.kind { + TokenNode::Group(_, cond) => cond, _ => panic!("Invalid input"), }; - let mut trees = cond.trees(); - let test = trees.next(); - let rhs = trees.collect::(); + let mut cond_trees = cond.clone().into_iter(); + let test = cond_trees.next().expect("Unexpected empty condition in `cond!`"); + let rhs = cond_trees.collect::(); if rhs.is_empty() { panic!("Invalid macro usage in cond: {}", cond); } - let is_else = match test { - Some(TokenTree::Token(_, Token::Ident(ident))) if ident.name == "else" => true, + let is_else = match test.kind { + TokenNode::Term(word) => word.as_str() == "else", _ => false, }; conds.push(if is_else || input.peek().is_none() { quote!({ $rhs }) } else { - let test = test.unwrap(); quote!(if $test { $rhs } else) }); } diff --git a/src/test/run-pass-fulldeps/auxiliary/hello_macro.rs 
b/src/test/run-pass-fulldeps/auxiliary/hello_macro.rs index 9522592a5e9e6..cf6584e961a67 100644 --- a/src/test/run-pass-fulldeps/auxiliary/hello_macro.rs +++ b/src/test/run-pass-fulldeps/auxiliary/hello_macro.rs @@ -8,29 +8,20 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(plugin)] -#![feature(plugin_registrar)] -#![feature(rustc_private)] -#![plugin(proc_macro_plugin)] +// no-prefer-dynamic -extern crate rustc_plugin; -extern crate syntax; +#![crate_type = "proc-macro"] +#![feature(proc_macro, proc_macro_lib)] -use rustc_plugin::Registry; -use syntax::ext::base::SyntaxExtension; -use syntax::symbol::Symbol; -use syntax::tokenstream::TokenStream; +extern crate proc_macro; -#[plugin_registrar] -pub fn plugin_registrar(reg: &mut Registry) { - reg.register_syntax_extension(Symbol::intern("hello"), - SyntaxExtension::ProcMacro(Box::new(hello))); -} +use proc_macro::{TokenStream, quote}; // This macro is not very interesting, but it does contain delimited tokens with // no content - `()` and `{}` - which has caused problems in the past. // Also, it tests that we can escape `$` via `$$`. -fn hello(_: TokenStream) -> TokenStream { +#[proc_macro] +pub fn hello(_: TokenStream) -> TokenStream { quote!({ fn hello() {} macro_rules! m { ($$($$t:tt)*) => { $$($$t)* } } diff --git a/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs b/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs index 0e37a7a5dcce2..1b47043884844 100644 --- a/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs +++ b/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs @@ -8,47 +8,37 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(plugin, plugin_registrar, rustc_private)] -#![plugin(proc_macro_plugin)] - -extern crate rustc_plugin; -extern crate syntax; - -use rustc_plugin::Registry; -use syntax::ext::base::SyntaxExtension; -use syntax::tokenstream::TokenStream; -use syntax::symbol::Symbol; - -#[plugin_registrar] -pub fn plugin_registrar(reg: &mut Registry) { - reg.register_syntax_extension(Symbol::intern("attr_tru"), - SyntaxExtension::AttrProcMacro(Box::new(attr_tru))); - reg.register_syntax_extension(Symbol::intern("attr_identity"), - SyntaxExtension::AttrProcMacro(Box::new(attr_identity))); - reg.register_syntax_extension(Symbol::intern("tru"), - SyntaxExtension::ProcMacro(Box::new(tru))); - reg.register_syntax_extension(Symbol::intern("ret_tru"), - SyntaxExtension::ProcMacro(Box::new(ret_tru))); - reg.register_syntax_extension(Symbol::intern("identity"), - SyntaxExtension::ProcMacro(Box::new(identity))); -} +// no-prefer-dynamic + +#![crate_type = "proc-macro"] +#![feature(proc_macro, proc_macro_lib)] + +extern crate proc_macro; + +use proc_macro::{TokenStream, quote}; -fn attr_tru(_attr: TokenStream, _item: TokenStream) -> TokenStream { - quote!(fn f1() -> bool { true }) +#[proc_macro_attribute] +pub fn attr_tru(_attr: TokenStream, item: TokenStream) -> TokenStream { + let name = item.into_iter().skip(1).next().unwrap(); + quote!(fn $name() -> bool { true }) } -fn attr_identity(_attr: TokenStream, item: TokenStream) -> TokenStream { +#[proc_macro_attribute] +pub fn attr_identity(_attr: TokenStream, item: TokenStream) -> TokenStream { quote!($item) } -fn tru(_ts: TokenStream) -> TokenStream { +#[proc_macro] +pub fn tru(_ts: TokenStream) -> TokenStream { quote!(true) } -fn ret_tru(_ts: TokenStream) -> TokenStream { +#[proc_macro] +pub fn ret_tru(_ts: TokenStream) -> TokenStream { 
diff --git a/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs b/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs
index 0e37a7a5dcce2..1b47043884844 100644
--- a/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs
+++ b/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs
@@ -8,47 +8,37 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-#![feature(plugin, plugin_registrar, rustc_private)]
-#![plugin(proc_macro_plugin)]
-
-extern crate rustc_plugin;
-extern crate syntax;
-
-use rustc_plugin::Registry;
-use syntax::ext::base::SyntaxExtension;
-use syntax::tokenstream::TokenStream;
-use syntax::symbol::Symbol;
-
-#[plugin_registrar]
-pub fn plugin_registrar(reg: &mut Registry) {
-    reg.register_syntax_extension(Symbol::intern("attr_tru"),
-                                  SyntaxExtension::AttrProcMacro(Box::new(attr_tru)));
-    reg.register_syntax_extension(Symbol::intern("attr_identity"),
-                                  SyntaxExtension::AttrProcMacro(Box::new(attr_identity)));
-    reg.register_syntax_extension(Symbol::intern("tru"),
-                                  SyntaxExtension::ProcMacro(Box::new(tru)));
-    reg.register_syntax_extension(Symbol::intern("ret_tru"),
-                                  SyntaxExtension::ProcMacro(Box::new(ret_tru)));
-    reg.register_syntax_extension(Symbol::intern("identity"),
-                                  SyntaxExtension::ProcMacro(Box::new(identity)));
-}
+// no-prefer-dynamic
+
+#![crate_type = "proc-macro"]
+#![feature(proc_macro, proc_macro_lib)]
+
+extern crate proc_macro;
+
+use proc_macro::{TokenStream, quote};
 
-fn attr_tru(_attr: TokenStream, _item: TokenStream) -> TokenStream {
-    quote!(fn f1() -> bool { true })
+#[proc_macro_attribute]
+pub fn attr_tru(_attr: TokenStream, item: TokenStream) -> TokenStream {
+    let name = item.into_iter().skip(1).next().unwrap();
+    quote!(fn $name() -> bool { true })
 }
 
-fn attr_identity(_attr: TokenStream, item: TokenStream) -> TokenStream {
+#[proc_macro_attribute]
+pub fn attr_identity(_attr: TokenStream, item: TokenStream) -> TokenStream {
     quote!($item)
 }
 
-fn tru(_ts: TokenStream) -> TokenStream {
+#[proc_macro]
+pub fn tru(_ts: TokenStream) -> TokenStream {
     quote!(true)
 }
 
-fn ret_tru(_ts: TokenStream) -> TokenStream {
+#[proc_macro]
+pub fn ret_tru(_ts: TokenStream) -> TokenStream {
     quote!(return true;)
 }
 
-fn identity(ts: TokenStream) -> TokenStream {
+#[proc_macro]
+pub fn identity(ts: TokenStream) -> TokenStream {
     quote!($ts)
 }
diff --git a/src/test/run-pass-fulldeps/macro-quote-1.rs b/src/test/run-pass-fulldeps/macro-quote-1.rs
deleted file mode 100644
index e7d0a83017be0..0000000000000
--- a/src/test/run-pass-fulldeps/macro-quote-1.rs
+++ /dev/null
@@ -1,40 +0,0 @@
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// ignore-stage1
-
-#![feature(plugin)]
-#![feature(rustc_private)]
-#![plugin(proc_macro_plugin)]
-
-extern crate syntax;
-extern crate syntax_pos;
-
-use syntax::ast::{Ident, Name};
-use syntax::parse::token::{self, Token, Lit};
-use syntax::tokenstream::TokenTree;
-
-fn main() {
-    let true_tok = token::Ident(Ident::from_str("true"));
-    assert!(quote!(true).eq_unspanned(&true_tok.into()));
-
-    // issue #35829, extended check to proc_macro.
-    let triple_dot_tok = Token::DotDotDot;
-    assert!(quote!(...).eq_unspanned(&triple_dot_tok.into()));
-
-    let byte_str_tok = Token::Literal(Lit::ByteStr(Name::intern("one")), None);
-    assert!(quote!(b"one").eq_unspanned(&byte_str_tok.into()));
-
-    let byte_str_raw_tok = Token::Literal(Lit::ByteStrRaw(Name::intern("#\"two\"#"), 3), None);
-    assert!(quote!(br###"#"two"#"###).eq_unspanned(&byte_str_raw_tok.into()));
-
-    let str_raw_tok = Token::Literal(Lit::StrRaw(Name::intern("#\"three\"#"), 2), None);
-    assert!(quote!(r##"#"three"#"##).eq_unspanned(&str_raw_tok.into()));
-}
diff --git a/src/test/run-pass-fulldeps/macro-quote-cond.rs b/src/test/run-pass-fulldeps/macro-quote-cond.rs
index fa969b6a087cf..cff743bdae6cd 100644
--- a/src/test/run-pass-fulldeps/macro-quote-cond.rs
+++ b/src/test/run-pass-fulldeps/macro-quote-cond.rs
@@ -11,9 +11,11 @@
 // aux-build:cond_plugin.rs
 // ignore-stage1
 
-#![feature(plugin)]
-#![feature(rustc_private)]
-#![plugin(cond_plugin)]
+#![feature(proc_macro)]
+
+extern crate cond_plugin;
+
+use cond_plugin::cond;
 
 fn fact(n : i64) -> i64 {
     if n == 0 {
diff --git a/src/test/run-pass-fulldeps/macro-quote-test.rs b/src/test/run-pass-fulldeps/macro-quote-test.rs
index bdbea8a419416..eb77895e2d7ad 100644
--- a/src/test/run-pass-fulldeps/macro-quote-test.rs
+++ b/src/test/run-pass-fulldeps/macro-quote-test.rs
@@ -13,10 +13,10 @@
 // aux-build:hello_macro.rs
 // ignore-stage1
 
-#![feature(plugin)]
-#![feature(rustc_private)]
-#![plugin(hello_macro)]
+#![feature(proc_macro)]
+
+extern crate hello_macro;
 
 fn main() {
-    hello!();
+    hello_macro::hello!();
 }
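One detail of the proc_macro_def.rs conversion above is worth spelling out: `attr_tru` no longer hardcodes the function name but reads it out of the item's tokens. Assuming the token layout implied by the new API in this patch, an annotated item arrives roughly as sketched below, so `skip(1).next()` lands on the name:

    // Assumed TokenNode sequence for `#[attr_tru] fn f2() -> bool { false }`:
    //   Term("fn"), Term("f2"), Group(Paren, empty), Op('-', Joint), Op('>', Alone),
    //   Term("bool"), Group(Brace, ...)
    // Skipping the leading `fn` yields `f2`, and the attribute rewrites the item to:
    fn f2() -> bool { true }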
diff --git a/src/test/run-pass-fulldeps/proc-macro/auxiliary/attr-args.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/attr-args.rs
index 989c77f1089cf..93815d16837d3 100644
--- a/src/test/run-pass-fulldeps/proc-macro/auxiliary/attr-args.rs
+++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/attr-args.rs
@@ -24,7 +24,7 @@ pub fn attr_with_args(args: TokenStream, input: TokenStream) -> TokenStream {
 
     let input = input.to_string();
 
-    assert_eq!(input, "fn foo ( ) { }");
+    assert_eq!(input, "fn foo() { }");
 
     r#"
         fn foo() -> &'static str { "Hello, world!" }
diff --git a/src/test/run-pass-fulldeps/proc-macro/auxiliary/count_compound_ops.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/count_compound_ops.rs
new file mode 100644
index 0000000000000..ec2ff0d1e2b8c
--- /dev/null
+++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/count_compound_ops.rs
@@ -0,0 +1,36 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// no-prefer-dynamic
+
+#![feature(proc_macro)]
+#![crate_type = "proc-macro"]
+
+extern crate proc_macro;
+
+use proc_macro::{TokenStream, TokenNode, Spacing, Literal, quote};
+
+#[proc_macro]
+pub fn count_compound_ops(input: TokenStream) -> TokenStream {
+    assert_eq!(count_compound_ops_helper(quote!(++ (&&) 4@a)), 3);
+    TokenNode::Literal(Literal::u32(count_compound_ops_helper(input))).into()
+}
+
+fn count_compound_ops_helper(input: TokenStream) -> u32 {
+    let mut count = 0;
+    for token in input {
+        match token.kind {
+            TokenNode::Op(c, Spacing::Alone) => count += 1,
+            TokenNode::Group(_, tokens) => count += count_compound_ops_helper(tokens),
+            _ => {}
+        }
+    }
+    count
+}
diff --git a/src/test/run-pass-fulldeps/proc-macro/auxiliary/hygiene_example.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/hygiene_example.rs
new file mode 100644
index 0000000000000..8ffa7abe6f7f9
--- /dev/null
+++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/hygiene_example.rs
@@ -0,0 +1,19 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(proc_macro)]
+
+extern crate hygiene_example_codegen;
+
+pub use hygiene_example_codegen::hello;
+
+pub fn print(string: &str) {
+    println!("{}", string);
+}
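The counting trick in count_compound_ops.rs above leans on the new `Spacing` rules: a multi-character operator is delivered as single-character `Op`s that are `Joint` except for the last one, so counting `Alone` ops counts whole operators. A restatement of the assumed tokenization behind the `3` in that file's self-test:

    // quote!(++ (&&) 4@a) is assumed to tokenize as:
    //   Op('+', Joint), Op('+', Alone)                      -> 1 Alone
    //   Group(Paren, [Op('&', Joint), Op('&', Alone)])      -> 1 Alone (found via recursion)
    //   Literal(4), Op('@', Alone), Term(a)                 -> 1 Alone
    // hence count_compound_ops_helper(quote!(++ (&&) 4@a)) == 3.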
diff --git a/src/test/run-pass-fulldeps/proc-macro/auxiliary/hygiene_example_codegen.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/hygiene_example_codegen.rs
new file mode 100644
index 0000000000000..055e4e2fad7af
--- /dev/null
+++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/hygiene_example_codegen.rs
@@ -0,0 +1,36 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// no-prefer-dynamic
+
+#![feature(proc_macro)]
+#![crate_type = "proc-macro"]
+
+extern crate proc_macro as proc_macro_renamed; // This does not break `quote!`
+
+use proc_macro_renamed::{TokenStream, quote};
+
+#[proc_macro]
+pub fn hello(input: TokenStream) -> TokenStream {
+    quote!(hello_helper!($input))
+    //^ `hello_helper!` always resolves to the following proc macro,
+    //| no matter where `hello!` is used.
+}
+
+#[proc_macro]
+pub fn hello_helper(input: TokenStream) -> TokenStream {
+    quote! {
+        extern crate hygiene_example; // This is never a conflict error
+        let string = format!("hello {}", $input);
+        //^ `format!` always resolves to the prelude macro,
+        //| even if a different `format!` is in scope where `hello!` is used.
+        hygiene_example::print(&string)
+    }
+}
diff --git a/src/test/run-pass-fulldeps/proc-macro/count_compound_ops.rs b/src/test/run-pass-fulldeps/proc-macro/count_compound_ops.rs
new file mode 100644
index 0000000000000..1a2b144e4717b
--- /dev/null
+++ b/src/test/run-pass-fulldeps/proc-macro/count_compound_ops.rs
@@ -0,0 +1,20 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:count_compound_ops.rs
+
+#![feature(proc_macro)]
+
+extern crate count_compound_ops;
+use count_compound_ops::count_compound_ops;
+
+fn main() {
+    assert_eq!(count_compound_ops!(foo<=>bar <<
+}
diff --git a/src/test/run-pass-fulldeps/proc-macro/hygiene_example.rs b/src/test/run-pass-fulldeps/proc-macro/hygiene_example.rs
new file mode 100644
--- /dev/null
+++ b/src/test/run-pass-fulldeps/proc-macro/hygiene_example.rs
@@ -0,0 +1,27 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:hygiene_example_codegen.rs
+// aux-build:hygiene_example.rs
+
+#![feature(proc_macro)]
+
+extern crate hygiene_example;
+use hygiene_example::hello;
+
+fn main() {
+    mod hygiene_example {} // no conflict with `extern crate hygiene_example;` from the proc macro
+    macro_rules! format { () => {} } // does not interfere with `format!` from the proc macro
+    macro_rules! hello_helper { () => {} } // similarly does not interfere with the proc macro
+
+    let string = "world"; // no conflict with `string` from the proc macro
+    hello!(string);
+    hello!(string);
+}
diff --git a/src/test/run-pass-fulldeps/proc_macro.rs b/src/test/run-pass-fulldeps/proc_macro.rs
index 22cc9f0f8d40e..cdda723585b7a 100644
--- a/src/test/run-pass-fulldeps/proc_macro.rs
+++ b/src/test/run-pass-fulldeps/proc_macro.rs
@@ -12,10 +12,11 @@
 // ignore-stage1
 // ignore-cross-compile
 
-#![feature(plugin, custom_attribute)]
-#![feature(type_macros)]
+#![feature(proc_macro)]
 
-#![plugin(proc_macro_def)]
+extern crate proc_macro_def;
+
+use proc_macro_def::{attr_tru, attr_identity, identity, ret_tru, tru};
 
 #[attr_tru]
 fn f1() -> bool {
diff --git a/src/test/ui/token/macro-incomplete-parse.rs b/src/test/ui/token/macro-incomplete-parse.rs
index 47374fc3c6085..08749373432f5 100644
--- a/src/test/ui/token/macro-incomplete-parse.rs
+++ b/src/test/ui/token/macro-incomplete-parse.rs
@@ -32,7 +32,7 @@ macro_rules! ignored_pat {
 ignored_item!(); //~ NOTE caused by the macro expansion here
 
 fn main() {
-    ignored_expr!();
+    ignored_expr!(); //~ NOTE in this expansion
     match 1 {
         ignored_pat!() => (), //~ NOTE caused by the macro expansion here
         _ => (),
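The hygiene_example files above are the heart of this patch's hygiene story: identifiers and macro names emitted by `quote!` resolve where the proc macro was defined, not where it is invoked. Condensed from the test, the guarantee being exercised looks like this (values as in the test; the printed output is an assumption based on the codegen crate above):

    // All of these call-site shadows are inert:
    mod hygiene_example {}            // does not capture the quoted `extern crate hygiene_example`
    macro_rules! format { () => {} }  // does not capture the quoted `format!`
    macro_rules! hello_helper { () => {} }

    let string = "world";             // does not capture the quoted `string`
    hello!(string);                   // still expected to print "hello world"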
diff --git a/src/test/ui/token/macro-incomplete-parse.stderr b/src/test/ui/token/macro-incomplete-parse.stderr
index f23d97586b843..6bce09af05250 100644
--- a/src/test/ui/token/macro-incomplete-parse.stderr
+++ b/src/test/ui/token/macro-incomplete-parse.stderr
@@ -15,6 +15,9 @@ error: expected one of `.`, `;`, `?`, `}`, or an operator, found `,`
    |
 22 |     () => ( 1, //~ ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `,`
    |              ^ expected one of `.`, `;`, `?`, `}`, or an operator here
+...
+35 |     ignored_expr!(); //~ NOTE in this expansion
+   |     ---------------- in this macro invocation
 
 error: macro expansion ignores token `,` and any following
   --> $DIR/macro-incomplete-parse.rs:29:14
diff --git a/src/tools/tidy/src/cargo.rs b/src/tools/tidy/src/cargo.rs
index c8c6cb0ee6b41..f40fea60f40a8 100644
--- a/src/tools/tidy/src/cargo.rs
+++ b/src/tools/tidy/src/cargo.rs
@@ -91,14 +91,6 @@ fn verify(tomlfile: &Path, libfile: &Path, bad: &mut bool) {
             continue
         }
 
-        // We want the compiler to depend on the proc_macro_plugin crate so
-        // that it is built and included in the end, but we don't want to
-        // actually use it in the compiler.
-        if toml.contains("name = \"rustc_driver\"") &&
-           krate == "proc_macro_plugin" {
-            continue
-        }
-
         if !librs.contains(&format!("extern crate {}", krate)) {
             tidy_error!(bad, "{} doesn't have `extern crate {}`, but Cargo.toml \
                               depends on it", libfile.display(), krate);
diff --git a/src/tools/tidy/src/features.rs b/src/tools/tidy/src/features.rs
index 722fc2b317eb4..4c94ade98d965 100644
--- a/src/tools/tidy/src/features.rs
+++ b/src/tools/tidy/src/features.rs
@@ -245,7 +245,7 @@ fn get_and_check_lib_features(base_src_path: &Path,
         let mut err = |msg: &str| {
             tidy_error!(bad, "{}:{}: {}", file.display(), line, msg);
         };
-        if lang_features.contains_key(name) {
+        if lang_features.contains_key(name) && name != "proc_macro" {
             err("duplicating a lang feature");
         }
         if let Some(ref s) = lib_features.get(name) {
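The features.rs exemption is deliberate: `proc_macro` now names both a language feature, gating `#![feature(proc_macro)]` in the test crates above, and a library feature, gating the unstable items in the `proc_macro` crate, a duplication tidy would otherwise reject. Schematically (the item name below is illustrative, not from the patch):

    // Language-side gate, in a crate that uses proc macros:
    #![feature(proc_macro)]

    // Library-side gate, on an unstable item inside libproc_macro:
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn some_unstable_api() {}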