Implementation of RFC 2151, Raw Identifiers #48942


Status: Merged (7 commits, Mar 23, 2018). The diff below shows changes from all commits.
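For orientation, here is the surface syntax this PR implements, distilled from the run-pass tests added below (assumes the `raw_identifiers` feature gate and a nightly of this era):

```rust
#![feature(raw_identifiers)]

// `r#` lets a keyword be used as an ordinary identifier.
fn r#fn(r#match: u32) -> u32 {
    r#match
}

fn main() {
    let r#struct = 1;
    assert_eq!(1, r#struct);

    // For non-keywords, `foo` and `r#foo` name the same identifier.
    let foo = 2;
    assert_eq!(2, r#foo);

    assert_eq!(4, r#fn(4));
}
```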
11 changes: 9 additions & 2 deletions src/libproc_macro/lib.rs
@@ -681,7 +681,8 @@ impl TokenTree {
Dollar => op!('$'),
Question => op!('?'),

Ident(ident) | Lifetime(ident) => TokenNode::Term(Term(ident.name)),
Ident(ident, false) | Lifetime(ident) => TokenNode::Term(Term(ident.name)),
Ident(ident, true) => TokenNode::Term(Term(Symbol::intern(&format!("r#{}", ident)))),
Literal(..) | DocComment(..) => TokenNode::Literal(self::Literal(token)),

Interpolated(_) => {
@@ -713,8 +714,14 @@ impl TokenTree {
},
TokenNode::Term(symbol) => {
let ident = ast::Ident { name: symbol.0, ctxt: self.span.0.ctxt() };
let sym_str = symbol.0.as_str();
let token =
if symbol.0.as_str().starts_with("'") { Lifetime(ident) } else { Ident(ident) };
if sym_str.starts_with("'") { Lifetime(ident) }
else if sym_str.starts_with("r#") {
let name = Symbol::intern(&sym_str[2..]);
let ident = ast::Ident { name, ctxt: self.span.0.ctxt() };
Ident(ident, true)
} else { Ident(ident, false) };
return TokenTree::Token(self.span.0, token).into();
}
TokenNode::Literal(token) => return TokenTree::Token(self.span.0, token.0).into(),
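A hedged sketch of the round-trip convention this hunk encodes, using hypothetical helper names rather than the real `proc_macro` types: a raw identifier crosses the proc-macro bridge as a term spelled `r#<name>`, and a term that starts with `r#` is converted back into a raw identifier token.

```rust
// Hypothetical stand-ins; the real logic lives in libproc_macro above.
fn term_string(name: &str, is_raw: bool) -> String {
    if is_raw { format!("r#{}", name) } else { name.to_string() }
}

fn parse_term(s: &str) -> (&str, bool) {
    // Mirrors the `starts_with("r#")` check in the diff.
    if s.starts_with("r#") { (&s[2..], true) } else { (s, false) }
}

fn main() {
    assert_eq!(term_string("match", true), "r#match");
    assert_eq!(parse_term("r#match"), ("match", true));
    assert_eq!(parse_term("foo"), ("foo", false));
}
```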
5 changes: 4 additions & 1 deletion src/librustc/ich/impls_syntax.rs
@@ -318,7 +318,10 @@ fn hash_token<'a, 'gcx, W: StableHasherResult>(
opt_name.hash_stable(hcx, hasher);
}

token::Token::Ident(ident) |
token::Token::Ident(ident, is_raw) => {
ident.name.hash_stable(hcx, hasher);
is_raw.hash_stable(hcx, hasher);
}
token::Token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),

token::Token::Interpolated(_) => {
4 changes: 2 additions & 2 deletions src/librustc_passes/ast_validation.rs
@@ -41,13 +41,13 @@ impl<'a> AstValidator<'a> {
keywords::StaticLifetime.name(),
keywords::Invalid.name()];
if !valid_names.contains(&lifetime.ident.name) &&
token::Ident(lifetime.ident.without_first_quote()).is_reserved_ident() {
token::is_reserved_ident(lifetime.ident.without_first_quote()) {
self.err_handler().span_err(lifetime.span, "lifetimes cannot use keyword names");
}
}

fn check_label(&self, label: Ident, span: Span) {
if token::Ident(label.without_first_quote()).is_reserved_ident() {
if token::is_reserved_ident(label.without_first_quote()) {
self.err_handler().span_err(span, &format!("invalid label name `{}`", label.name));
}
}
2 changes: 1 addition & 1 deletion src/librustc_resolve/lib.rs
@@ -3206,7 +3206,7 @@ impl<'a> Resolver<'a> {
// `$crate::a::b`
module = Some(self.resolve_crate_root(ident.node.ctxt, true));
continue
} else if i == 1 && !token::Ident(ident.node).is_path_segment_keyword() {
} else if i == 1 && !token::is_path_segment_keyword(ident.node) {
let prev_name = path[0].node.name;
if prev_name == keywords::Extern.name() ||
prev_name == keywords::CrateRoot.name() &&
2 changes: 1 addition & 1 deletion src/librustc_resolve/macros.rs
@@ -268,7 +268,7 @@ impl<'a> base::Resolver for Resolver<'a> {
if k > 0 {
tokens.push(TokenTree::Token(path.span, Token::ModSep).into());
}
let tok = Token::Ident(segment.identifier);
let tok = Token::from_ast_ident(segment.identifier);
Contributor (PR author) commented:
I changed this in from_ast_ident, since it seems this is the best behavior in all these cases.

tokens.push(TokenTree::Token(path.span, tok).into());
}
}
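The `from_ast_ident` constructor mentioned in the comment above is defined later in this diff (in `parse/token.rs`): it marks an identifier as raw exactly when the name is reserved but is not a path-segment keyword. A self-contained sketch of that rule, with stand-in keyword tables (hypothetical lists, not the compiler's real `keywords` module):

```rust
// Stand-in keyword tables; the real code consults `keywords::*` name ranges.
const PATH_SEGMENT_KEYWORDS: &[&str] = &["super", "self", "Self", "extern", "crate"];
const RESERVED: &[&str] = &[
    "as", "crate", "extern", "fn", "match", "self", "Self", "struct", "super", "_",
];

/// Mirrors `Token::from_ast_ident`: escape with `r#` only when needed and allowed.
fn needs_raw(name: &str) -> bool {
    RESERVED.contains(&name) && !PATH_SEGMENT_KEYWORDS.contains(&name)
}

fn main() {
    assert!(needs_raw("match"));   // emitted as `r#match`
    assert!(!needs_raw("self"));   // path-segment keyword: stays `self`
    assert!(!needs_raw("foo"));    // ordinary identifier: stays `foo`
}
```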
4 changes: 2 additions & 2 deletions src/librustc_resolve/resolve_imports.rs
@@ -625,7 +625,7 @@ impl<'a, 'b:'a> ImportResolver<'a, 'b> {
} else {
Some(self.resolve_crate_root(source.ctxt.modern(), false))
}
} else if is_extern && !token::Ident(source).is_path_segment_keyword() {
} else if is_extern && !token::is_path_segment_keyword(source) {
let crate_id =
self.crate_loader.resolve_crate_from_path(source.name, directive.span);
let crate_root =
@@ -667,7 +667,7 @@ impl<'a, 'b:'a> ImportResolver<'a, 'b> {
}
PathResult::Failed(span, msg, true) => {
let (mut self_path, mut self_result) = (module_path.clone(), None);
let is_special = |ident| token::Ident(ident).is_path_segment_keyword() &&
let is_special = |ident| token::is_path_segment_keyword(ident) &&
ident.name != keywords::CrateRoot.name();
if !self_path.is_empty() && !is_special(self_path[0].node) &&
!(self_path.len() > 1 && is_special(self_path[1].node)) {
8 changes: 4 additions & 4 deletions src/librustdoc/html/highlight.rs
@@ -323,12 +323,12 @@ impl<'a> Classifier<'a> {
}

// Keywords are also included in the identifier set.
token::Ident(ident) => {
token::Ident(ident, is_raw) => {
match &*ident.name.as_str() {
"ref" | "mut" => Class::RefKeyWord,
"ref" | "mut" if !is_raw => Class::RefKeyWord,

"self" |"Self" => Class::Self_,
"false" | "true" => Class::Bool,
"self" | "Self" => Class::Self_,
"false" | "true" if !is_raw => Class::Bool,

"Option" | "Result" => Class::PreludeTy,
"Some" | "None" | "Ok" | "Err" => Class::PreludeVal,
2 changes: 1 addition & 1 deletion src/libsyntax/ast.rs
@@ -112,7 +112,7 @@ impl Path {
// or starts with something like `self`/`super`/`$crate`/etc.
pub fn make_root(&self) -> Option<PathSegment> {
if let Some(ident) = self.segments.get(0).map(|seg| seg.identifier) {
if ::parse::token::Ident(ident).is_path_segment_keyword() &&
if ::parse::token::is_path_segment_keyword(ident) &&
ident.name != keywords::Crate.name() {
return None;
}
13 changes: 7 additions & 6 deletions src/libsyntax/attr.rs
@@ -1106,17 +1106,18 @@ impl IntType {

impl MetaItem {
fn tokens(&self) -> TokenStream {
let ident = TokenTree::Token(self.span, Token::Ident(Ident::with_empty_ctxt(self.name)));
let ident = TokenTree::Token(self.span,
Token::from_ast_ident(Ident::with_empty_ctxt(self.name)));
TokenStream::concat(vec![ident.into(), self.node.tokens(self.span)])
}

fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
where I: Iterator<Item = TokenTree>,
{
let (span, name) = match tokens.next() {
Some(TokenTree::Token(span, Token::Ident(ident))) => (span, ident.name),
Some(TokenTree::Token(span, Token::Ident(ident, _))) => (span, ident.name),
Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match nt.0 {
token::Nonterminal::NtIdent(ident) => (ident.span, ident.node.name),
token::Nonterminal::NtIdent(ident, _) => (ident.span, ident.node.name),
token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()),
_ => return None,
},
@@ -1269,14 +1270,14 @@ impl LitKind {
"true"
} else {
"false"
}))),
})), false),
}
}

fn from_token(token: Token) -> Option<LitKind> {
match token {
Token::Ident(ident) if ident.name == "true" => Some(LitKind::Bool(true)),
Token::Ident(ident) if ident.name == "false" => Some(LitKind::Bool(false)),
Token::Ident(ident, false) if ident.name == "true" => Some(LitKind::Bool(true)),
Token::Ident(ident, false) if ident.name == "false" => Some(LitKind::Bool(false)),
Token::Interpolated(ref nt) => match nt.0 {
token::NtExpr(ref v) => match v.node {
ExprKind::Lit(ref lit) => Some(lit.node.clone()),
10 changes: 5 additions & 5 deletions src/libsyntax/diagnostics/plugin.rs
@@ -44,7 +44,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let code = match (token_tree.len(), token_tree.get(0)) {
(1, Some(&TokenTree::Token(_, token::Ident(code)))) => code,
(1, Some(&TokenTree::Token(_, token::Ident(code, _)))) => code,
_ => unreachable!()
};

@@ -82,10 +82,10 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
token_tree.get(1),
token_tree.get(2)
) {
(1, Some(&TokenTree::Token(_, token::Ident(ref code))), None, None) => {
(1, Some(&TokenTree::Token(_, token::Ident(ref code, _))), None, None) => {
(code, None)
},
(3, Some(&TokenTree::Token(_, token::Ident(ref code))),
(3, Some(&TokenTree::Token(_, token::Ident(ref code, _))),
Some(&TokenTree::Token(_, token::Comma)),
Some(&TokenTree::Token(_, token::Literal(token::StrRaw(description, _), None)))) => {
(code, Some(description))
@@ -150,9 +150,9 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
let (crate_name, name) = match (&token_tree[0], &token_tree[2]) {
(
// Crate name.
&TokenTree::Token(_, token::Ident(ref crate_name)),
&TokenTree::Token(_, token::Ident(ref crate_name, _)),
// DIAGNOSTICS ident.
&TokenTree::Token(_, token::Ident(ref name))
&TokenTree::Token(_, token::Ident(ref name, _))
) => (*&crate_name, name),
_ => unreachable!()
};
5 changes: 3 additions & 2 deletions src/libsyntax/ext/base.rs
@@ -229,8 +229,9 @@ impl<F> TTMacroExpander for F
impl Folder for AvoidInterpolatedIdents {
fn fold_tt(&mut self, tt: tokenstream::TokenTree) -> tokenstream::TokenTree {
if let tokenstream::TokenTree::Token(_, token::Interpolated(ref nt)) = tt {
if let token::NtIdent(ident) = nt.0 {
return tokenstream::TokenTree::Token(ident.span, token::Ident(ident.node));
if let token::NtIdent(ident, is_raw) = nt.0 {
return tokenstream::TokenTree::Token(ident.span,
token::Ident(ident.node, is_raw));
}
}
fold::noop_fold_tt(tt, self)
12 changes: 7 additions & 5 deletions src/libsyntax/ext/quote.rs
@@ -75,7 +75,7 @@ pub mod rt {

impl ToTokens for ast::Ident {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
vec![TokenTree::Token(DUMMY_SP, token::Ident(*self))]
vec![TokenTree::Token(DUMMY_SP, Token::from_ast_ident(*self))]
}
}

@@ -238,7 +238,9 @@ pub mod rt {
if i > 0 {
inner.push(TokenTree::Token(self.span, token::Colon).into());
}
inner.push(TokenTree::Token(self.span, token::Ident(segment.identifier)).into());
inner.push(TokenTree::Token(
self.span, token::Token::from_ast_ident(segment.identifier)
).into());
}
inner.push(self.tokens.clone());

@@ -658,10 +660,10 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
token::Literal(token::ByteStr(i), suf) => return mk_lit!("ByteStr", suf, i),
token::Literal(token::ByteStrRaw(i, n), suf) => return mk_lit!("ByteStrRaw", suf, i, n),

token::Ident(ident) => {
token::Ident(ident, is_raw) => {
return cx.expr_call(sp,
mk_token_path(cx, sp, "Ident"),
vec![mk_ident(cx, sp, ident)]);
vec![mk_ident(cx, sp, ident), cx.expr_bool(sp, is_raw)]);
}

token::Lifetime(ident) => {
@@ -720,7 +722,7 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {

fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt> {
match *tt {
TokenTree::Token(sp, token::Ident(ident)) if quoted => {
TokenTree::Token(sp, token::Ident(ident, _)) if quoted => {
// tt.extend($ident.to_tokens(ext_cx))

let e_to_toks =
21 changes: 11 additions & 10 deletions src/libsyntax/ext/tt/macro_parser.rs
@@ -364,8 +364,8 @@ pub fn parse_failure_msg(tok: Token) -> String {

/// Perform a token equality check, ignoring syntax context (that is, an unhygienic comparison)
fn token_name_eq(t1: &Token, t2: &Token) -> bool {
if let (Some(id1), Some(id2)) = (t1.ident(), t2.ident()) {
id1.name == id2.name
if let (Some((id1, is_raw1)), Some((id2, is_raw2))) = (t1.ident(), t2.ident()) {
id1.name == id2.name && is_raw1 == is_raw2
} else if let (&token::Lifetime(id1), &token::Lifetime(id2)) = (t1, t2) {
id1.name == id2.name
} else {
@@ -711,9 +711,10 @@ pub fn parse(

/// The token is an identifier, but not `_`.
/// We prohibit passing `_` to macros expecting `ident` for now.
fn get_macro_ident(token: &Token) -> Option<Ident> {
fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> {
match *token {
token::Ident(ident) if ident.name != keywords::Underscore.name() => Some(ident),
token::Ident(ident, is_raw) if ident.name != keywords::Underscore.name() =>
Some((ident, is_raw)),
_ => None,
}
}
@@ -737,7 +738,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
"ident" => get_macro_ident(token).is_some(),
"vis" => match *token {
// The follow-set of :vis + "priv" keyword + interpolated
Token::Comma | Token::Ident(_) | Token::Interpolated(_) => true,
Token::Comma | Token::Ident(..) | Token::Interpolated(_) => true,
_ => token.can_begin_type(),
},
"block" => match *token {
@@ -746,7 +747,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
token::NtItem(_)
| token::NtPat(_)
| token::NtTy(_)
| token::NtIdent(_)
| token::NtIdent(..)
| token::NtMeta(_)
| token::NtPath(_)
| token::NtVis(_) => false, // none of these may start with '{'.
@@ -755,15 +756,15 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
_ => false,
},
"path" | "meta" => match *token {
Token::ModSep | Token::Ident(_) => true,
Token::ModSep | Token::Ident(..) => true,
Token::Interpolated(ref nt) => match nt.0 {
token::NtPath(_) | token::NtMeta(_) => true,
_ => may_be_ident(&nt.0),
},
_ => false,
},
"pat" => match *token {
Token::Ident(_) | // box, ref, mut, and other identifiers (can stricten)
Token::Ident(..) | // box, ref, mut, and other identifiers (can stricten)
Token::OpenDelim(token::Paren) | // tuple pattern
Token::OpenDelim(token::Bracket) | // slice pattern
Token::BinOp(token::And) | // reference
@@ -823,9 +824,9 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
"expr" => token::NtExpr(panictry!(p.parse_expr())),
"ty" => token::NtTy(panictry!(p.parse_ty())),
// this could be handled like a token, since it is one
"ident" => if let Some(ident) = get_macro_ident(&p.token) {
"ident" => if let Some((ident, is_raw)) = get_macro_ident(&p.token) {
p.bump();
token::NtIdent(respan(p.prev_span, ident))
token::NtIdent(respan(p.prev_span, ident), is_raw)
} else {
let token_str = pprust::token_to_string(&p.token);
p.fatal(&format!("expected ident, found {}", &token_str)).emit();
6 changes: 3 additions & 3 deletions src/libsyntax/ext/tt/macro_rules.rs
@@ -831,7 +831,7 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'
"pat" => match *tok {
TokenTree::Token(_, ref tok) => match *tok {
FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true),
Ident(i) if i.name == "if" || i.name == "in" => Ok(true),
Ident(i, false) if i.name == "if" || i.name == "in" => Ok(true),
_ => Ok(false)
},
_ => Ok(false),
@@ -840,7 +840,7 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'
TokenTree::Token(_, ref tok) => match *tok {
OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) |
Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true),
Ident(i) if i.name == "as" || i.name == "where" => Ok(true),
Ident(i, false) if i.name == "as" || i.name == "where" => Ok(true),
_ => Ok(false)
},
TokenTree::MetaVarDecl(_, _, frag) if frag.name == "block" => Ok(true),
@@ -860,7 +860,7 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'
match *tok {
TokenTree::Token(_, ref tok) => match *tok {
Comma => Ok(true),
Ident(i) if i.name != "priv" => Ok(true),
Ident(i, is_raw) if is_raw || i.name != "priv" => Ok(true),
ref tok => Ok(tok.can_begin_type())
},
TokenTree::MetaVarDecl(_, _, frag) if frag.name == "ident"
6 changes: 3 additions & 3 deletions src/libsyntax/ext/tt/quoted.rs
@@ -200,7 +200,7 @@ pub fn parse(
let span = match trees.next() {
Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() {
Some(kind) => {
Some((kind, _)) => {
let span = end_sp.with_lo(start_sp.lo());
result.push(TokenTree::MetaVarDecl(span, ident, kind));
continue;
@@ -289,14 +289,14 @@ where
// `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
// metavariable that names the crate of the invokation.
Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => {
let ident = token.ident().unwrap();
let (ident, _) = token.ident().unwrap();
let span = ident_span.with_lo(span.lo());
if ident.name == keywords::Crate.name() {
Contributor (reviewer) commented:
Hmm, I think this will accept `$r#crate` with the crate-root meaning, while we should treat it as an ordinary metavariable named `crate`.

Contributor (reviewer) commented:
Could you add a test case for this as well?

Contributor (PR author) replied:
Currently `r#crate` is forbidden, along with the rest of the keywords matching `is_path_segment_keyword`.

let ident = ast::Ident {
name: keywords::DollarCrate.name(),
..ident
};
TokenTree::Token(span, token::Ident(ident))
TokenTree::Token(span, token::Ident(ident, false))
} else {
TokenTree::MetaVar(span, ident)
}
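Following the thread above: `$crate` keeps its special crate-root meaning, while `$r#crate` is currently rejected because the lexer (later in this diff) refuses `r#` forms of path-segment keywords. Macro names and metavariables may otherwise be raw identifiers, as the `macros.rs` run-pass test exercises; a trimmed version:

```rust
#![feature(raw_identifiers)]

// Both the macro name and its metavariable are spelled as raw identifiers.
macro_rules! r#struct {
    ($r#struct:expr) => { $r#struct }
}

fn main() {
    assert_eq!(2, r#struct!(2));
}
```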
2 changes: 1 addition & 1 deletion src/libsyntax/ext/tt/transcribe.rs
@@ -169,7 +169,7 @@ pub fn transcribe(cx: &ExtCtxt,
Ident { ctxt: ident.ctxt.apply_mark(cx.current_expansion.mark), ..ident };
sp = sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark));
result.push(TokenTree::Token(sp, token::Dollar).into());
result.push(TokenTree::Token(sp, token::Ident(ident)).into());
result.push(TokenTree::Token(sp, token::Token::from_ast_ident(ident)).into());
}
}
quoted::TokenTree::Delimited(mut span, delimited) => {
14 changes: 14 additions & 0 deletions src/libsyntax/feature_gate.rs
@@ -452,6 +452,9 @@ declare_features! (

// `use path as _;` and `extern crate c as _;`
(active, underscore_imports, "1.26.0", Some(48216), None),

// Allows keywords to be escaped for use as identifiers
(active, raw_identifiers, "1.26.0", Some(48589), None),
);

declare_features! (
@@ -1932,6 +1935,17 @@ pub fn check_crate(krate: &ast::Crate,
parse_sess: sess,
plugin_attributes,
};

if !features.raw_identifiers {
for &span in sess.raw_identifier_spans.borrow().iter() {
if !span.allows_unstable() {
gate_feature!(&ctx, raw_identifiers, span,
"raw identifiers are experimental and subject to change"
);
}
}
}

let visitor = &mut PostExpansionVisitor { context: &ctx };
visitor.whole_crate_feature_gates(krate);
visit::walk_crate(visitor, krate);
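Until stabilization, every recorded raw-identifier span outside unstable-allowing code is reported through this gate. A minimal reproduction matching the new `feature-gate-raw-identifiers` UI test:

```rust
#![feature(raw_identifiers)] // removing this line triggers the gate:
// error[E0658]: raw identifiers are experimental and subject to change (see issue #48589)

fn main() {
    let r#foo = 3;
    println!("{}", foo);
}
```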
5 changes: 3 additions & 2 deletions src/libsyntax/fold.rs
@@ -578,7 +578,7 @@ pub fn noop_fold_tts<T: Folder>(tts: TokenStream, fld: &mut T) -> TokenStream {
// apply ident folder if it's an ident, apply other folds to interpolated nodes
pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token {
match t {
token::Ident(id) => token::Ident(fld.fold_ident(id)),
token::Ident(id, is_raw) => token::Ident(fld.fold_ident(id), is_raw),
token::Lifetime(id) => token::Lifetime(fld.fold_ident(id)),
token::Interpolated(nt) => {
let nt = match Lrc::try_unwrap(nt) {
@@ -630,7 +630,8 @@ pub fn noop_fold_interpolated<T: Folder>(nt: token::Nonterminal, fld: &mut T)
token::NtPat(pat) => token::NtPat(fld.fold_pat(pat)),
token::NtExpr(expr) => token::NtExpr(fld.fold_expr(expr)),
token::NtTy(ty) => token::NtTy(fld.fold_ty(ty)),
token::NtIdent(id) => token::NtIdent(Spanned::<Ident>{node: fld.fold_ident(id.node), ..id}),
token::NtIdent(id, is_raw) =>
token::NtIdent(Spanned::<Ident>{node: fld.fold_ident(id.node), ..id}, is_raw),
token::NtMeta(meta) => token::NtMeta(fld.fold_meta_item(meta)),
token::NtPath(path) => token::NtPath(fld.fold_path(path)),
token::NtTT(tt) => token::NtTT(fld.fold_tt(tt)),
74 changes: 51 additions & 23 deletions src/libsyntax/parse/lexer/mod.rs
@@ -14,7 +14,7 @@ use codemap::{CodeMap, FilePathMapping};
use errors::{FatalError, DiagnosticBuilder};
use parse::{token, ParseSess};
use str::char_at;
use symbol::Symbol;
use symbol::{Symbol, keywords};
use std_unicode::property::Pattern_White_Space;

use std::borrow::Cow;
@@ -1115,26 +1115,53 @@ impl<'a> StringReader<'a> {
/// token, and updates the interner
fn next_token_inner(&mut self) -> Result<token::Token, ()> {
let c = self.ch;
if ident_start(c) &&
match (c.unwrap(), self.nextch(), self.nextnextch()) {
// Note: r as in r" or r#" is part of a raw string literal,
// b as in b' is part of a byte literal.
// They are not identifiers, and are handled further down.
('r', Some('"'), _) |
('r', Some('#'), _) |
('b', Some('"'), _) |
('b', Some('\''), _) |
('b', Some('r'), Some('"')) |
('b', Some('r'), Some('#')) => false,
_ => true,
} {
let start = self.pos;
while ident_continue(self.ch) {
self.bump();
}

// FIXME: perform NFKC normalization here. (Issue #2253)
return Ok(self.with_str_from(start, |string| token::Ident(self.mk_ident(string))));
if ident_start(c) {
let (is_ident_start, is_raw_ident) =
match (c.unwrap(), self.nextch(), self.nextnextch()) {
// r# followed by an identifier starter is a raw identifier.
// This is an exception to the r# case below.
('r', Some('#'), x) if ident_start(x) => (true, true),
// r as in r" or r#" is part of a raw string literal.
// b as in b' is part of a byte literal.
// They are not identifiers, and are handled further down.
('r', Some('"'), _) |
('r', Some('#'), _) |
('b', Some('"'), _) |
('b', Some('\''), _) |
('b', Some('r'), Some('"')) |
('b', Some('r'), Some('#')) => (false, false),
_ => (true, false),
};
if is_ident_start {
let raw_start = self.pos;
if is_raw_ident {
// Consume the 'r#' characters.
self.bump();
self.bump();
}

let start = self.pos;
while ident_continue(self.ch) {
self.bump();
}

return Ok(self.with_str_from(start, |string| {
// FIXME: perform NFKC normalization here. (Issue #2253)
let ident = self.mk_ident(string);
if is_raw_ident && (token::is_path_segment_keyword(ident) ||
ident.name == keywords::Underscore.name()) {
self.fatal_span_(raw_start, self.pos,
&format!("`r#{}` is not currently supported.", ident.name)
).raise();
}
if is_raw_ident {
let span = self.mk_sp(raw_start, self.pos);
self.sess.raw_identifier_spans.borrow_mut().push(span);
}
token::Ident(ident, is_raw_ident)
}));
}
}

if is_dec_digit(c) {
@@ -1773,6 +1800,7 @@ mod tests {
included_mod_stack: RefCell::new(Vec::new()),
code_map: cm,
missing_fragment_specifiers: RefCell::new(HashSet::new()),
raw_identifier_spans: RefCell::new(Vec::new()),
registered_diagnostics: Lock::new(ErrorMap::new()),
non_modrs_mods: RefCell::new(vec![]),
}
@@ -1801,7 +1829,7 @@ mod tests {
assert_eq!(string_reader.next_token().tok, token::Whitespace);
let tok1 = string_reader.next_token();
let tok2 = TokenAndSpan {
tok: token::Ident(id),
tok: token::Ident(id, false),
sp: Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
};
assert_eq!(tok1, tok2);
@@ -1811,7 +1839,7 @@ mod tests {
// read another token:
let tok3 = string_reader.next_token();
let tok4 = TokenAndSpan {
tok: token::Ident(Ident::from_str("main")),
tok: mk_ident("main"),
sp: Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
};
assert_eq!(tok3, tok4);
@@ -1830,7 +1858,7 @@ mod tests {

// make the identifier by looking up the string in the interner
fn mk_ident(id: &str) -> token::Token {
token::Ident(Ident::from_str(id))
token::Token::from_ast_ident(Ident::from_str(id))
}

#[test]
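To summarize the lexer rule added above: `r#` followed by an identifier start consumes the prefix, records the span for feature gating, and yields `Ident(_, true)`; `r#` spellings of path-segment keywords and `_` are rejected with a fatal error, and raw string literals are untouched. A small illustration consistent with the UI tests in this PR:

```rust
#![feature(raw_identifiers)]

fn main() {
    let r#match = 1;           // lexed as `Ident("match", /* is_raw */ true)`
    assert_eq!(1, r#match);

    // Rejected at lexing time by this PR:
    // let r#self = 1;         // error: `r#self` is not currently supported.
    // let r#_ = 1;            // error: `r#_` is not currently supported.

    // `r"..."` / `r#"..."#` still lex as raw string literals, not identifiers.
    let _s = r#"still a raw string"#;
}
```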
24 changes: 15 additions & 9 deletions src/libsyntax/parse/mod.rs
@@ -48,6 +48,9 @@ pub struct ParseSess {
pub unstable_features: UnstableFeatures,
pub config: CrateConfig,
pub missing_fragment_specifiers: RefCell<HashSet<Span>>,
/// Places where raw identifiers were used. This is used for feature gating
/// raw identifiers
pub raw_identifier_spans: RefCell<Vec<Span>>,
/// The registered diagnostics codes
pub registered_diagnostics: Lock<ErrorMap>,
// Spans where a `mod foo;` statement was included in a non-mod.rs file.
@@ -74,6 +77,7 @@ impl ParseSess {
unstable_features: UnstableFeatures::from_environment(),
config: HashSet::new(),
missing_fragment_specifiers: RefCell::new(HashSet::new()),
raw_identifier_spans: RefCell::new(Vec::new()),
registered_diagnostics: Lock::new(ErrorMap::new()),
included_mod_stack: RefCell::new(vec![]),
code_map,
@@ -741,9 +745,9 @@ mod tests {
match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
(
4,
Some(&TokenTree::Token(_, token::Ident(name_macro_rules))),
Some(&TokenTree::Token(_, token::Ident(name_macro_rules, false))),
Some(&TokenTree::Token(_, token::Not)),
Some(&TokenTree::Token(_, token::Ident(name_zip))),
Some(&TokenTree::Token(_, token::Ident(name_zip, false))),
Some(&TokenTree::Delimited(_, ref macro_delimed)),
)
if name_macro_rules.name == "macro_rules"
@@ -762,7 +766,7 @@ mod tests {
(
2,
Some(&TokenTree::Token(_, token::Dollar)),
Some(&TokenTree::Token(_, token::Ident(ident))),
Some(&TokenTree::Token(_, token::Ident(ident, false))),
)
if first_delimed.delim == token::Paren && ident.name == "a" => {},
_ => panic!("value 3: {:?}", *first_delimed),
@@ -772,7 +776,7 @@ mod tests {
(
2,
Some(&TokenTree::Token(_, token::Dollar)),
Some(&TokenTree::Token(_, token::Ident(ident))),
Some(&TokenTree::Token(_, token::Ident(ident, false))),
)
if second_delimed.delim == token::Paren
&& ident.name == "a" => {},
@@ -793,25 +797,27 @@ mod tests {
let tts = string_to_stream("fn a (b : i32) { b; }".to_string());

let expected = TokenStream::concat(vec![
TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))).into(),
TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))).into(),
TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
TokenTree::Delimited(
sp(5, 14),
tokenstream::Delimited {
delim: token::DelimToken::Paren,
tts: TokenStream::concat(vec![
TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))).into(),
TokenTree::Token(sp(6, 7),
token::Ident(Ident::from_str("b"), false)).into(),
TokenTree::Token(sp(8, 9), token::Colon).into(),
TokenTree::Token(sp(10, 13),
token::Ident(Ident::from_str("i32"))).into(),
token::Ident(Ident::from_str("i32"), false)).into(),
]).into(),
}).into(),
TokenTree::Delimited(
sp(15, 21),
tokenstream::Delimited {
delim: token::DelimToken::Brace,
tts: TokenStream::concat(vec![
TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))).into(),
TokenTree::Token(sp(17, 18),
token::Ident(Ident::from_str("b"), false)).into(),
TokenTree::Token(sp(18, 19), token::Semi).into(),
]).into(),
}).into()
29 changes: 17 additions & 12 deletions src/libsyntax/parse/parser.rs
@@ -358,7 +358,7 @@ impl TokenCursor {

let body = TokenTree::Delimited(sp, Delimited {
delim: token::Bracket,
tts: [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))),
tts: [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"), false)),
TokenTree::Token(sp, token::Eq),
TokenTree::Token(sp, token::Literal(
token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))]
@@ -784,7 +784,7 @@ impl<'a> Parser<'a> {

fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> {
match self.token {
token::Ident(i) => {
token::Ident(i, _) => {
if self.token.is_reserved_ident() {
let mut err = self.expected_ident_found();
if recover {
@@ -1925,7 +1925,7 @@ impl<'a> Parser<'a> {

pub fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> {
match self.token {
token::Ident(sid) if self.token.is_path_segment_keyword() => {
token::Ident(sid, _) if self.token.is_path_segment_keyword() => {
self.bump();
Ok(sid)
}
@@ -2740,11 +2740,14 @@ impl<'a> Parser<'a> {
}

pub fn process_potential_macro_variable(&mut self) {
let ident = match self.token {
let (ident, is_raw) = match self.token {
token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() &&
self.look_ahead(1, |t| t.is_ident()) => {
self.bump();
let name = match self.token { token::Ident(ident) => ident, _ => unreachable!() };
let name = match self.token {
token::Ident(ident, _) => ident,
_ => unreachable!()
};
let mut err = self.fatal(&format!("unknown macro variable `{}`", name));
err.span_label(self.span, "unknown macro variable");
err.emit();
@@ -2753,13 +2756,13 @@ impl<'a> Parser<'a> {
token::Interpolated(ref nt) => {
self.meta_var_span = Some(self.span);
match nt.0 {
token::NtIdent(ident) => ident,
token::NtIdent(ident, is_raw) => (ident, is_raw),
_ => return,
}
}
_ => return,
};
self.token = token::Ident(ident.node);
self.token = token::Ident(ident.node, is_raw);
self.span = ident.span;
}

@@ -4245,7 +4248,7 @@ impl<'a> Parser<'a> {
-> PResult<'a, Option<P<Item>>> {
let token_lo = self.span;
let (ident, def) = match self.token {
token::Ident(ident) if ident.name == keywords::Macro.name() => {
token::Ident(ident, false) if ident.name == keywords::Macro.name() => {
self.bump();
let ident = self.parse_ident()?;
let tokens = if self.check(&token::OpenDelim(token::Brace)) {
@@ -4273,7 +4276,7 @@ impl<'a> Parser<'a> {

(ident, ast::MacroDef { tokens: tokens.into(), legacy: false })
}
token::Ident(ident) if ident.name == "macro_rules" &&
token::Ident(ident, _) if ident.name == "macro_rules" &&
self.look_ahead(1, |t| *t == token::Not) => {
let prev_span = self.prev_span;
self.complain_if_pub_macro(&vis.node, prev_span);
@@ -5078,7 +5081,9 @@ impl<'a> Parser<'a> {
fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> {
let expect_ident = |this: &mut Self| match this.token {
// Preserve hygienic context.
token::Ident(ident) => { let sp = this.span; this.bump(); codemap::respan(sp, ident) }
token::Ident(ident, _) => {
let sp = this.span; this.bump(); codemap::respan(sp, ident)
}
_ => unreachable!()
};
let isolated_self = |this: &mut Self, n| {
@@ -5375,7 +5380,7 @@ impl<'a> Parser<'a> {
VisibilityKind::Inherited => Ok(()),
_ => {
let is_macro_rules: bool = match self.token {
token::Ident(sid) => sid.name == Symbol::intern("macro_rules"),
token::Ident(sid, _) => sid.name == Symbol::intern("macro_rules"),
_ => false,
};
if is_macro_rules {
@@ -7016,7 +7021,7 @@ impl<'a> Parser<'a> {
fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
if self.eat_keyword(keywords::As) {
match self.token {
token::Ident(ident) if ident.name == keywords::Underscore.name() => {
token::Ident(ident, false) if ident.name == keywords::Underscore.name() => {
self.bump(); // `_`
Ok(Some(Ident { name: ident.name.gensymed(), ..ident }))
}
87 changes: 61 additions & 26 deletions src/libsyntax/parse/token.rs
@@ -91,8 +91,8 @@ impl Lit {
}
}

fn ident_can_begin_expr(ident: ast::Ident) -> bool {
let ident_token: Token = Ident(ident);
fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool {
let ident_token: Token = Ident(ident, is_raw);

!ident_token.is_reserved_ident() ||
ident_token.is_path_segment_keyword() ||
@@ -116,8 +116,8 @@ fn ident_can_begin_expr(ident: ast::Ident) -> bool {
].contains(&ident.name)
}

fn ident_can_begin_type(ident: ast::Ident) -> bool {
let ident_token: Token = Ident(ident);
fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
let ident_token: Token = Ident(ident, is_raw);

!ident_token.is_reserved_ident() ||
ident_token.is_path_segment_keyword() ||
@@ -132,6 +132,37 @@ fn ident_can_begin_type(ident: ast::Ident) -> bool {
].contains(&ident.name)
}

pub fn is_path_segment_keyword(id: ast::Ident) -> bool {
id.name == keywords::Super.name() ||
id.name == keywords::SelfValue.name() ||
id.name == keywords::SelfType.name() ||
id.name == keywords::Extern.name() ||
id.name == keywords::Crate.name() ||
id.name == keywords::CrateRoot.name() ||
id.name == keywords::DollarCrate.name()
}

// Returns true for reserved identifiers used internally for elided lifetimes,
// unnamed method parameters, crate root module, error recovery etc.
pub fn is_special_ident(id: ast::Ident) -> bool {
id.name <= keywords::Underscore.name()
}

/// Returns `true` if the token is a keyword used in the language.
pub fn is_used_keyword(id: ast::Ident) -> bool {
id.name >= keywords::As.name() && id.name <= keywords::While.name()
}

/// Returns `true` if the token is a keyword reserved for possible future use.
pub fn is_unused_keyword(id: ast::Ident) -> bool {
id.name >= keywords::Abstract.name() && id.name <= keywords::Yield.name()
}

/// Returns `true` if the token is either a special identifier or a keyword.
pub fn is_reserved_ident(id: ast::Ident) -> bool {
is_special_ident(id) || is_used_keyword(id) || is_unused_keyword(id)
}

#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug)]
pub enum Token {
/* Expression-operator symbols. */
@@ -175,7 +206,7 @@ pub enum Token {
Literal(Lit, Option<ast::Name>),

/* Name components */
Ident(ast::Ident),
Ident(ast::Ident, /* is_raw */ bool),
Lifetime(ast::Ident),

// The `LazyTokenStream` is a pure function of the `Nonterminal`,
@@ -203,6 +234,11 @@ impl Token {
Token::Interpolated(Lrc::new((nt, LazyTokenStream::new())))
}

/// Recovers a `Token` from an `ast::Ident`. This creates a raw identifier if necessary.
pub fn from_ast_ident(ident: ast::Ident) -> Token {
Ident(ident, is_reserved_ident(ident) && !is_path_segment_keyword(ident))
}

/// Returns `true` if the token starts with '>'.
pub fn is_like_gt(&self) -> bool {
match *self {
@@ -214,7 +250,8 @@ impl Token {
/// Returns `true` if the token can appear at the start of an expression.
pub fn can_begin_expr(&self) -> bool {
match *self {
Ident(ident) => ident_can_begin_expr(ident), // value name or keyword
Ident(ident, is_raw) =>
ident_can_begin_expr(ident, is_raw), // value name or keyword
OpenDelim(..) | // tuple, array or block
Literal(..) | // literal
Not | // operator not
@@ -239,7 +276,8 @@ impl Token {
/// Returns `true` if the token can appear at the start of a type.
pub fn can_begin_type(&self) -> bool {
match *self {
Ident(ident) => ident_can_begin_type(ident), // type name or keyword
Ident(ident, is_raw) =>
ident_can_begin_type(ident, is_raw), // type name or keyword
OpenDelim(Paren) | // tuple
OpenDelim(Bracket) | // array
Not | // never
@@ -272,11 +310,11 @@ impl Token {
}
}

pub fn ident(&self) -> Option<ast::Ident> {
pub fn ident(&self) -> Option<(ast::Ident, bool)> {
match *self {
Ident(ident) => Some(ident),
Ident(ident, is_raw) => Some((ident, is_raw)),
Interpolated(ref nt) => match nt.0 {
NtIdent(ident) => Some(ident.node),
NtIdent(ident, is_raw) => Some((ident.node, is_raw)),
_ => None,
},
_ => None,
@@ -351,43 +389,37 @@ impl Token {

/// Returns `true` if the token is a given keyword, `kw`.
pub fn is_keyword(&self, kw: keywords::Keyword) -> bool {
self.ident().map(|ident| ident.name == kw.name()).unwrap_or(false)
self.ident().map(|(ident, is_raw)| ident.name == kw.name() && !is_raw).unwrap_or(false)
}

pub fn is_path_segment_keyword(&self) -> bool {
match self.ident() {
Some(id) => id.name == keywords::Super.name() ||
id.name == keywords::SelfValue.name() ||
id.name == keywords::SelfType.name() ||
id.name == keywords::Extern.name() ||
id.name == keywords::Crate.name() ||
id.name == keywords::CrateRoot.name() ||
id.name == keywords::DollarCrate.name(),
None => false,
Some((id, false)) => is_path_segment_keyword(id),
_ => false,
}
}

// Returns true for reserved identifiers used internally for elided lifetimes,
// unnamed method parameters, crate root module, error recovery etc.
pub fn is_special_ident(&self) -> bool {
match self.ident() {
Some(id) => id.name <= keywords::Underscore.name(),
Some((id, false)) => is_special_ident(id),
_ => false,
}
}

/// Returns `true` if the token is a keyword used in the language.
pub fn is_used_keyword(&self) -> bool {
match self.ident() {
Some(id) => id.name >= keywords::As.name() && id.name <= keywords::While.name(),
Some((id, false)) => is_used_keyword(id),
_ => false,
}
}

/// Returns `true` if the token is a keyword reserved for possible future use.
pub fn is_unused_keyword(&self) -> bool {
match self.ident() {
Some(id) => id.name >= keywords::Abstract.name() && id.name <= keywords::Yield.name(),
Some((id, false)) => is_unused_keyword(id),
_ => false,
}
}
@@ -460,7 +492,10 @@ impl Token {

/// Returns `true` if the token is either a special identifier or a keyword.
pub fn is_reserved_ident(&self) -> bool {
self.is_special_ident() || self.is_used_keyword() || self.is_unused_keyword()
match self.ident() {
Some((id, false)) => is_reserved_ident(id),
_ => false,
}
}

pub fn interpolated_to_tokenstream(&self, sess: &ParseSess, span: Span)
@@ -496,8 +531,8 @@ impl Token {
Nonterminal::NtImplItem(ref item) => {
tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
}
Nonterminal::NtIdent(ident) => {
let token = Token::Ident(ident.node);
Nonterminal::NtIdent(ident, is_raw) => {
let token = Token::Ident(ident.node, is_raw);
tokens = Some(TokenTree::Token(ident.span, token).into());
}
Nonterminal::NtLifetime(lifetime) => {
@@ -529,7 +564,7 @@ pub enum Nonterminal {
NtPat(P<ast::Pat>),
NtExpr(P<ast::Expr>),
NtTy(P<ast::Ty>),
NtIdent(ast::SpannedIdent),
NtIdent(ast::SpannedIdent, /* is_raw */ bool),
/// Stuff inside brackets for attributes
NtMeta(ast::MetaItem),
NtPath(ast::Path),
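A consequence of threading `is_raw` through `Token::ident()` and the keyword predicates above: a raw identifier never answers `true` to `is_keyword`, `is_reserved_ident`, and friends, so `r#match` behaves like a plain name. A simplified, standalone sketch of that predicate shape (not the compiler's actual types):

```rust
// Simplified stand-ins for the real `Token` and identifier types.
#[derive(Clone, Copy)]
struct Ident<'a> { name: &'a str }

enum Token<'a> { Ident(Ident<'a>, /* is_raw */ bool) }

impl<'a> Token<'a> {
    // Mirrors the new rule: keyword checks only ever match non-raw identifiers.
    fn is_keyword(&self, kw: &str) -> bool {
        match *self {
            Token::Ident(id, is_raw) => !is_raw && id.name == kw,
        }
    }
}

fn main() {
    let raw = Token::Ident(Ident { name: "match" }, true);
    let plain = Token::Ident(Ident { name: "match" }, false);
    assert!(!raw.is_keyword("match"));   // `r#match` is an ordinary name
    assert!(plain.is_keyword("match"));  // bare `match` is still the keyword
}
```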
40 changes: 21 additions & 19 deletions src/libsyntax/print/pprust.rs
@@ -250,7 +250,8 @@ pub fn token_to_string(tok: &Token) -> String {
}

/* Name components */
token::Ident(s) => s.to_string(),
token::Ident(s, false) => s.to_string(),
token::Ident(s, true) => format!("r#{}", s),
token::Lifetime(s) => s.to_string(),

/* Other */
@@ -261,24 +262,25 @@ pub fn token_to_string(tok: &Token) -> String {
token::Shebang(s) => format!("/* shebang: {}*/", s),

token::Interpolated(ref nt) => match nt.0 {
token::NtExpr(ref e) => expr_to_string(e),
token::NtMeta(ref e) => meta_item_to_string(e),
token::NtTy(ref e) => ty_to_string(e),
token::NtPath(ref e) => path_to_string(e),
token::NtItem(ref e) => item_to_string(e),
token::NtBlock(ref e) => block_to_string(e),
token::NtStmt(ref e) => stmt_to_string(e),
token::NtPat(ref e) => pat_to_string(e),
token::NtIdent(ref e) => ident_to_string(e.node),
token::NtTT(ref tree) => tt_to_string(tree.clone()),
token::NtArm(ref e) => arm_to_string(e),
token::NtImplItem(ref e) => impl_item_to_string(e),
token::NtTraitItem(ref e) => trait_item_to_string(e),
token::NtGenerics(ref e) => generic_params_to_string(&e.params),
token::NtWhereClause(ref e) => where_clause_to_string(e),
token::NtArg(ref e) => arg_to_string(e),
token::NtVis(ref e) => vis_to_string(e),
token::NtLifetime(ref e) => lifetime_to_string(e),
token::NtExpr(ref e) => expr_to_string(e),
token::NtMeta(ref e) => meta_item_to_string(e),
token::NtTy(ref e) => ty_to_string(e),
token::NtPath(ref e) => path_to_string(e),
token::NtItem(ref e) => item_to_string(e),
token::NtBlock(ref e) => block_to_string(e),
token::NtStmt(ref e) => stmt_to_string(e),
token::NtPat(ref e) => pat_to_string(e),
token::NtIdent(ref e, false) => ident_to_string(e.node),
token::NtIdent(ref e, true) => format!("r#{}", ident_to_string(e.node)),
token::NtTT(ref tree) => tt_to_string(tree.clone()),
token::NtArm(ref e) => arm_to_string(e),
token::NtImplItem(ref e) => impl_item_to_string(e),
token::NtTraitItem(ref e) => trait_item_to_string(e),
token::NtGenerics(ref e) => generic_params_to_string(&e.params),
token::NtWhereClause(ref e) => where_clause_to_string(e),
token::NtArg(ref e) => arg_to_string(e),
token::NtVis(ref e) => vis_to_string(e),
token::NtLifetime(ref e) => lifetime_to_string(e),
}
}
}
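Pretty-printing round-trips the prefix: a raw `Ident` token or `NtIdent` prints back as `r#<name>`, which is what the `stringify!` assertion in the macros run-pass test relies on:

```rust
#![feature(raw_identifiers)]

fn main() {
    // Token-to-string printing keeps the `r#` prefix.
    assert_eq!("r#struct", stringify!(r#struct));
}
```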
2 changes: 1 addition & 1 deletion src/libsyntax/tokenstream.rs
@@ -684,7 +684,7 @@ mod tests {
with_globals(|| {
let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
let test1: TokenStream =
TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a"))).into();
TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a"), false)).into();
let test2 = string_to_ts("foo(bar::baz)");

assert_eq!(test0.is_empty(), true);
3 changes: 2 additions & 1 deletion src/libsyntax_ext/concat_idents.rs
@@ -44,7 +44,8 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt,
}
} else {
match *e {
TokenTree::Token(_, token::Ident(ident)) => res_str.push_str(&ident.name.as_str()),
TokenTree::Token(_, token::Ident(ident, _)) =>
res_str.push_str(&ident.name.as_str()),
_ => {
cx.span_err(sp, "concat_idents! requires ident args.");
return DummyResult::expr(sp);
2 changes: 1 addition & 1 deletion src/libsyntax_ext/format.rs
@@ -149,7 +149,7 @@ fn parse_args(ecx: &mut ExtCtxt,
if named || (p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq)) {
named = true;
let ident = match p.token {
token::Ident(i) => {
token::Ident(i, _) => {
p.bump();
i
}
2 changes: 1 addition & 1 deletion src/test/parse-fail/raw-str-delim.rs
@@ -11,5 +11,5 @@
// compile-flags: -Z parse-only

static s: &'static str =
r#x"#"x# //~ ERROR found invalid character; only `#` is allowed in raw string delimitation
r#~"#"~# //~ ERROR found invalid character; only `#` is allowed in raw string delimitation
;
2 changes: 1 addition & 1 deletion src/test/run-pass-fulldeps/auxiliary/roman_numerals.rs
@@ -49,7 +49,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
}

let text = match args[0] {
TokenTree::Token(_, token::Ident(s)) => s.to_string(),
TokenTree::Token(_, token::Ident(s, _)) => s.to_string(),
_ => {
cx.span_err(sp, "argument should be a single identifier");
return DummyResult::any(sp);
26 changes: 26 additions & 0 deletions src/test/run-pass/rfc-2151-raw-identifiers/attr.rs
@@ -0,0 +1,26 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![feature(raw_identifiers)]

use std::mem;

#[r#repr(r#C, r#packed)]
struct Test {
a: bool, b: u64
}

#[r#derive(r#Debug)]
struct Test2(u32);

pub fn main() {
assert_eq!(mem::size_of::<Test>(), 9);
assert_eq!("Test2(123)", format!("{:?}", Test2(123)));
}
31 changes: 31 additions & 0 deletions src/test/run-pass/rfc-2151-raw-identifiers/basic.rs
@@ -0,0 +1,31 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![feature(raw_identifiers)]

fn r#fn(r#match: u32) -> u32 {
r#match
}

pub fn main() {
let r#struct = 1;
assert_eq!(1, r#struct);

let foo = 2;
assert_eq!(2, r#foo);

let r#bar = 3;
assert_eq!(3, bar);

assert_eq!(4, r#fn(4));

let r#true = false;
assert_eq!(r#true, false);
}
43 changes: 43 additions & 0 deletions src/test/run-pass/rfc-2151-raw-identifiers/items.rs
@@ -0,0 +1,43 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![feature(raw_identifiers)]

#[derive(Debug, PartialEq, Eq)]
struct IntWrapper(u32);

#[derive(Debug, Ord, PartialOrd, PartialEq, Eq, Hash, Copy, Clone, Default)]
struct HasKeywordField {
r#struct: u32,
}

struct Generic<r#T>(T);

trait Trait {
fn r#trait(&self) -> u32;
}
impl Trait for Generic<u32> {
fn r#trait(&self) -> u32 {
self.0
}
}

pub fn main() {
assert_eq!(IntWrapper(1), r#IntWrapper(1));

match IntWrapper(2) {
r#IntWrapper(r#struct) => assert_eq!(2, r#struct),
}

assert_eq!("HasKeywordField { struct: 3 }", format!("{:?}", HasKeywordField { r#struct: 3 }));

assert_eq!(4, Generic(4).0);
assert_eq!(5, Generic(5).r#trait());
}
48 changes: 48 additions & 0 deletions src/test/run-pass/rfc-2151-raw-identifiers/macros.rs
@@ -0,0 +1,48 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![feature(decl_macro)]
#![feature(raw_identifiers)]

r#macro_rules! r#struct {
($r#struct:expr) => { $r#struct }
}

macro_rules! old_macro {
($a:expr) => {$a}
}

macro r#decl_macro($r#fn:expr) {
$r#fn
}

macro passthrough($id:ident) {
$id
}

macro_rules! test_pat_match {
(a) => { 6 };
(r#a) => { 7 };
}

pub fn main() {
r#println!("{struct}", r#struct = 1);
assert_eq!(2, r#struct!(2));
assert_eq!(3, r#old_macro!(3));
assert_eq!(4, decl_macro!(4));

let r#match = 5;
assert_eq!(5, passthrough!(r#match));

assert_eq!("r#struct", stringify!(r#struct));

assert_eq!(6, test_pat_match!(a));
assert_eq!(7, test_pat_match!(r#a));
}
14 changes: 14 additions & 0 deletions src/test/ui/feature-gate-raw-identifiers.rs
@@ -0,0 +1,14 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

fn main() {
let r#foo = 3; //~ ERROR raw identifiers are experimental and subject to change
println!("{}", foo);
}
11 changes: 11 additions & 0 deletions src/test/ui/feature-gate-raw-identifiers.stderr
@@ -0,0 +1,11 @@
error[E0658]: raw identifiers are experimental and subject to change (see issue #48589)
--> $DIR/feature-gate-raw-identifiers.rs:12:9
|
LL | let r#foo = 3; //~ ERROR raw identifiers are experimental and subject to change
| ^^^^^
|
= help: add #![feature(raw_identifiers)] to the crate attributes to enable

error: aborting due to previous error

For more information about this error, try `rustc --explain E0658`.
26 changes: 26 additions & 0 deletions src/test/ui/raw-literal-keywords.rs
@@ -0,0 +1,26 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// compile-flags: -Z parse-only

#![feature(dyn_trait)]
#![feature(raw_identifiers)]

fn test_if() {
r#if true { } //~ ERROR found `true`
}

fn test_struct() {
r#struct Test; //~ ERROR found `Test`
}

fn test_union() {
r#union Test; //~ ERROR found `Test`
}
20 changes: 20 additions & 0 deletions src/test/ui/raw-literal-keywords.stderr
@@ -0,0 +1,20 @@
error: expected one of `!`, `.`, `::`, `;`, `?`, `{`, `}`, or an operator, found `true`
--> $DIR/raw-literal-keywords.rs:17:10
|
LL | r#if true { } //~ ERROR found `true`
| ^^^^ expected one of 8 possible tokens here

error: expected one of `!`, `.`, `::`, `;`, `?`, `{`, `}`, or an operator, found `Test`
--> $DIR/raw-literal-keywords.rs:21:14
|
LL | r#struct Test; //~ ERROR found `Test`
| ^^^^ expected one of 8 possible tokens here

error: expected one of `!`, `.`, `::`, `;`, `?`, `{`, `}`, or an operator, found `Test`
--> $DIR/raw-literal-keywords.rs:25:13
|
LL | r#union Test; //~ ERROR found `Test`
| ^^^^ expected one of 8 possible tokens here

error: aborting due to 3 previous errors

17 changes: 17 additions & 0 deletions src/test/ui/raw-literal-self.rs
@@ -0,0 +1,17 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// compile-flags: -Z parse-only

#![feature(raw_identifiers)]

fn self_test(r#self: u32) {
//~^ ERROR `r#self` is not currently supported.
}
8 changes: 8 additions & 0 deletions src/test/ui/raw-literal-self.stderr
@@ -0,0 +1,8 @@
error: `r#self` is not currently supported.
--> $DIR/raw-literal-self.rs:15:14
|
LL | fn self_test(r#self: u32) {
| ^^^^^^

error: aborting due to previous error

15 changes: 15 additions & 0 deletions src/test/ui/raw-literal-underscore.rs
@@ -0,0 +1,15 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// compile-flags: -Z parse-only

fn underscore_test(r#_: u32) {
//~^ ERROR `r#_` is not currently supported.
}
8 changes: 8 additions & 0 deletions src/test/ui/raw-literal-underscore.stderr
@@ -0,0 +1,8 @@
error: `r#_` is not currently supported.
--> $DIR/raw-literal-underscore.rs:13:20
|
LL | fn underscore_test(r#_: u32) {
| ^^^

error: aborting due to previous error