diff --git a/src/libcollections/range.rs b/src/libcollections/range.rs index 1df4ace377707..e4b94a1d70ee4 100644 --- a/src/libcollections/range.rs +++ b/src/libcollections/range.rs @@ -14,7 +14,7 @@ //! Range syntax. -use core::ops::{RangeFull, Range, RangeTo, RangeFrom}; +use core::ops::{RangeFull, Range, RangeTo, RangeFrom, RangeInclusive, RangeToInclusive}; use Bound::{self, Excluded, Included, Unbounded}; /// **RangeArgument** is implemented by Rust's built-in range types, produced @@ -105,6 +105,32 @@ impl<T> RangeArgument<T> for Range<T> { } } +#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")] +impl<T> RangeArgument<T> for RangeInclusive<T> { + fn start(&self) -> Bound<&T> { + match *self { + RangeInclusive::Empty{ ref at } => Included(at), + RangeInclusive::NonEmpty { ref start, .. } => Included(start), + } + } + fn end(&self) -> Bound<&T> { + match *self { + RangeInclusive::Empty{ ref at } => Excluded(at), + RangeInclusive::NonEmpty { ref end, .. 
} => Included(end), + } + } +} + +#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")] +impl<T> RangeArgument<T> for RangeToInclusive<T> { + fn start(&self) -> Bound<&T> { + Unbounded + } + fn end(&self) -> Bound<&T> { + Included(&self.end) + } +} + impl<T> RangeArgument<T> for (Bound<T>, Bound<T>) { fn start(&self) -> Bound<&T> { match *self { diff --git a/src/libcollections/string.rs b/src/libcollections/string.rs index 6839b698a5611..4b37aef860d72 100644 --- a/src/libcollections/string.rs +++ b/src/libcollections/string.rs @@ -1483,6 +1483,15 @@ impl FromIterator<char> for String { } } +#[stable(feature = "string_from_iter_by_ref", since = "1.17.0")] +impl<'a> FromIterator<&'a char> for String { + fn from_iter<I: IntoIterator<Item = &'a char>>(iter: I) -> String { + let mut buf = String::new(); + buf.extend(iter); + buf + } +} + #[stable(feature = "rust1", since = "1.0.0")] impl<'a> FromIterator<&'a str> for String { fn from_iter<I: IntoIterator<Item = &'a str>>(iter: I) -> String { diff --git a/src/libcollectionstest/btree/map.rs b/src/libcollectionstest/btree/map.rs index f33923f996319..2c899d96940ec 100644 --- a/src/libcollectionstest/btree/map.rs +++ b/src/libcollectionstest/btree/map.rs @@ -178,6 +178,43 @@ fn test_range_small() { assert_eq!(j, size - 2); } +#[test] +fn test_range_inclusive() { + let size = 500; + + let map: BTreeMap<_, _> = (0...size).map(|i| (i, i)).collect(); + + fn check<'a, L, R>(lhs: L, rhs: R) + where L: IntoIterator<Item=(&'a i32, &'a i32)>, + R: IntoIterator<Item=(&'a i32, &'a i32)>, + { + let lhs: Vec<_> = lhs.into_iter().collect(); + let rhs: Vec<_> = rhs.into_iter().collect(); + assert_eq!(lhs, rhs); + } + + check(map.range(size + 1...size + 1), vec![]); + check(map.range(size...size), vec![(&size, &size)]); + check(map.range(size...size + 1), vec![(&size, &size)]); + check(map.range(0...0), vec![(&0, &0)]); + check(map.range(0...size - 1), map.range(..size)); + 
check(map.range(-1...-1), vec![]); + check(map.range(-1...size), map.range(..)); + check(map.range(...size), map.range(..)); + check(map.range(...200), map.range(..201)); + check(map.range(5...8), vec![(&5, &5), (&6, &6), (&7, &7), (&8, &8)]); + check(map.range(-1...0), vec![(&0, &0)]); + check(map.range(-1...2), vec![(&0, &0), (&1, &1), (&2, &2)]); +} + +#[test] +fn test_range_inclusive_max_value() { + let max = ::std::usize::MAX; + let map: BTreeMap<_, _> = vec![(max, 0)].into_iter().collect(); + + assert_eq!(map.range(max...max).collect::<Vec<_>>(), &[(&max, &0)]); +} + #[test] fn test_range_equal_empty_cases() { let map: BTreeMap<_, _> = (0..5).map(|i| (i, i)).collect(); diff --git a/src/libcollectionstest/lib.rs b/src/libcollectionstest/lib.rs index 57e3c2df059e1..849d240169169 100644 --- a/src/libcollectionstest/lib.rs +++ b/src/libcollectionstest/lib.rs @@ -14,6 +14,7 @@ #![feature(binary_heap_peek_mut_pop)] #![feature(box_syntax)] #![feature(btree_range)] +#![feature(inclusive_range_syntax)] #![feature(collection_placement)] #![feature(collections)] #![feature(collections_bound)] diff --git a/src/libcollectionstest/vec.rs b/src/libcollectionstest/vec.rs index edeedf1d40baf..06d70800d3925 100644 --- a/src/libcollectionstest/vec.rs +++ b/src/libcollectionstest/vec.rs @@ -507,6 +507,56 @@ fn test_drain_range() { assert_eq!(v, &[(), ()]); } +#[test] +fn test_drain_inclusive_range() { + let mut v = vec!['a', 'b', 'c', 'd', 'e']; + for _ in v.drain(1...3) { + } + assert_eq!(v, &['a', 'e']); + + let mut v: Vec<_> = (0...5).map(|x| x.to_string()).collect(); + for _ in v.drain(1...5) { + } + assert_eq!(v, &["0".to_string()]); + + let mut v: Vec<String> = (0...5).map(|x| x.to_string()).collect(); + for _ in v.drain(0...5) { + } + assert_eq!(v, Vec::<String>::new()); + + let mut v: Vec<_> = (0...5).map(|x| x.to_string()).collect(); + for _ in v.drain(0...3) { + } + assert_eq!(v, &["4".to_string(), "5".to_string()]); + + let mut v: Vec<_> = (0...1).map(|x| 
x.to_string()).collect(); + for _ in v.drain(...0) { + } + assert_eq!(v, &["1".to_string()]); +} + +#[test] +fn test_drain_max_vec_size() { + let mut v = Vec::<()>::with_capacity(usize::max_value()); + unsafe { v.set_len(usize::max_value()); } + for _ in v.drain(usize::max_value() - 1..) { + } + assert_eq!(v.len(), usize::max_value() - 1); + + let mut v = Vec::<()>::with_capacity(usize::max_value()); + unsafe { v.set_len(usize::max_value()); } + for _ in v.drain(usize::max_value() - 1...usize::max_value() - 1) { + } + assert_eq!(v.len(), usize::max_value() - 1); +} + +#[test] +#[should_panic] +fn test_drain_inclusive_out_of_bounds() { + let mut v = vec![1, 2, 3, 4, 5]; + v.drain(5...5); +} + #[test] fn test_into_boxed_slice() { let xs = vec![1, 2, 3]; diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs index f962c888f42cc..b33caefbcd2ec 100644 --- a/src/libproc_macro/lib.rs +++ b/src/libproc_macro/lib.rs @@ -173,8 +173,7 @@ impl FromStr for TokenStream { __internal::with_parse_sess(|sess| { let src = src.to_string(); let name = "<proc-macro source code>".to_string(); - let tts = try!(parse::parse_tts_from_source_str(name, src, sess) - .map_err(parse_to_lex_err)); + let tts = parse::parse_tts_from_source_str(name, src, sess); Ok(__internal::token_stream_wrap(tts.into_iter().collect())) }) diff --git a/src/libproc_macro_plugin/qquote.rs b/src/libproc_macro_plugin/qquote.rs index 300b4df892943..dc7c96a4e2767 100644 --- a/src/libproc_macro_plugin/qquote.rs +++ b/src/libproc_macro_plugin/qquote.rs @@ -119,7 +119,6 @@ impl Quote for TokenTree { ::syntax::tokenstream::TokenTree::Delimited(::syntax::ext::quote::rt::DUMMY_SP, (quote delimited)) }, - _ => panic!("unexpected `TokenTree::Sequence` in `qquote`"), } } } diff --git a/src/librustc/infer/error_reporting.rs b/src/librustc/infer/error_reporting/mod.rs similarity index 60% rename from src/librustc/infer/error_reporting.rs rename to src/librustc/infer/error_reporting/mod.rs index 
f48ff87689fb0..21139c8dde2a4 100644 --- a/src/librustc/infer/error_reporting.rs +++ b/src/librustc/infer/error_reporting/mod.rs @@ -55,32 +55,25 @@ //! ported to this system, and which relies on string concatenation at the //! time of error detection. -use super::InferCtxt; -use super::TypeTrace; -use super::SubregionOrigin; -use super::RegionVariableOrigin; -use super::ValuePairs; -use super::region_inference::RegionResolutionError; -use super::region_inference::ConcreteFailure; -use super::region_inference::SubSupConflict; -use super::region_inference::GenericBoundFailure; -use super::region_inference::GenericKind; +use infer; +use super::{InferCtxt, TypeTrace, SubregionOrigin, RegionVariableOrigin, ValuePairs}; +use super::region_inference::{RegionResolutionError, ConcreteFailure, SubSupConflict, + GenericBoundFailure, GenericKind}; -use hir::map as hir_map; +use std::fmt; use hir; - +use hir::map as hir_map; use hir::def_id::DefId; -use infer; use middle::region; use traits::{ObligationCause, ObligationCauseCode}; use ty::{self, TyCtxt, TypeFoldable}; use ty::{Region, Issue32330}; use ty::error::TypeError; - -use std::fmt; use syntax_pos::{Pos, Span}; use errors::DiagnosticBuilder; +mod note; + impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn note_and_explain_region(self, err: &mut DiagnosticBuilder, @@ -584,289 +577,6 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { err.emit(); } - fn report_concrete_failure(&self, - origin: SubregionOrigin<'tcx>, - sub: &'tcx Region, - sup: &'tcx Region) - -> DiagnosticBuilder<'tcx> { - match origin { - infer::Subtype(trace) => { - let terr = TypeError::RegionsDoesNotOutlive(sup, sub); - self.report_and_explain_type_error(trace, &terr) - } - infer::Reborrow(span) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0312, - "lifetime of reference outlives \ - lifetime of borrowed content..."); - self.tcx.note_and_explain_region(&mut err, - "...the reference is valid for ", - sub, - "..."); - 
self.tcx.note_and_explain_region(&mut err, - "...but the borrowed content is only valid for ", - sup, - ""); - err - } - infer::ReborrowUpvar(span, ref upvar_id) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0313, - "lifetime of borrowed pointer outlives \ - lifetime of captured variable `{}`...", - self.tcx.local_var_name_str(upvar_id.var_id)); - self.tcx.note_and_explain_region(&mut err, - "...the borrowed pointer is valid for ", - sub, - "..."); - self.tcx.note_and_explain_region(&mut err, - &format!("...but `{}` is only valid for ", - self.tcx.local_var_name_str(upvar_id.var_id)), - sup, - ""); - err - } - infer::InfStackClosure(span) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0314, - "closure outlives stack frame"); - self.tcx.note_and_explain_region(&mut err, - "...the closure must be valid for ", - sub, - "..."); - self.tcx.note_and_explain_region(&mut err, - "...but the closure's stack frame is only valid for ", - sup, - ""); - err - } - infer::InvokeClosure(span) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0315, - "cannot invoke closure outside of its lifetime"); - self.tcx.note_and_explain_region(&mut err, - "the closure is only valid for ", - sup, - ""); - err - } - infer::DerefPointer(span) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0473, - "dereference of reference outside its lifetime"); - self.tcx.note_and_explain_region(&mut err, - "the reference is only valid for ", - sup, - ""); - err - } - infer::FreeVariable(span, id) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0474, - "captured variable `{}` does not outlive the enclosing closure", - self.tcx.local_var_name_str(id)); - self.tcx.note_and_explain_region(&mut err, - "captured variable is valid for ", - sup, - ""); - self.tcx.note_and_explain_region(&mut err, - "closure is valid for ", - sub, - ""); - err - } - infer::IndexSlice(span) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0475, - "index of slice 
outside its lifetime"); - self.tcx.note_and_explain_region(&mut err, - "the slice is only valid for ", - sup, - ""); - err - } - infer::RelateObjectBound(span) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0476, - "lifetime of the source pointer does not outlive \ - lifetime bound of the object type"); - self.tcx.note_and_explain_region(&mut err, - "object type is valid for ", - sub, - ""); - self.tcx.note_and_explain_region(&mut err, - "source pointer is only valid for ", - sup, - ""); - err - } - infer::RelateParamBound(span, ty) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0477, - "the type `{}` does not fulfill the required lifetime", - self.ty_to_string(ty)); - self.tcx.note_and_explain_region(&mut err, - "type must outlive ", - sub, - ""); - err - } - infer::RelateRegionParamBound(span) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0478, - "lifetime bound not satisfied"); - self.tcx.note_and_explain_region(&mut err, - "lifetime parameter instantiated with ", - sup, - ""); - self.tcx.note_and_explain_region(&mut err, - "but lifetime parameter must outlive ", - sub, - ""); - err - } - infer::RelateDefaultParamBound(span, ty) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0479, - "the type `{}` (provided as the value of \ - a type parameter) is not valid at this point", - self.ty_to_string(ty)); - self.tcx.note_and_explain_region(&mut err, - "type must outlive ", - sub, - ""); - err - } - infer::CallRcvr(span) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0480, - "lifetime of method receiver does not outlive \ - the method call"); - self.tcx.note_and_explain_region(&mut err, - "the receiver is only valid for ", - sup, - ""); - err - } - infer::CallArg(span) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0481, - "lifetime of function argument does not outlive \ - the function call"); - self.tcx.note_and_explain_region(&mut err, - "the function argument is only valid for ", - sup, - 
""); - err - } - infer::CallReturn(span) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0482, - "lifetime of return value does not outlive \ - the function call"); - self.tcx.note_and_explain_region(&mut err, - "the return value is only valid for ", - sup, - ""); - err - } - infer::Operand(span) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0483, - "lifetime of operand does not outlive \ - the operation"); - self.tcx.note_and_explain_region(&mut err, - "the operand is only valid for ", - sup, - ""); - err - } - infer::AddrOf(span) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0484, - "reference is not valid at the time of borrow"); - self.tcx.note_and_explain_region(&mut err, - "the borrow is only valid for ", - sup, - ""); - err - } - infer::AutoBorrow(span) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0485, - "automatically reference is not valid \ - at the time of borrow"); - self.tcx.note_and_explain_region(&mut err, - "the automatic borrow is only valid for ", - sup, - ""); - err - } - infer::ExprTypeIsNotInScope(t, span) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0486, - "type of expression contains references \ - that are not valid during the expression: `{}`", - self.ty_to_string(t)); - self.tcx.note_and_explain_region(&mut err, - "type is only valid for ", - sup, - ""); - err - } - infer::SafeDestructor(span) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0487, - "unsafe use of destructor: destructor might be called \ - while references are dead"); - // FIXME (22171): terms "super/subregion" are suboptimal - self.tcx.note_and_explain_region(&mut err, - "superregion: ", - sup, - ""); - self.tcx.note_and_explain_region(&mut err, - "subregion: ", - sub, - ""); - err - } - infer::BindingTypeIsNotValidAtDecl(span) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0488, - "lifetime of variable does not enclose its declaration"); - self.tcx.note_and_explain_region(&mut 
err, - "the variable is only valid for ", - sup, - ""); - err - } - infer::ParameterInScope(_, span) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0489, - "type/lifetime parameter not in scope here"); - self.tcx.note_and_explain_region(&mut err, - "the parameter is only valid for ", - sub, - ""); - err - } - infer::DataBorrowed(ty, span) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0490, - "a value of type `{}` is borrowed for too long", - self.ty_to_string(ty)); - self.tcx.note_and_explain_region(&mut err, "the type is valid for ", sub, ""); - self.tcx.note_and_explain_region(&mut err, "but the borrow lasts for ", sup, ""); - err - } - infer::ReferenceOutlivesReferent(ty, span) => { - let mut err = struct_span_err!(self.tcx.sess, span, E0491, - "in type `{}`, reference has a longer lifetime \ - than the data it references", - self.ty_to_string(ty)); - self.tcx.note_and_explain_region(&mut err, - "the pointer is valid for ", - sub, - ""); - self.tcx.note_and_explain_region(&mut err, - "but the referenced data is only valid for ", - sup, - ""); - err - } - infer::CompareImplMethodObligation { span, - item_name, - impl_item_def_id, - trait_item_def_id, - lint_id } => { - self.report_extra_impl_obligation(span, - item_name, - impl_item_def_id, - trait_item_def_id, - &format!("`{}: {}`", sup, sub), - lint_id) - } - } - } - fn report_sub_sup_conflict(&self, var_origin: RegionVariableOrigin, sub_origin: SubregionOrigin<'tcx>, @@ -939,170 +649,6 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { due to conflicting requirements", var_description) } - - fn note_region_origin(&self, err: &mut DiagnosticBuilder, origin: &SubregionOrigin<'tcx>) { - match *origin { - infer::Subtype(ref trace) => { - if let Some((expected, found)) = self.values_str(&trace.values) { - // FIXME: do we want a "the" here? 
- err.span_note( - trace.cause.span, - &format!("...so that {} (expected {}, found {})", - trace.cause.as_requirement_str(), expected, found)); - } else { - // FIXME: this really should be handled at some earlier stage. Our - // handling of region checking when type errors are present is - // *terrible*. - - err.span_note( - trace.cause.span, - &format!("...so that {}", - trace.cause.as_requirement_str())); - } - } - infer::Reborrow(span) => { - err.span_note( - span, - "...so that reference does not outlive \ - borrowed content"); - } - infer::ReborrowUpvar(span, ref upvar_id) => { - err.span_note( - span, - &format!( - "...so that closure can access `{}`", - self.tcx.local_var_name_str(upvar_id.var_id) - .to_string())); - } - infer::InfStackClosure(span) => { - err.span_note( - span, - "...so that closure does not outlive its stack frame"); - } - infer::InvokeClosure(span) => { - err.span_note( - span, - "...so that closure is not invoked outside its lifetime"); - } - infer::DerefPointer(span) => { - err.span_note( - span, - "...so that pointer is not dereferenced \ - outside its lifetime"); - } - infer::FreeVariable(span, id) => { - err.span_note( - span, - &format!("...so that captured variable `{}` \ - does not outlive the enclosing closure", - self.tcx.local_var_name_str(id))); - } - infer::IndexSlice(span) => { - err.span_note( - span, - "...so that slice is not indexed outside the lifetime"); - } - infer::RelateObjectBound(span) => { - err.span_note( - span, - "...so that it can be closed over into an object"); - } - infer::CallRcvr(span) => { - err.span_note( - span, - "...so that method receiver is valid for the method call"); - } - infer::CallArg(span) => { - err.span_note( - span, - "...so that argument is valid for the call"); - } - infer::CallReturn(span) => { - err.span_note( - span, - "...so that return value is valid for the call"); - } - infer::Operand(span) => { - err.span_note( - span, - "...so that operand is valid for operation"); - } - 
infer::AddrOf(span) => { - err.span_note( - span, - "...so that reference is valid \ - at the time of borrow"); - } - infer::AutoBorrow(span) => { - err.span_note( - span, - "...so that auto-reference is valid \ - at the time of borrow"); - } - infer::ExprTypeIsNotInScope(t, span) => { - err.span_note( - span, - &format!("...so type `{}` of expression is valid during the \ - expression", - self.ty_to_string(t))); - } - infer::BindingTypeIsNotValidAtDecl(span) => { - err.span_note( - span, - "...so that variable is valid at time of its declaration"); - } - infer::ParameterInScope(_, span) => { - err.span_note( - span, - "...so that a type/lifetime parameter is in scope here"); - } - infer::DataBorrowed(ty, span) => { - err.span_note( - span, - &format!("...so that the type `{}` is not borrowed for too long", - self.ty_to_string(ty))); - } - infer::ReferenceOutlivesReferent(ty, span) => { - err.span_note( - span, - &format!("...so that the reference type `{}` \ - does not outlive the data it points at", - self.ty_to_string(ty))); - } - infer::RelateParamBound(span, t) => { - err.span_note( - span, - &format!("...so that the type `{}` \ - will meet its required lifetime bounds", - self.ty_to_string(t))); - } - infer::RelateDefaultParamBound(span, t) => { - err.span_note( - span, - &format!("...so that type parameter \ - instantiated with `{}`, \ - will meet its declared lifetime bounds", - self.ty_to_string(t))); - } - infer::RelateRegionParamBound(span) => { - err.span_note( - span, - "...so that the declared lifetime parameter bounds \ - are satisfied"); - } - infer::SafeDestructor(span) => { - err.span_note( - span, - "...so that references are valid when the destructor \ - runs"); - } - infer::CompareImplMethodObligation { span, .. 
} => { - err.span_note( - span, - "...so that the definition in impl matches the definition from the trait"); - } - } - } } impl<'tcx> ObligationCause<'tcx> { diff --git a/src/librustc/infer/error_reporting/note.rs b/src/librustc/infer/error_reporting/note.rs new file mode 100644 index 0000000000000..8f8b2603dad84 --- /dev/null +++ b/src/librustc/infer/error_reporting/note.rs @@ -0,0 +1,432 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use infer::{self, InferCtxt, SubregionOrigin}; +use ty::Region; +use ty::error::TypeError; +use errors::DiagnosticBuilder; + +impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { + pub(super) fn note_region_origin(&self, + err: &mut DiagnosticBuilder, + origin: &SubregionOrigin<'tcx>) { + match *origin { + infer::Subtype(ref trace) => { + if let Some((expected, found)) = self.values_str(&trace.values) { + // FIXME: do we want a "the" here? + err.span_note(trace.cause.span, + &format!("...so that {} (expected {}, found {})", + trace.cause.as_requirement_str(), + expected, + found)); + } else { + // FIXME: this really should be handled at some earlier stage. Our + // handling of region checking when type errors are present is + // *terrible*. 
+ + err.span_note(trace.cause.span, + &format!("...so that {}", trace.cause.as_requirement_str())); + } + } + infer::Reborrow(span) => { + err.span_note(span, + "...so that reference does not outlive borrowed content"); + } + infer::ReborrowUpvar(span, ref upvar_id) => { + err.span_note(span, + &format!("...so that closure can access `{}`", + self.tcx + .local_var_name_str(upvar_id.var_id) + .to_string())); + } + infer::InfStackClosure(span) => { + err.span_note(span, "...so that closure does not outlive its stack frame"); + } + infer::InvokeClosure(span) => { + err.span_note(span, + "...so that closure is not invoked outside its lifetime"); + } + infer::DerefPointer(span) => { + err.span_note(span, + "...so that pointer is not dereferenced outside its lifetime"); + } + infer::FreeVariable(span, id) => { + err.span_note(span, + &format!("...so that captured variable `{}` does not outlive the \ + enclosing closure", + self.tcx.local_var_name_str(id))); + } + infer::IndexSlice(span) => { + err.span_note(span, "...so that slice is not indexed outside the lifetime"); + } + infer::RelateObjectBound(span) => { + err.span_note(span, "...so that it can be closed over into an object"); + } + infer::CallRcvr(span) => { + err.span_note(span, + "...so that method receiver is valid for the method call"); + } + infer::CallArg(span) => { + err.span_note(span, "...so that argument is valid for the call"); + } + infer::CallReturn(span) => { + err.span_note(span, "...so that return value is valid for the call"); + } + infer::Operand(span) => { + err.span_note(span, "...so that operand is valid for operation"); + } + infer::AddrOf(span) => { + err.span_note(span, "...so that reference is valid at the time of borrow"); + } + infer::AutoBorrow(span) => { + err.span_note(span, + "...so that auto-reference is valid at the time of borrow"); + } + infer::ExprTypeIsNotInScope(t, span) => { + err.span_note(span, + &format!("...so type `{}` of expression is valid during the \ + expression", + 
self.ty_to_string(t))); + } + infer::BindingTypeIsNotValidAtDecl(span) => { + err.span_note(span, + "...so that variable is valid at time of its declaration"); + } + infer::ParameterInScope(_, span) => { + err.span_note(span, + "...so that a type/lifetime parameter is in scope here"); + } + infer::DataBorrowed(ty, span) => { + err.span_note(span, + &format!("...so that the type `{}` is not borrowed for too long", + self.ty_to_string(ty))); + } + infer::ReferenceOutlivesReferent(ty, span) => { + err.span_note(span, + &format!("...so that the reference type `{}` does not outlive the \ + data it points at", + self.ty_to_string(ty))); + } + infer::RelateParamBound(span, t) => { + err.span_note(span, + &format!("...so that the type `{}` will meet its required \ + lifetime bounds", + self.ty_to_string(t))); + } + infer::RelateDefaultParamBound(span, t) => { + err.span_note(span, + &format!("...so that type parameter instantiated with `{}`, will \ + meet its declared lifetime bounds", + self.ty_to_string(t))); + } + infer::RelateRegionParamBound(span) => { + err.span_note(span, + "...so that the declared lifetime parameter bounds are satisfied"); + } + infer::SafeDestructor(span) => { + err.span_note(span, + "...so that references are valid when the destructor runs"); + } + infer::CompareImplMethodObligation { span, .. 
} => { + err.span_note(span, + "...so that the definition in impl matches the definition from the \ + trait"); + } + } + } + + pub(super) fn report_concrete_failure(&self, + origin: SubregionOrigin<'tcx>, + sub: &'tcx Region, + sup: &'tcx Region) + -> DiagnosticBuilder<'tcx> { + match origin { + infer::Subtype(trace) => { + let terr = TypeError::RegionsDoesNotOutlive(sup, sub); + self.report_and_explain_type_error(trace, &terr) + } + infer::Reborrow(span) => { + let mut err = struct_span_err!(self.tcx.sess, + span, + E0312, + "lifetime of reference outlives lifetime of \ + borrowed content..."); + self.tcx.note_and_explain_region(&mut err, + "...the reference is valid for ", + sub, + "..."); + self.tcx.note_and_explain_region(&mut err, + "...but the borrowed content is only valid for ", + sup, + ""); + err + } + infer::ReborrowUpvar(span, ref upvar_id) => { + let mut err = struct_span_err!(self.tcx.sess, + span, + E0313, + "lifetime of borrowed pointer outlives lifetime \ + of captured variable `{}`...", + self.tcx.local_var_name_str(upvar_id.var_id)); + self.tcx.note_and_explain_region(&mut err, + "...the borrowed pointer is valid for ", + sub, + "..."); + self.tcx + .note_and_explain_region(&mut err, + &format!("...but `{}` is only valid for ", + self.tcx + .local_var_name_str(upvar_id.var_id)), + sup, + ""); + err + } + infer::InfStackClosure(span) => { + let mut err = + struct_span_err!(self.tcx.sess, span, E0314, "closure outlives stack frame"); + self.tcx.note_and_explain_region(&mut err, + "...the closure must be valid for ", + sub, + "..."); + self.tcx.note_and_explain_region(&mut err, + "...but the closure's stack frame is only valid \ + for ", + sup, + ""); + err + } + infer::InvokeClosure(span) => { + let mut err = struct_span_err!(self.tcx.sess, + span, + E0315, + "cannot invoke closure outside of its lifetime"); + self.tcx + .note_and_explain_region(&mut err, "the closure is only valid for ", sup, ""); + err + } + infer::DerefPointer(span) => { + let 
mut err = struct_span_err!(self.tcx.sess, + span, + E0473, + "dereference of reference outside its lifetime"); + self.tcx + .note_and_explain_region(&mut err, "the reference is only valid for ", sup, ""); + err + } + infer::FreeVariable(span, id) => { + let mut err = struct_span_err!(self.tcx.sess, + span, + E0474, + "captured variable `{}` does not outlive the \ + enclosing closure", + self.tcx.local_var_name_str(id)); + self.tcx + .note_and_explain_region(&mut err, "captured variable is valid for ", sup, ""); + self.tcx.note_and_explain_region(&mut err, "closure is valid for ", sub, ""); + err + } + infer::IndexSlice(span) => { + let mut err = struct_span_err!(self.tcx.sess, + span, + E0475, + "index of slice outside its lifetime"); + self.tcx.note_and_explain_region(&mut err, "the slice is only valid for ", sup, ""); + err + } + infer::RelateObjectBound(span) => { + let mut err = struct_span_err!(self.tcx.sess, + span, + E0476, + "lifetime of the source pointer does not outlive \ + lifetime bound of the object type"); + self.tcx.note_and_explain_region(&mut err, "object type is valid for ", sub, ""); + self.tcx.note_and_explain_region(&mut err, + "source pointer is only valid for ", + sup, + ""); + err + } + infer::RelateParamBound(span, ty) => { + let mut err = struct_span_err!(self.tcx.sess, + span, + E0477, + "the type `{}` does not fulfill the required \ + lifetime", + self.ty_to_string(ty)); + self.tcx.note_and_explain_region(&mut err, "type must outlive ", sub, ""); + err + } + infer::RelateRegionParamBound(span) => { + let mut err = + struct_span_err!(self.tcx.sess, span, E0478, "lifetime bound not satisfied"); + self.tcx.note_and_explain_region(&mut err, + "lifetime parameter instantiated with ", + sup, + ""); + self.tcx.note_and_explain_region(&mut err, + "but lifetime parameter must outlive ", + sub, + ""); + err + } + infer::RelateDefaultParamBound(span, ty) => { + let mut err = struct_span_err!(self.tcx.sess, + span, + E0479, + "the type `{}` 
(provided as the value of a type \ + parameter) is not valid at this point", + self.ty_to_string(ty)); + self.tcx.note_and_explain_region(&mut err, "type must outlive ", sub, ""); + err + } + infer::CallRcvr(span) => { + let mut err = struct_span_err!(self.tcx.sess, + span, + E0480, + "lifetime of method receiver does not outlive the \ + method call"); + self.tcx + .note_and_explain_region(&mut err, "the receiver is only valid for ", sup, ""); + err + } + infer::CallArg(span) => { + let mut err = struct_span_err!(self.tcx.sess, + span, + E0481, + "lifetime of function argument does not outlive \ + the function call"); + self.tcx.note_and_explain_region(&mut err, + "the function argument is only valid for ", + sup, + ""); + err + } + infer::CallReturn(span) => { + let mut err = struct_span_err!(self.tcx.sess, + span, + E0482, + "lifetime of return value does not outlive the \ + function call"); + self.tcx.note_and_explain_region(&mut err, + "the return value is only valid for ", + sup, + ""); + err + } + infer::Operand(span) => { + let mut err = struct_span_err!(self.tcx.sess, + span, + E0483, + "lifetime of operand does not outlive the \ + operation"); + self.tcx + .note_and_explain_region(&mut err, "the operand is only valid for ", sup, ""); + err + } + infer::AddrOf(span) => { + let mut err = struct_span_err!(self.tcx.sess, + span, + E0484, + "reference is not valid at the time of borrow"); + self.tcx + .note_and_explain_region(&mut err, "the borrow is only valid for ", sup, ""); + err + } + infer::AutoBorrow(span) => { + let mut err = struct_span_err!(self.tcx.sess, + span, + E0485, + "automatically reference is not valid at the time \ + of borrow"); + self.tcx.note_and_explain_region(&mut err, + "the automatic borrow is only valid for ", + sup, + ""); + err + } + infer::ExprTypeIsNotInScope(t, span) => { + let mut err = struct_span_err!(self.tcx.sess, + span, + E0486, + "type of expression contains references that are \ + not valid during the expression: `{}`", 
+ self.ty_to_string(t)); + self.tcx.note_and_explain_region(&mut err, "type is only valid for ", sup, ""); + err + } + infer::SafeDestructor(span) => { + let mut err = struct_span_err!(self.tcx.sess, + span, + E0487, + "unsafe use of destructor: destructor might be \ + called while references are dead"); + // FIXME (22171): terms "super/subregion" are suboptimal + self.tcx.note_and_explain_region(&mut err, "superregion: ", sup, ""); + self.tcx.note_and_explain_region(&mut err, "subregion: ", sub, ""); + err + } + infer::BindingTypeIsNotValidAtDecl(span) => { + let mut err = struct_span_err!(self.tcx.sess, + span, + E0488, + "lifetime of variable does not enclose its \ + declaration"); + self.tcx + .note_and_explain_region(&mut err, "the variable is only valid for ", sup, ""); + err + } + infer::ParameterInScope(_, span) => { + let mut err = struct_span_err!(self.tcx.sess, + span, + E0489, + "type/lifetime parameter not in scope here"); + self.tcx + .note_and_explain_region(&mut err, "the parameter is only valid for ", sub, ""); + err + } + infer::DataBorrowed(ty, span) => { + let mut err = struct_span_err!(self.tcx.sess, + span, + E0490, + "a value of type `{}` is borrowed for too long", + self.ty_to_string(ty)); + self.tcx.note_and_explain_region(&mut err, "the type is valid for ", sub, ""); + self.tcx.note_and_explain_region(&mut err, "but the borrow lasts for ", sup, ""); + err + } + infer::ReferenceOutlivesReferent(ty, span) => { + let mut err = struct_span_err!(self.tcx.sess, + span, + E0491, + "in type `{}`, reference has a longer lifetime \ + than the data it references", + self.ty_to_string(ty)); + self.tcx.note_and_explain_region(&mut err, "the pointer is valid for ", sub, ""); + self.tcx.note_and_explain_region(&mut err, + "but the referenced data is only valid for ", + sup, + ""); + err + } + infer::CompareImplMethodObligation { span, + item_name, + impl_item_def_id, + trait_item_def_id, + lint_id } => { + self.report_extra_impl_obligation(span, + 
item_name, + impl_item_def_id, + trait_item_def_id, + &format!("`{}: {}`", sup, sub), + lint_id) + } + } + } +} diff --git a/src/librustc/infer/mod.rs b/src/librustc/infer/mod.rs index a929060cf9890..b07ef4dfd448e 100644 --- a/src/librustc/infer/mod.rs +++ b/src/librustc/infer/mod.rs @@ -210,7 +210,7 @@ pub struct InferCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { /// region that each late-bound region was replaced with. pub type SkolemizationMap<'tcx> = FxHashMap<ty::BoundRegion, &'tcx ty::Region>; -/// See `error_reporting.rs` for more details +/// See `error_reporting` module for more details #[derive(Clone, Debug)] pub enum ValuePairs<'tcx> { Types(ExpectedFound<Ty<'tcx>>), @@ -221,7 +221,7 @@ pub enum ValuePairs<'tcx> { /// The trace designates the path through inference that we took to /// encounter an error or subtyping constraint. /// -/// See `error_reporting.rs` for more details. +/// See `error_reporting` module for more details. #[derive(Clone)] pub struct TypeTrace<'tcx> { cause: ObligationCause<'tcx>, @@ -230,7 +230,7 @@ pub struct TypeTrace<'tcx> { /// The origin of a `r1 <= r2` constraint. /// -/// See `error_reporting.rs` for more details +/// See `error_reporting` module for more details #[derive(Clone, Debug)] pub enum SubregionOrigin<'tcx> { // Arose from a subtyping relation @@ -348,7 +348,7 @@ pub enum LateBoundRegionConversionTime { /// Reasons to create a region inference variable /// -/// See `error_reporting.rs` for more details +/// See `error_reporting` module for more details #[derive(Clone, Debug)] pub enum RegionVariableOrigin { // Region variables created for ill-categorized reasons, @@ -1295,7 +1295,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // this infcx was in use. This is totally hokey but // otherwise we have a hard time separating legit region // errors from silly ones. 
- self.report_region_errors(&errors); // see error_reporting.rs + self.report_region_errors(&errors); // see error_reporting module } } diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs index 70f03e02f46d9..b2f508ff26d6f 100644 --- a/src/librustc/lint/builtin.rs +++ b/src/librustc/lint/builtin.rs @@ -236,6 +236,12 @@ declare_lint! { "detects use of struct constructors that would be invisible with new visibility rules" } +declare_lint! { + pub MISSING_FRAGMENT_SPECIFIER, + Warn, + "detects missing fragment specifiers in unused `macro_rules!` patterns" +} + declare_lint! { pub DEPRECATED, Warn, @@ -286,6 +292,7 @@ impl LintPass for HardwiredLints { LEGACY_DIRECTORY_OWNERSHIP, LEGACY_IMPORTS, LEGACY_CONSTRUCTOR_VISIBILITY, + MISSING_FRAGMENT_SPECIFIER, DEPRECATED ) } diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index ddf09f5cfe0e0..9619ba8472404 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -688,6 +688,14 @@ pub fn phase_2_configure_and_expand<F>(sess: &Session, let krate = ecx.monotonic_expander().expand_crate(krate); + let mut missing_fragment_specifiers: Vec<_> = + ecx.parse_sess.missing_fragment_specifiers.borrow().iter().cloned().collect(); + missing_fragment_specifiers.sort(); + for span in missing_fragment_specifiers { + let lint = lint::builtin::MISSING_FRAGMENT_SPECIFIER; + let msg = "missing fragment specifier".to_string(); + sess.add_lint(lint, ast::CRATE_NODE_ID, span, msg); + } if ecx.parse_sess.span_diagnostic.err_count() - ecx.resolve_err_count > err_count { ecx.parse_sess.span_diagnostic.abort_if_errors(); } diff --git a/src/librustc_incremental/calculate_svh/svh_visitor.rs b/src/librustc_incremental/calculate_svh/svh_visitor.rs index 150a2c39db7a8..b075fa5999249 100644 --- a/src/librustc_incremental/calculate_svh/svh_visitor.rs +++ b/src/librustc_incremental/calculate_svh/svh_visitor.rs @@ -1044,26 +1044,6 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 
'hash, 'tcx> { self.hash_token_tree(sub_tt); } } - tokenstream::TokenTree::Sequence(span, ref sequence_repetition) => { - hash_span!(self, span); - let tokenstream::SequenceRepetition { - ref tts, - ref separator, - op, - num_captures, - } = **sequence_repetition; - - tts.len().hash(self.st); - for sub_tt in tts { - self.hash_token_tree(sub_tt); - } - self.hash_discriminant(separator); - if let Some(ref separator) = *separator { - self.hash_token(separator, span); - } - op.hash(self.st); - num_captures.hash(self.st); - } } } @@ -1129,10 +1109,6 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> { token::Token::Ident(ident) | token::Token::Lifetime(ident) | token::Token::SubstNt(ident) => ident.name.as_str().hash(self.st), - token::Token::MatchNt(ident1, ident2) => { - ident1.name.as_str().hash(self.st); - ident2.name.as_str().hash(self.st); - } token::Token::Interpolated(ref non_terminal) => { // FIXME(mw): This could be implemented properly. It's just a diff --git a/src/librustc_lint/lib.rs b/src/librustc_lint/lib.rs index 8fb1740e66eac..b87edf5482324 100644 --- a/src/librustc_lint/lib.rs +++ b/src/librustc_lint/lib.rs @@ -247,6 +247,10 @@ pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) { id: LintId::of(LEGACY_CONSTRUCTOR_VISIBILITY), reference: "issue #39207 <https://github.com/rust-lang/rust/issues/39207>", }, + FutureIncompatibleInfo { + id: LintId::of(MISSING_FRAGMENT_SPECIFIER), + reference: "issue #40107 <https://github.com/rust-lang/rust/issues/40107>", + }, ]); // Register renamed and removed lints diff --git a/src/librustc_save_analysis/span_utils.rs b/src/librustc_save_analysis/span_utils.rs index b5add6404fc9f..6c93744f014a3 100644 --- a/src/librustc_save_analysis/span_utils.rs +++ b/src/librustc_save_analysis/span_utils.rs @@ -287,7 +287,7 @@ impl<'a> SpanUtils<'a> { let mut toks = toks.parse_all_token_trees().unwrap().into_iter(); let mut prev = toks.next().unwrap(); - let first_span = prev.get_span(); + 
let first_span = prev.span(); let mut angle_count = 0; for tok in toks { if let TokenTree::Token(_, ref tok) = prev { @@ -305,10 +305,10 @@ impl<'a> SpanUtils<'a> { continue; } if let TokenTree::Token(_, token::Semi) = tok { - return self.snippet(mk_sp(first_span.lo, prev.get_span().hi)); + return self.snippet(mk_sp(first_span.lo, prev.span().hi)); } else if let TokenTree::Delimited(_, ref d) = tok { if d.delim == token::Brace { - return self.snippet(mk_sp(first_span.lo, prev.get_span().hi)); + return self.snippet(mk_sp(first_span.lo, prev.span().hi)); } } prev = tok; diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 73b82fbad5dfd..1294296840ebd 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -1476,7 +1476,7 @@ pub struct PolyTrait { /// A representation of a Type suitable for hyperlinking purposes. Ideally one can get the original /// type out of the AST/TyCtxt given one of these, if more information is needed. Most importantly /// it does not preserve mutability or boxes. 
-#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq)] pub enum Type { /// structs/enums/traits (most that'd be an hir::TyPath) ResolvedPath { diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs index 6f8c6aa7094dd..23507dc889b71 100644 --- a/src/librustdoc/html/format.rs +++ b/src/librustdoc/html/format.rs @@ -90,6 +90,16 @@ impl<'a, T: fmt::Display> fmt::Display for CommaSep<'a, T> { } } +impl<'a, T: fmt::Debug> fmt::Debug for CommaSep<'a, T> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + for (i, item) in self.0.iter().enumerate() { + if i != 0 { write!(f, ", ")?; } + fmt::Debug::fmt(item, f)?; + } + Ok(()) + } +} + impl<'a> fmt::Display for TyParamBounds<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let &TyParamBounds(bounds) = self; @@ -165,7 +175,7 @@ impl<'a> fmt::Display for WhereClause<'a> { if f.alternate() { clause.push_str(" where "); } else { - clause.push_str(" <span class='where fmt-newline'>where "); + clause.push_str(" <span class=\"where fmt-newline\">where "); } for (i, pred) in gens.where_predicates.iter().enumerate() { if i > 0 { @@ -449,8 +459,8 @@ fn resolved_path(w: &mut fmt::Formatter, did: DefId, path: &clean::Path, } else { root.push_str(&seg.name); root.push_str("/"); - write!(w, "<a class='mod' - href='{}index.html'>{}</a>::", + write!(w, "<a class=\"mod\" + href=\"{}index.html\">{}</a>::", root, seg.name)?; } @@ -491,7 +501,7 @@ fn primitive_link(f: &mut fmt::Formatter, Some(&def_id) if def_id.is_local() => { let len = CURRENT_LOCATION_KEY.with(|s| s.borrow().len()); let len = if len == 0 {0} else {len - 1}; - write!(f, "<a class='primitive' href='{}primitive.{}.html'>", + write!(f, "<a class=\"primitive\" href=\"{}primitive.{}.html\">", repeat("../").take(len).collect::<String>(), prim.to_url_str())?; needs_termination = true; @@ -508,7 +518,7 @@ fn primitive_link(f: &mut fmt::Formatter, (.., render::Unknown) => 
None, }; if let Some((cname, root)) = loc { - write!(f, "<a class='primitive' href='{}{}/primitive.{}.html'>", + write!(f, "<a class=\"primitive\" href=\"{}{}/primitive.{}.html\">", root, cname, prim.to_url_str())?; @@ -550,7 +560,7 @@ impl<'a> fmt::Display for HRef<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match href(self.did) { Some((url, shortty, fqp)) => if !f.alternate() { - write!(f, "<a class='{}' href='{}' title='{} {}'>{}</a>", + write!(f, "<a class=\"{}\" href=\"{}\" title=\"{} {}\">{}</a>", shortty, url, shortty, fqp.join("::"), self.text) } else { write!(f, "{}", self.text) @@ -560,7 +570,8 @@ impl<'a> fmt::Display for HRef<'a> { } } -fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool) -> fmt::Result { +fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool, + is_not_debug: bool) -> fmt::Result { match *t { clean::Generic(ref name) => { f.write_str(name) @@ -571,7 +582,8 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool) -> fmt: tybounds(f, typarams) } clean::Infer => write!(f, "_"), - clean::Primitive(prim) => primitive_link(f, prim, prim.as_str()), + clean::Primitive(prim) if is_not_debug => primitive_link(f, prim, prim.as_str()), + clean::Primitive(prim) => write!(f, "{}", prim.as_str()), clean::BareFunction(ref decl) => { if f.alternate() { write!(f, "{}{}fn{:#}{:#}", @@ -589,26 +601,30 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool) -> fmt: } clean::Tuple(ref typs) => { match &typs[..] 
{ - &[] => primitive_link(f, PrimitiveType::Tuple, "()"), - &[ref one] => { + &[] if is_not_debug => primitive_link(f, PrimitiveType::Tuple, "()"), + &[] => write!(f, "()"), + &[ref one] if is_not_debug => { primitive_link(f, PrimitiveType::Tuple, "(")?; //carry f.alternate() into this display w/o branching manually fmt::Display::fmt(one, f)?; primitive_link(f, PrimitiveType::Tuple, ",)") } - many => { + &[ref one] => write!(f, "({:?},)", one), + many if is_not_debug => { primitive_link(f, PrimitiveType::Tuple, "(")?; fmt::Display::fmt(&CommaSep(&many), f)?; primitive_link(f, PrimitiveType::Tuple, ")") } + many => write!(f, "({:?})", &CommaSep(&many)), } } - clean::Vector(ref t) => { + clean::Vector(ref t) if is_not_debug => { primitive_link(f, PrimitiveType::Slice, &format!("["))?; fmt::Display::fmt(t, f)?; primitive_link(f, PrimitiveType::Slice, &format!("]")) } - clean::FixedVector(ref t, ref s) => { + clean::Vector(ref t) => write!(f, "[{:?}]", t), + clean::FixedVector(ref t, ref s) if is_not_debug => { primitive_link(f, PrimitiveType::Array, "[")?; fmt::Display::fmt(t, f)?; if f.alternate() { @@ -619,10 +635,17 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool) -> fmt: &format!("; {}]", Escape(s))) } } + clean::FixedVector(ref t, ref s) => { + if f.alternate() { + write!(f, "[{:?}; {}]", t, s) + } else { + write!(f, "[{:?}; {}]", t, Escape(s)) + } + } clean::Never => f.write_str("!"), clean::RawPointer(m, ref t) => { match **t { - clean::Generic(_) | clean::ResolvedPath {is_generic: true, ..} => { + clean::Generic(_) | clean::ResolvedPath {is_generic: true, ..} if is_not_debug => { if f.alternate() { primitive_link(f, clean::PrimitiveType::RawPointer, &format!("*{}{:#}", RawMutableSpace(m), t)) @@ -631,11 +654,21 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool) -> fmt: &format!("*{}{}", RawMutableSpace(m), t)) } } - _ => { + clean::Generic(_) | clean::ResolvedPath {is_generic: true, ..} => { + if 
f.alternate() { + write!(f, "*{}{:#?}", RawMutableSpace(m), t) + } else { + write!(f, "*{}{:?}", RawMutableSpace(m), t) + } + } + _ if is_not_debug => { primitive_link(f, clean::PrimitiveType::RawPointer, &format!("*{}", RawMutableSpace(m)))?; fmt::Display::fmt(t, f) } + _ => { + write!(f, "*{}{:?}", RawMutableSpace(m), t) + } } } clean::BorrowedRef{ lifetime: ref l, mutability, type_: ref ty} => { @@ -647,15 +680,23 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool) -> fmt: match **ty { clean::Vector(ref bt) => { // BorrowedRef{ ... Vector(T) } is &[T] match **bt { - clean::Generic(_) => + clean::Generic(_) if is_not_debug => { if f.alternate() { primitive_link(f, PrimitiveType::Slice, &format!("&{}{}[{:#}]", lt, m, **bt)) } else { primitive_link(f, PrimitiveType::Slice, &format!("&{}{}[{}]", lt, m, **bt)) - }, - _ => { + } + } + clean::Generic(_) => { + if f.alternate() { + write!(f, "&{}{}[{:#?}]", lt, m, **bt) + } else { + write!(f, "&{}{}[{:?}]", lt, m, **bt) + } + } + _ if is_not_debug => { if f.alternate() { primitive_link(f, PrimitiveType::Slice, &format!("&{}{}[", lt, m))?; @@ -667,15 +708,26 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool) -> fmt: } primitive_link(f, PrimitiveType::Slice, "]") } + _ => { + if f.alternate() { + write!(f, "&{}{}[{:#?}]", lt, m, **bt) + } else { + write!(f, "&{}{}[{:?}]", lt, m, **bt) + } + } } } _ => { if f.alternate() { write!(f, "&{}{}", lt, m)?; - fmt_type(&ty, f, use_absolute) + fmt_type(&ty, f, use_absolute, is_not_debug) } else { - write!(f, "&{}{}", lt, m)?; - fmt_type(&ty, f, use_absolute) + if is_not_debug { + write!(f, "&{}{}", lt, m)?; + } else { + write!(f, "&{}{}", lt, m)?; + } + fmt_type(&ty, f, use_absolute, is_not_debug) } } } @@ -723,9 +775,17 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool) -> fmt: } clean::QPath { ref name, ref self_type, ref trait_ } => { if f.alternate() { - write!(f, "<{:#} as {:#}>::{}", self_type, trait_, 
name) + if is_not_debug { + write!(f, "<{:#} as {:#}>::{}", self_type, trait_, name) + } else { + write!(f, "<{:#?} as {:#?}>::{}", self_type, trait_, name) + } } else { - write!(f, "<{} as {}>::{}", self_type, trait_, name) + if is_not_debug { + write!(f, "<{} as {}>::{}", self_type, trait_, name) + } else { + write!(f, "<{:?} as {:?}>::{}", self_type, trait_, name) + } } } clean::Unique(..) => { @@ -736,7 +796,13 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool) -> fmt: impl fmt::Display for clean::Type { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt_type(self, f, false) + fmt_type(self, f, false, true) + } +} + +impl fmt::Debug for clean::Type { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt_type(self, f, false, false) } } @@ -777,7 +843,7 @@ fn fmt_impl(i: &clean::Impl, plain.push_str(" for "); } - fmt_type(&i.for_, f, use_absolute)?; + fmt_type(&i.for_, f, use_absolute, true)?; plain.push_str(&format!("{:#}", i.for_)); fmt::Display::fmt(&WhereClause(&i.generics, plain.len() + 1), f)?; diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index 0629e93e7ef5d..0dafc4225a321 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -144,12 +144,12 @@ impl<U: Write> Writer for U { -> io::Result<()> { match klass { Class::None => write!(self, "{}", text), - klass => write!(self, "<span class='{}'>{}</span>", klass.rustdoc_class(), text), + klass => write!(self, "<span class=\"{}\">{}</span>", klass.rustdoc_class(), text), } } fn enter_span(&mut self, klass: Class) -> io::Result<()> { - write!(self, "<span class='{}'>", klass.rustdoc_class()) + write!(self, "<span class=\"{}\">", klass.rustdoc_class()) } fn exit_span(&mut self) -> io::Result<()> { @@ -315,7 +315,7 @@ impl<'a> Classifier<'a> { token::Lifetime(..) => Class::Lifetime, token::Underscore | token::Eof | token::Interpolated(..) | - token::MatchNt(..) | token::SubstNt(..) 
| token::Tilde | token::At => Class::None, + token::SubstNt(..) | token::Tilde | token::At => Class::None, }; // Anything that didn't return above is the simple case where we the @@ -363,7 +363,7 @@ fn write_header(class: Option<&str>, if let Some(id) = id { write!(out, "id='{}' ", id)?; } - write!(out, "class='rust {}'>\n", class.unwrap_or("")) + write!(out, "class=\"rust {}\">\n", class.unwrap_or("")) } fn write_footer(out: &mut Write) -> io::Result<()> { diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index bb39c8c4f22ff..44f71d8952985 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -1547,7 +1547,7 @@ impl<'a> fmt::Display for Item<'a> { component)?; } } - write!(fmt, "<a class='{}' href=''>{}</a>", + write!(fmt, "<a class=\"{}\" href=''>{}</a>", self.item.type_(), self.item.name.as_ref().unwrap())?; write!(fmt, "</span>")?; // in-band @@ -1654,9 +1654,35 @@ fn document_short(w: &mut fmt::Formatter, item: &clean::Item, link: AssocItemLin Ok(()) } +fn md_render_assoc_item(item: &clean::Item) -> String { + match item.inner { + clean::AssociatedConstItem(ref ty, ref default) => { + if let Some(default) = default.as_ref() { + format!("```\n{}: {:?} = {}\n```\n\n", item.name.as_ref().unwrap(), ty, default) + } else { + format!("```\n{}: {:?}\n```\n\n", item.name.as_ref().unwrap(), ty) + } + } + _ => String::new(), + } +} + +fn get_doc_value(item: &clean::Item) -> Option<&str> { + let x = item.doc_value(); + if x.is_none() { + match item.inner { + clean::AssociatedConstItem(_, _) => Some(""), + _ => None, + } + } else { + x + } +} + fn document_full(w: &mut fmt::Formatter, item: &clean::Item) -> fmt::Result { - if let Some(s) = item.doc_value() { - write!(w, "<div class='docblock'>{}</div>", Markdown(s))?; + if let Some(s) = get_doc_value(item) { + write!(w, "<div class='docblock'>{}</div>", + Markdown(&format!("{}{}", md_render_assoc_item(item), s)))?; } Ok(()) } @@ -1817,7 +1843,7 @@ fn item_module(w: &mut 
fmt::Formatter, cx: &Context, let doc_value = myitem.doc_value().unwrap_or(""); write!(w, " <tr class='{stab} module-item'> - <td><a class='{class}' href='{href}' + <td><a class=\"{class}\" href=\"{href}\" title='{title_type} {title}'>{name}</a>{unsafety_flag}</td> <td class='docblock-short'> {stab_docs} {docs} @@ -2215,16 +2241,12 @@ fn naive_assoc_href(it: &clean::Item, link: AssocItemLink) -> String { fn assoc_const(w: &mut fmt::Formatter, it: &clean::Item, ty: &clean::Type, - default: Option<&String>, + _default: Option<&String>, link: AssocItemLink) -> fmt::Result { - write!(w, "const <a href='{}' class='constant'>{}</a>", + write!(w, "const <a href='{}' class=\"constant\"><b>{}</b></a>: {}", naive_assoc_href(it, link), - it.name.as_ref().unwrap())?; - - write!(w, ": {}", ty)?; - if let Some(default) = default { - write!(w, " = {}", Escape(default))?; - } + it.name.as_ref().unwrap(), + ty)?; Ok(()) } @@ -2232,7 +2254,7 @@ fn assoc_type(w: &mut fmt::Formatter, it: &clean::Item, bounds: &Vec<clean::TyParamBound>, default: Option<&clean::Type>, link: AssocItemLink) -> fmt::Result { - write!(w, "type <a href='{}' class='type'>{}</a>", + write!(w, "type <a href='{}' class=\"type\">{}</a>", naive_assoc_href(it, link), it.name.as_ref().unwrap())?; if !bounds.is_empty() { @@ -2375,7 +2397,7 @@ fn item_struct(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, let ns_id = derive_id(format!("{}.{}", field.name.as_ref().unwrap(), ItemType::StructField.name_space())); - write!(w, "<span id='{id}' class='{item_type}'> + write!(w, "<span id='{id}' class=\"{item_type}\"> <span id='{ns_id}' class='invisible'> <code>{name}: {ty}</code> </span></span>", @@ -2417,7 +2439,7 @@ fn item_union(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, if fields.peek().is_some() { write!(w, "<h2 class='fields'>Fields</h2>")?; for (field, ty) in fields { - write!(w, "<span id='{shortty}.{name}' class='{shortty}'><code>{name}: {ty}</code> + write!(w, "<span id='{shortty}.{name}' 
class=\"{shortty}\"><code>{name}: {ty}</code> </span>", shortty = ItemType::StructField, name = field.name.as_ref().unwrap(), @@ -2902,7 +2924,7 @@ fn render_impl(w: &mut fmt::Formatter, cx: &Context, i: &Impl, link: AssocItemLi if render_method_item { let id = derive_id(format!("{}.{}", item_type, name)); let ns_id = derive_id(format!("{}.{}", name, item_type.name_space())); - write!(w, "<h4 id='{}' class='{}'>", id, item_type)?; + write!(w, "<h4 id='{}' class=\"{}\">", id, item_type)?; write!(w, "<span id='{}' class='invisible'>", ns_id)?; write!(w, "<code>")?; render_assoc_item(w, item, link.anchor(&id), ItemType::Impl)?; @@ -2914,7 +2936,7 @@ fn render_impl(w: &mut fmt::Formatter, cx: &Context, i: &Impl, link: AssocItemLi clean::TypedefItem(ref tydef, _) => { let id = derive_id(format!("{}.{}", ItemType::AssociatedType, name)); let ns_id = derive_id(format!("{}.{}", name, item_type.name_space())); - write!(w, "<h4 id='{}' class='{}'>", id, item_type)?; + write!(w, "<h4 id='{}' class=\"{}\">", id, item_type)?; write!(w, "<span id='{}' class='invisible'><code>", ns_id)?; assoc_type(w, item, &Vec::new(), Some(&tydef.type_), link.anchor(&id))?; write!(w, "</code></span></h4>\n")?; @@ -2922,7 +2944,7 @@ fn render_impl(w: &mut fmt::Formatter, cx: &Context, i: &Impl, link: AssocItemLi clean::AssociatedConstItem(ref ty, ref default) => { let id = derive_id(format!("{}.{}", item_type, name)); let ns_id = derive_id(format!("{}.{}", name, item_type.name_space())); - write!(w, "<h4 id='{}' class='{}'>", id, item_type)?; + write!(w, "<h4 id='{}' class=\"{}\">", id, item_type)?; write!(w, "<span id='{}' class='invisible'><code>", ns_id)?; assoc_const(w, item, ty, default.as_ref(), link.anchor(&id))?; write!(w, "</code></span></h4>\n")?; @@ -2930,7 +2952,7 @@ fn render_impl(w: &mut fmt::Formatter, cx: &Context, i: &Impl, link: AssocItemLi clean::ConstantItem(ref c) => { let id = derive_id(format!("{}.{}", item_type, name)); let ns_id = derive_id(format!("{}.{}", name, 
item_type.name_space())); - write!(w, "<h4 id='{}' class='{}'>", id, item_type)?; + write!(w, "<h4 id='{}' class=\"{}\">", id, item_type)?; write!(w, "<span id='{}' class='invisible'><code>", ns_id)?; assoc_const(w, item, &c.type_, Some(&c.expr), link.anchor(&id))?; write!(w, "</code></span></h4>\n")?; @@ -2938,7 +2960,7 @@ fn render_impl(w: &mut fmt::Formatter, cx: &Context, i: &Impl, link: AssocItemLi clean::AssociatedTypeItem(ref bounds, ref default) => { let id = derive_id(format!("{}.{}", item_type, name)); let ns_id = derive_id(format!("{}.{}", name, item_type.name_space())); - write!(w, "<h4 id='{}' class='{}'>", id, item_type)?; + write!(w, "<h4 id='{}' class=\"{}\">", id, item_type)?; write!(w, "<span id='{}' class='invisible'><code>", ns_id)?; assoc_type(w, item, bounds, default.as_ref(), link.anchor(&id))?; write!(w, "</code></span></h4>\n")?; @@ -2956,7 +2978,7 @@ fn render_impl(w: &mut fmt::Formatter, cx: &Context, i: &Impl, link: AssocItemLi // We need the stability of the item from the trait // because impls can't have a stability. 
document_stability(w, cx, it)?; - if item.doc_value().is_some() { + if get_doc_value(item).is_some() { document_full(w, item)?; } else { // In case the item isn't documented, diff --git a/src/librustdoc/html/static/main.js b/src/librustdoc/html/static/main.js index c12e1e7d6080d..200285862276a 100644 --- a/src/librustdoc/html/static/main.js +++ b/src/librustdoc/html/static/main.js @@ -979,7 +979,7 @@ .html("[<span class='inner'></span>]"); toggle.children(".inner").text(labelForToggleButton(false)); - $(".method").each(function() { + $(".method, .impl-items > .associatedconstant").each(function() { if ($(this).next().is(".docblock") || ($(this).next().is(".stability") && $(this).next().next().is(".docblock"))) { $(this).children().last().after(toggle.clone()); diff --git a/src/librustdoc/html/static/rustdoc.css b/src/librustdoc/html/static/rustdoc.css index 681d2354056f6..b0bf69b0181f2 100644 --- a/src/librustdoc/html/static/rustdoc.css +++ b/src/librustdoc/html/static/rustdoc.css @@ -89,7 +89,7 @@ h2 { h3 { font-size: 1.3em; } -h1, h2, h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):not(.type):not(.tymethod) { +h1, h2, h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):not(.type):not(.tymethod):not(.associatedconstant) { font-weight: 500; margin: 20px 0 15px 0; padding-bottom: 6px; @@ -99,10 +99,10 @@ h1.fqn { margin-top: 0; position: relative; } -h2, h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):not(.type):not(.tymethod) { +h2, h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):not(.type):not(.tymethod):not(.associatedconstant) { border-bottom: 1px solid; } -h3.impl, h3.method, h4.method, h3.type, h4.type { +h3.impl, h3.method, h4.method, h3.type, h4.type, h4.associatedconstant { font-weight: 600; margin-top: 10px; margin-bottom: 10px; @@ -382,7 +382,7 @@ h4 > code, h3 > code, .invisible > code { .content .impl-items .docblock, .content .impl-items .stability { margin-left: 40px; 
} -.content .impl-items .method, .content .impl-items > .type { +.content .impl-items .method, .content .impl-items > .type, .impl-items > .associatedconstant { margin-left: 20px; } diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index 64f37925a98e2..236d9f230b5d4 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -211,7 +211,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { }; // FIXME(jseyfried) merge with `self.visit_macro()` - let matchers = def.body.chunks(4).map(|arm| arm[0].get_span()).collect(); + let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect(); om.macros.push(Macro { def_id: def_id, attrs: def.attrs.clone().into(), @@ -521,7 +521,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { // convert each exported_macro into a doc item fn visit_local_macro(&self, def: &hir::MacroDef) -> Macro { // Extract the spans of all matchers. They represent the "interface" of the macro. - let matchers = def.body.chunks(4).map(|arm| arm[0].get_span()).collect(); + let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect(); Macro { def_id: self.cx.tcx.hir.local_def_id(def.id), diff --git a/src/libstd/ffi/c_str.rs b/src/libstd/ffi/c_str.rs index dc3855367ae27..bc678fcb8385b 100644 --- a/src/libstd/ffi/c_str.rs +++ b/src/libstd/ffi/c_str.rs @@ -455,6 +455,20 @@ impl From<NulError> for io::Error { } } +#[stable(feature = "frombyteswithnulerror_impls", since = "1.17.0")] +impl Error for FromBytesWithNulError { + fn description(&self) -> &str { + "data provided is not null terminated or contains an interior nul byte" + } +} + +#[stable(feature = "frombyteswithnulerror_impls", since = "1.17.0")] +impl fmt::Display for FromBytesWithNulError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.description().fmt(f) + } +} + impl IntoStringError { /// Consumes this error, returning original `CString` which generated the /// error. 
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 6c46f90f3d4b9..b1b69c80f4d00 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -14,10 +14,9 @@ use ext::base::ExtCtxt; use ext::base; use ext::build::AstBuilder; use parse::parser::{Parser, PathStyle}; -use parse::token::*; use parse::token; use ptr::P; -use tokenstream::{self, TokenTree}; +use tokenstream::TokenTree; /// Quasiquoting works via token trees. @@ -356,14 +355,35 @@ pub mod rt { } fn parse_tts(&self, s: String) -> Vec<TokenTree> { - panictry!(parse::parse_tts_from_source_str( - "<quote expansion>".to_string(), - s, - self.parse_sess())) + parse::parse_tts_from_source_str("<quote expansion>".to_string(), s, self.parse_sess()) } } } +// Replaces `Token::OpenDelim .. Token::CloseDelim` with `TokenTree::Delimited(..)`. +pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> { + use std::rc::Rc; + use tokenstream::Delimited; + + let mut results = Vec::new(); + let mut result = Vec::new(); + for tree in tts { + match tree { + TokenTree::Token(_, token::OpenDelim(..)) => { + results.push(::std::mem::replace(&mut result, Vec::new())); + } + TokenTree::Token(span, token::CloseDelim(delim)) => { + let tree = + TokenTree::Delimited(span, Rc::new(Delimited { delim: delim, tts: result })); + result = results.pop().unwrap(); + result.push(tree); + } + tree @ _ => result.push(tree), + } + } + result +} + // These panicking parsing functions are used by the quote_*!() syntax extensions, // but shouldn't be used otherwise. 
pub fn parse_expr_panic(parser: &mut Parser) -> P<Expr> { @@ -510,20 +530,6 @@ pub fn expand_quote_path(cx: &mut ExtCtxt, base::MacEager::expr(expanded) } -pub fn expand_quote_matcher(cx: &mut ExtCtxt, - sp: Span, - tts: &[TokenTree]) - -> Box<base::MacResult+'static> { - let (cx_expr, tts) = parse_arguments_to_quote(cx, tts); - let mut vector = mk_stmts_let(cx, sp); - vector.extend(statements_mk_tts(cx, &tts[..], true)); - vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt")))); - let block = cx.expr_block(cx.block(sp, vector)); - - let expanded = expand_wrapper(cx, sp, cx_expr, block, &[&["syntax", "ext", "quote", "rt"]]); - base::MacEager::expr(expanded) -} - fn ids_ext(strs: Vec<String>) -> Vec<ast::Ident> { strs.iter().map(|s| ast::Ident::from_str(s)).collect() } @@ -669,12 +675,6 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> { vec![mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident))]); } - token::MatchNt(name, kind) => { - return cx.expr_call(sp, - mk_token_path(cx, sp, "MatchNt"), - vec![mk_ident(cx, sp, name), mk_ident(cx, sp, kind)]); - } - token::Interpolated(_) => panic!("quote! 
with interpolated token"), _ => () @@ -712,9 +712,9 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> { mk_token_path(cx, sp, name) } -fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stmt> { +fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt> { match *tt { - TokenTree::Token(sp, SubstNt(ident)) => { + TokenTree::Token(sp, token::Ident(ident)) if quoted => { // tt.extend($ident.to_tokens(ext_cx)) let e_to_toks = @@ -733,13 +733,6 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm vec![cx.stmt_expr(e_push)] } - ref tt @ TokenTree::Token(_, MatchNt(..)) if !matcher => { - let mut seq = vec![]; - for i in 0..tt.len() { - seq.push(tt.get_tt(i)); - } - statements_mk_tts(cx, &seq[..], matcher) - } TokenTree::Token(sp, ref tok) => { let e_sp = cx.expr_ident(sp, id_ext("_sp")); let e_tok = cx.expr_call(sp, @@ -753,77 +746,17 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm vec![cx.stmt_expr(e_push)] }, TokenTree::Delimited(span, ref delimed) => { - statements_mk_tt(cx, &delimed.open_tt(span), matcher).into_iter() - .chain(delimed.tts.iter() - .flat_map(|tt| statements_mk_tt(cx, tt, matcher))) - .chain(statements_mk_tt(cx, &delimed.close_tt(span), matcher)) - .collect() - }, - TokenTree::Sequence(sp, ref seq) => { - if !matcher { - panic!("TokenTree::Sequence in quote!"); - } - - let e_sp = cx.expr_ident(sp, id_ext("_sp")); - - let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp)); - let mut tts_stmts = vec![stmt_let_tt]; - tts_stmts.extend(statements_mk_tts(cx, &seq.tts[..], matcher)); - tts_stmts.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt")))); - let e_tts = cx.expr_block(cx.block(sp, tts_stmts)); - - let e_separator = match seq.separator { - Some(ref sep) => cx.expr_some(sp, expr_mk_token(cx, sp, sep)), - None => cx.expr_none(sp), - }; - let e_op = match seq.op { - 
tokenstream::KleeneOp::ZeroOrMore => "ZeroOrMore", - tokenstream::KleeneOp::OneOrMore => "OneOrMore", - }; - let e_op_idents = vec![ - id_ext("syntax"), - id_ext("tokenstream"), - id_ext("KleeneOp"), - id_ext(e_op), - ]; - let e_op = cx.expr_path(cx.path_global(sp, e_op_idents)); - let fields = vec![cx.field_imm(sp, id_ext("tts"), e_tts), - cx.field_imm(sp, id_ext("separator"), e_separator), - cx.field_imm(sp, id_ext("op"), e_op), - cx.field_imm(sp, id_ext("num_captures"), - cx.expr_usize(sp, seq.num_captures))]; - let seq_path = vec![id_ext("syntax"), - id_ext("tokenstream"), - id_ext("SequenceRepetition")]; - let e_seq_struct = cx.expr_struct(sp, cx.path_global(sp, seq_path), fields); - let e_rc_new = cx.expr_call_global(sp, vec![id_ext("std"), - id_ext("rc"), - id_ext("Rc"), - id_ext("new")], - vec![e_seq_struct]); - let e_tok = cx.expr_call(sp, - mk_tt_path(cx, sp, "Sequence"), - vec![e_sp, e_rc_new]); - let e_push = - cx.expr_method_call(sp, - cx.expr_ident(sp, id_ext("tt")), - id_ext("push"), - vec![e_tok]); - vec![cx.stmt_expr(e_push)] + let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span), false); + stmts.extend(statements_mk_tts(cx, &delimed.tts)); + stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span), false)); + stmts } } } fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[TokenTree]) -> (P<ast::Expr>, Vec<TokenTree>) { - // NB: It appears that the main parser loses its mind if we consider - // $foo as a SubstNt during the main parse, so we have to re-parse - // under quote_depth > 0. This is silly and should go away; the _guess_ is - // it has to do with transition away from supporting old-style macros, so - // try removing it when enough of them are gone. 
- let mut p = cx.new_parser_from_tts(tts); - p.quote_depth += 1; let cx_expr = panictry!(p.parse_expr()); if !p.eat(&token::Comma) { @@ -877,24 +810,31 @@ fn mk_stmts_let(cx: &ExtCtxt, sp: Span) -> Vec<ast::Stmt> { vec![stmt_let_sp, stmt_let_tt] } -fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree], matcher: bool) -> Vec<ast::Stmt> { +fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree]) -> Vec<ast::Stmt> { let mut ss = Vec::new(); + let mut quoted = false; for tt in tts { - ss.extend(statements_mk_tt(cx, tt, matcher)); + quoted = match *tt { + TokenTree::Token(_, token::Dollar) if !quoted => true, + _ => { + ss.extend(statements_mk_tt(cx, tt, quoted)); + false + } + } } ss } -fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree]) - -> (P<ast::Expr>, P<ast::Expr>) { +fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree]) -> (P<ast::Expr>, P<ast::Expr>) { let (cx_expr, tts) = parse_arguments_to_quote(cx, tts); let mut vector = mk_stmts_let(cx, sp); - vector.extend(statements_mk_tts(cx, &tts[..], false)); + vector.extend(statements_mk_tts(cx, &tts[..])); vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt")))); let block = cx.expr_block(cx.block(sp, vector)); + let unflatten = vec![id_ext("syntax"), id_ext("ext"), id_ext("quote"), id_ext("unflatten")]; - (cx_expr, block) + (cx_expr, cx.expr_call_global(sp, unflatten, vec![block])) } fn expand_wrapper(cx: &ExtCtxt, diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 089c35c694a78..6ab5123bc87b1 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -82,13 +82,13 @@ use ast::Ident; use syntax_pos::{self, BytePos, mk_sp, Span}; use codemap::Spanned; use errors::FatalError; +use ext::tt::quoted; use parse::{Directory, ParseSess}; use parse::parser::{PathStyle, Parser}; -use parse::token::{DocComment, MatchNt, SubstNt}; -use parse::token::{Token, Nonterminal}; -use parse::token; +use parse::token::{self, DocComment, Token, 
Nonterminal}; use print::pprust; -use tokenstream::{self, TokenTree}; +use symbol::keywords; +use tokenstream::TokenTree; use util::small_vector::SmallVector; use std::mem; @@ -101,8 +101,8 @@ use std::collections::hash_map::Entry::{Vacant, Occupied}; #[derive(Clone)] enum TokenTreeOrTokenTreeVec { - Tt(tokenstream::TokenTree), - TtSeq(Vec<tokenstream::TokenTree>), + Tt(quoted::TokenTree), + TtSeq(Vec<quoted::TokenTree>), } impl TokenTreeOrTokenTreeVec { @@ -113,7 +113,7 @@ impl TokenTreeOrTokenTreeVec { } } - fn get_tt(&self, index: usize) -> TokenTree { + fn get_tt(&self, index: usize) -> quoted::TokenTree { match *self { TtSeq(ref v) => v[index].clone(), Tt(ref tt) => tt.get_tt(index), @@ -144,7 +144,9 @@ struct MatcherPos { pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>; -pub fn count_names(ms: &[TokenTree]) -> usize { +pub fn count_names(ms: &[quoted::TokenTree]) -> usize { + use self::quoted::TokenTree; + ms.iter().fold(0, |count, elt| { count + match *elt { TokenTree::Sequence(_, ref seq) => { @@ -153,7 +155,7 @@ pub fn count_names(ms: &[TokenTree]) -> usize { TokenTree::Delimited(_, ref delim) => { count_names(&delim.tts) } - TokenTree::Token(_, MatchNt(..)) => { + TokenTree::MetaVarDecl(..) => { 1 } TokenTree::Token(..) 
=> 0, @@ -161,7 +163,7 @@ pub fn count_names(ms: &[TokenTree]) -> usize { }) } -fn initial_matcher_pos(ms: Vec<TokenTree>, lo: BytePos) -> Box<MatcherPos> { +fn initial_matcher_pos(ms: Vec<quoted::TokenTree>, lo: BytePos) -> Box<MatcherPos> { let match_idx_hi = count_names(&ms[..]); let matches = create_matches(match_idx_hi); Box::new(MatcherPos { @@ -200,22 +202,30 @@ pub enum NamedMatch { MatchedNonterminal(Rc<Nonterminal>) } -fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(ms: &[TokenTree], mut res: I) -> NamedParseResult { - fn n_rec<I: Iterator<Item=Rc<NamedMatch>>>(m: &TokenTree, mut res: &mut I, +fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(sess: &ParseSess, ms: &[quoted::TokenTree], mut res: I) + -> NamedParseResult { + use self::quoted::TokenTree; + + fn n_rec<I: Iterator<Item=Rc<NamedMatch>>>(sess: &ParseSess, m: &TokenTree, mut res: &mut I, ret_val: &mut HashMap<Ident, Rc<NamedMatch>>) -> Result<(), (syntax_pos::Span, String)> { match *m { TokenTree::Sequence(_, ref seq) => { for next_m in &seq.tts { - n_rec(next_m, res.by_ref(), ret_val)? + n_rec(sess, next_m, res.by_ref(), ret_val)? } } TokenTree::Delimited(_, ref delim) => { for next_m in &delim.tts { - n_rec(next_m, res.by_ref(), ret_val)?; + n_rec(sess, next_m, res.by_ref(), ret_val)?; } } - TokenTree::Token(sp, MatchNt(bind_name, _)) => { + TokenTree::MetaVarDecl(span, _, id) if id.name == keywords::Invalid.name() => { + if sess.missing_fragment_specifiers.borrow_mut().remove(&span) { + return Err((span, "missing fragment specifier".to_string())); + } + } + TokenTree::MetaVarDecl(sp, bind_name, _) => { match ret_val.entry(bind_name) { Vacant(spot) => { spot.insert(res.next().unwrap()); @@ -225,9 +235,6 @@ fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(ms: &[TokenTree], mut res: I) -> Na } } } - TokenTree::Token(sp, SubstNt(..)) => { - return Err((sp, "missing fragment specifier".to_string())) - } TokenTree::Token(..) 
=> (), } @@ -236,7 +243,7 @@ fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(ms: &[TokenTree], mut res: I) -> Na let mut ret_val = HashMap::new(); for m in ms { - match n_rec(m, res.by_ref(), &mut ret_val) { + match n_rec(sess, m, res.by_ref(), &mut ret_val) { Ok(_) => {}, Err((sp, msg)) => return Error(sp, msg), } @@ -276,11 +283,15 @@ fn create_matches(len: usize) -> Vec<Vec<Rc<NamedMatch>>> { (0..len).into_iter().map(|_| Vec::new()).collect() } -fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>, +fn inner_parse_loop(sess: &ParseSess, + cur_eis: &mut SmallVector<Box<MatcherPos>>, next_eis: &mut Vec<Box<MatcherPos>>, eof_eis: &mut SmallVector<Box<MatcherPos>>, bb_eis: &mut SmallVector<Box<MatcherPos>>, - token: &Token, span: &syntax_pos::Span) -> ParseResult<()> { + token: &Token, + span: &syntax_pos::Span) -> ParseResult<()> { + use self::quoted::TokenTree; + while let Some(mut ei) = cur_eis.pop() { // When unzipped trees end, remove them while ei.idx >= ei.top_elts.len() { @@ -346,7 +357,7 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>, match ei.top_elts.get_tt(idx) { /* need to descend into sequence */ TokenTree::Sequence(sp, seq) => { - if seq.op == tokenstream::KleeneOp::ZeroOrMore { + if seq.op == quoted::KleeneOp::ZeroOrMore { // Examine the case where there are 0 matches of this sequence let mut new_ei = ei.clone(); new_ei.match_cur += seq.num_captures; @@ -372,7 +383,12 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>, top_elts: Tt(TokenTree::Sequence(sp, seq)), })); } - TokenTree::Token(_, MatchNt(..)) => { + TokenTree::MetaVarDecl(span, _, id) if id.name == keywords::Invalid.name() => { + if sess.missing_fragment_specifiers.borrow_mut().remove(&span) { + return Error(span, "missing fragment specifier".to_string()); + } + } + TokenTree::MetaVarDecl(..) => { // Built-in nonterminals never start with these tokens, // so we can eliminate them from consideration. 
match *token { @@ -380,9 +396,6 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>, _ => bb_eis.push(ei), } } - TokenTree::Token(sp, SubstNt(..)) => { - return Error(sp, "missing fragment specifier".to_string()) - } seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => { let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq)); let idx = ei.idx; @@ -406,8 +419,13 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>, Success(()) } -pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory: Option<Directory>) +pub fn parse(sess: &ParseSess, + tts: Vec<TokenTree>, + ms: &[quoted::TokenTree], + directory: Option<Directory>) -> NamedParseResult { + use self::quoted::TokenTree; + let mut parser = Parser::new(sess, tts, directory, true); let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), parser.span.lo)); let mut next_eis = Vec::new(); // or proceed normally @@ -417,7 +435,7 @@ pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory: let mut eof_eis = SmallVector::new(); assert!(next_eis.is_empty()); - match inner_parse_loop(&mut cur_eis, &mut next_eis, &mut eof_eis, &mut bb_eis, + match inner_parse_loop(sess, &mut cur_eis, &mut next_eis, &mut eof_eis, &mut bb_eis, &parser.token, &parser.span) { Success(_) => {}, Failure(sp, tok) => return Failure(sp, tok), @@ -430,7 +448,8 @@ pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory: /* error messages here could be improved with links to orig. 
rules */ if token_name_eq(&parser.token, &token::Eof) { if eof_eis.len() == 1 { - return nameize(ms, eof_eis[0].matches.iter_mut().map(|mut dv| dv.pop().unwrap())); + let matches = eof_eis[0].matches.iter_mut().map(|mut dv| dv.pop().unwrap()); + return nameize(sess, ms, matches); } else if eof_eis.len() > 1 { return Error(parser.span, "ambiguity: multiple successful parses".to_string()); } else { @@ -438,7 +457,7 @@ pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory: } } else if (!bb_eis.is_empty() && !next_eis.is_empty()) || bb_eis.len() > 1 { let nts = bb_eis.iter().map(|ei| match ei.top_elts.get_tt(ei.idx) { - TokenTree::Token(_, MatchNt(bind, name)) => { + TokenTree::MetaVarDecl(_, bind, name) => { format!("{} ('{}')", name, bind) } _ => panic!() @@ -460,7 +479,7 @@ pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory: parser.bump(); } else /* bb_eis.len() == 1 */ { let mut ei = bb_eis.pop().unwrap(); - if let TokenTree::Token(span, MatchNt(_, ident)) = ei.top_elts.get_tt(ei.idx) { + if let TokenTree::MetaVarDecl(span, _, ident) = ei.top_elts.get_tt(ei.idx) { let match_cur = ei.match_cur; ei.matches[match_cur].push(Rc::new(MatchedNonterminal( Rc::new(parse_nt(&mut parser, span, &ident.name.as_str()))))); @@ -479,10 +498,7 @@ pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory: fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal { match name { "tt" => { - p.quote_depth += 1; //but in theory, non-quoted tts might be useful - let tt = panictry!(p.parse_token_tree()); - p.quote_depth -= 1; - return token::NtTT(tt); + return token::NtTT(panictry!(p.parse_token_tree())); } _ => {} } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index d0c1c0efea7a3..193c06707c7a6 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -16,14 +16,15 @@ use ext::expand::{Expansion, ExpansionKind}; use 
ext::tt::macro_parser::{Success, Error, Failure}; use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal}; use ext::tt::macro_parser::{parse, parse_failure_msg}; +use ext::tt::quoted; use ext::tt::transcribe::transcribe; use parse::{Directory, ParseSess}; use parse::parser::Parser; -use parse::token::{self, NtTT, Token}; +use parse::token::{self, NtTT}; use parse::token::Token::*; use print; use symbol::Symbol; -use tokenstream::{self, TokenTree}; +use tokenstream::TokenTree; use std::collections::{HashMap}; use std::collections::hash_map::{Entry}; @@ -58,8 +59,8 @@ impl<'a> ParserAnyMacro<'a> { struct MacroRulesMacroExpander { name: ast::Ident, - lhses: Vec<TokenTree>, - rhses: Vec<TokenTree>, + lhses: Vec<quoted::TokenTree>, + rhses: Vec<quoted::TokenTree>, valid: bool, } @@ -86,8 +87,8 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, sp: Span, name: ast::Ident, arg: &[TokenTree], - lhses: &[TokenTree], - rhses: &[TokenTree]) + lhses: &[quoted::TokenTree], + rhses: &[quoted::TokenTree]) -> Box<MacResult+'cx> { if cx.trace_macros() { println!("{}! {{ {} }}", @@ -101,7 +102,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers let lhs_tt = match *lhs { - TokenTree::Delimited(_, ref delim) => &delim.tts[..], + quoted::TokenTree::Delimited(_, ref delim) => &delim.tts[..], _ => cx.span_bug(sp, "malformed macro lhs") }; @@ -109,7 +110,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, Success(named_matches) => { let rhs = match rhses[i] { // ignore delimiters - TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(), + quoted::TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(), _ => cx.span_bug(sp, "malformed macro rhs"), }; // rhs has holes ( `$id` and `$(...)` that need filled) @@ -164,24 +165,22 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension { // $( $lhs:tt => $rhs:tt );+ // ...quasiquoting this would be nice. 
// These spans won't matter, anyways - let match_lhs_tok = MatchNt(lhs_nm, ast::Ident::from_str("tt")); - let match_rhs_tok = MatchNt(rhs_nm, ast::Ident::from_str("tt")); let argument_gram = vec![ - TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition { + quoted::TokenTree::Sequence(DUMMY_SP, Rc::new(quoted::SequenceRepetition { tts: vec![ - TokenTree::Token(DUMMY_SP, match_lhs_tok), - TokenTree::Token(DUMMY_SP, token::FatArrow), - TokenTree::Token(DUMMY_SP, match_rhs_tok), + quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")), + quoted::TokenTree::Token(DUMMY_SP, token::FatArrow), + quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")), ], separator: Some(token::Semi), - op: tokenstream::KleeneOp::OneOrMore, + op: quoted::KleeneOp::OneOrMore, num_captures: 2, })), // to phase into semicolon-termination instead of semicolon-separation - TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition { - tts: vec![TokenTree::Token(DUMMY_SP, token::Semi)], + quoted::TokenTree::Sequence(DUMMY_SP, Rc::new(quoted::SequenceRepetition { + tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)], separator: None, - op: tokenstream::KleeneOp::ZeroOrMore, + op: quoted::KleeneOp::ZeroOrMore, num_captures: 0 })), ]; @@ -206,12 +205,13 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension { s.iter().map(|m| { if let MatchedNonterminal(ref nt) = **m { if let NtTT(ref tt) = **nt { - valid &= check_lhs_nt_follows(sess, tt); - return (*tt).clone(); + let tt = quoted::parse(&[tt.clone()], true, sess).pop().unwrap(); + valid &= check_lhs_nt_follows(sess, &tt); + return tt; } } sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") - }).collect::<Vec<TokenTree>>() + }).collect::<Vec<quoted::TokenTree>>() } _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") }; @@ -221,11 +221,11 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension { 
s.iter().map(|m| { if let MatchedNonterminal(ref nt) = **m { if let NtTT(ref tt) = **nt { - return (*tt).clone(); + return quoted::parse(&[tt.clone()], false, sess).pop().unwrap(); } } sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") - }).collect() + }).collect::<Vec<quoted::TokenTree>>() } _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs") }; @@ -249,14 +249,14 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension { NormalTT(exp, Some(def.span), attr::contains_name(&def.attrs, "allow_internal_unstable")) } -fn check_lhs_nt_follows(sess: &ParseSess, lhs: &TokenTree) -> bool { +fn check_lhs_nt_follows(sess: &ParseSess, lhs: &quoted::TokenTree) -> bool { // lhs is going to be like TokenTree::Delimited(...), where the // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens. match lhs { - &TokenTree::Delimited(_, ref tts) => check_matcher(sess, &tts.tts), + &quoted::TokenTree::Delimited(_, ref tts) => check_matcher(sess, &tts.tts), _ => { let msg = "invalid macro matcher; matchers must be contained in balanced delimiters"; - sess.span_diagnostic.span_err(lhs.get_span(), msg); + sess.span_diagnostic.span_err(lhs.span(), msg); false } } @@ -266,10 +266,11 @@ fn check_lhs_nt_follows(sess: &ParseSess, lhs: &TokenTree) -> bool { /// Check that the lhs contains no repetition which could match an empty token /// tree, because then the matcher would hang indefinitely. -fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[TokenTree]) -> bool { +fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool { + use self::quoted::TokenTree; for tt in tts { match *tt { - TokenTree::Token(_, _) => (), + TokenTree::Token(..) | TokenTree::MetaVarDecl(..) 
=> (), TokenTree::Delimited(_, ref del) => if !check_lhs_no_empty_seq(sess, &del.tts) { return false; }, @@ -278,7 +279,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[TokenTree]) -> bool { if seq.tts.iter().all(|seq_tt| { match *seq_tt { TokenTree::Sequence(_, ref sub_seq) => - sub_seq.op == tokenstream::KleeneOp::ZeroOrMore, + sub_seq.op == quoted::KleeneOp::ZeroOrMore, _ => false, } }) { @@ -296,15 +297,15 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[TokenTree]) -> bool { true } -fn check_rhs(sess: &ParseSess, rhs: &TokenTree) -> bool { +fn check_rhs(sess: &ParseSess, rhs: &quoted::TokenTree) -> bool { match *rhs { - TokenTree::Delimited(..) => return true, - _ => sess.span_diagnostic.span_err(rhs.get_span(), "macro rhs must be delimited") + quoted::TokenTree::Delimited(..) => return true, + _ => sess.span_diagnostic.span_err(rhs.span(), "macro rhs must be delimited") } false } -fn check_matcher(sess: &ParseSess, matcher: &[TokenTree]) -> bool { +fn check_matcher(sess: &ParseSess, matcher: &[quoted::TokenTree]) -> bool { let first_sets = FirstSets::new(matcher); let empty_suffix = TokenSet::empty(); let err = sess.span_diagnostic.err_count(); @@ -335,7 +336,9 @@ struct FirstSets { } impl FirstSets { - fn new(tts: &[TokenTree]) -> FirstSets { + fn new(tts: &[quoted::TokenTree]) -> FirstSets { + use self::quoted::TokenTree; + let mut sets = FirstSets { first: HashMap::new() }; build_recur(&mut sets, tts); return sets; @@ -347,13 +350,12 @@ impl FirstSets { let mut first = TokenSet::empty(); for tt in tts.iter().rev() { match *tt { - TokenTree::Token(sp, ref tok) => { - first.replace_with((sp, tok.clone())); + TokenTree::Token(..) | TokenTree::MetaVarDecl(..) 
=> { + first.replace_with(tt.clone()); } TokenTree::Delimited(span, ref delimited) => { build_recur(sets, &delimited.tts[..]); - first.replace_with((delimited.open_tt(span).span(), - Token::OpenDelim(delimited.delim))); + first.replace_with(delimited.open_tt(span)); } TokenTree::Sequence(sp, ref seq_rep) => { let subfirst = build_recur(sets, &seq_rep.tts[..]); @@ -378,11 +380,11 @@ impl FirstSets { if let (Some(ref sep), true) = (seq_rep.separator.clone(), subfirst.maybe_empty) { - first.add_one_maybe((sp, sep.clone())); + first.add_one_maybe(TokenTree::Token(sp, sep.clone())); } // Reverse scan: Sequence comes before `first`. - if subfirst.maybe_empty || seq_rep.op == tokenstream::KleeneOp::ZeroOrMore { + if subfirst.maybe_empty || seq_rep.op == quoted::KleeneOp::ZeroOrMore { // If sequence is potentially empty, then // union them (preserving first emptiness). first.add_all(&TokenSet { maybe_empty: true, ..subfirst }); @@ -401,18 +403,19 @@ impl FirstSets { // walks forward over `tts` until all potential FIRST tokens are // identified. - fn first(&self, tts: &[TokenTree]) -> TokenSet { + fn first(&self, tts: &[quoted::TokenTree]) -> TokenSet { + use self::quoted::TokenTree; + let mut first = TokenSet::empty(); for tt in tts.iter() { assert!(first.maybe_empty); match *tt { - TokenTree::Token(sp, ref tok) => { - first.add_one((sp, tok.clone())); + TokenTree::Token(..) | TokenTree::MetaVarDecl(..) 
=> { + first.add_one(tt.clone()); return first; } TokenTree::Delimited(span, ref delimited) => { - first.add_one((delimited.open_tt(span).span(), - Token::OpenDelim(delimited.delim))); + first.add_one(delimited.open_tt(span)); return first; } TokenTree::Sequence(sp, ref seq_rep) => { @@ -424,13 +427,13 @@ impl FirstSets { if let (Some(ref sep), true) = (seq_rep.separator.clone(), subfirst.maybe_empty) { - first.add_one_maybe((sp, sep.clone())); + first.add_one_maybe(TokenTree::Token(sp, sep.clone())); } assert!(first.maybe_empty); first.add_all(subfirst); if subfirst.maybe_empty || - seq_rep.op == tokenstream::KleeneOp::ZeroOrMore { + seq_rep.op == quoted::KleeneOp::ZeroOrMore { // continue scanning for more first // tokens, but also make sure we // restore empty-tracking state @@ -460,8 +463,8 @@ impl FirstSets { } } -// A set of Tokens, which may include MatchNt tokens (for -// macro-by-example syntactic variables). It also carries the +// A set of `quoted::TokenTree`s, which may include `TokenTree::Match`s +// (for macro-by-example syntactic variables). It also carries the // `maybe_empty` flag; that is true if and only if the matcher can // match an empty token sequence. // @@ -472,7 +475,7 @@ impl FirstSets { // (Notably, we must allow for *-op to occur zero times.) #[derive(Clone, Debug)] struct TokenSet { - tokens: Vec<(Span, Token)>, + tokens: Vec<quoted::TokenTree>, maybe_empty: bool, } @@ -482,13 +485,13 @@ impl TokenSet { // Returns the set `{ tok }` for the single-token (and thus // non-empty) sequence [tok]. - fn singleton(tok: (Span, Token)) -> Self { + fn singleton(tok: quoted::TokenTree) -> Self { TokenSet { tokens: vec![tok], maybe_empty: false } } // Changes self to be the set `{ tok }`. // Since `tok` is always present, marks self as non-empty. 
- fn replace_with(&mut self, tok: (Span, Token)) { + fn replace_with(&mut self, tok: quoted::TokenTree) { self.tokens.clear(); self.tokens.push(tok); self.maybe_empty = false; @@ -503,7 +506,7 @@ impl TokenSet { } // Adds `tok` to the set for `self`, marking sequence as non-empy. - fn add_one(&mut self, tok: (Span, Token)) { + fn add_one(&mut self, tok: quoted::TokenTree) { if !self.tokens.contains(&tok) { self.tokens.push(tok); } @@ -511,7 +514,7 @@ impl TokenSet { } // Adds `tok` to the set for `self`. (Leaves `maybe_empty` flag alone.) - fn add_one_maybe(&mut self, tok: (Span, Token)) { + fn add_one_maybe(&mut self, tok: quoted::TokenTree) { if !self.tokens.contains(&tok) { self.tokens.push(tok); } @@ -549,9 +552,9 @@ impl TokenSet { // see `FirstSets::new`. fn check_matcher_core(sess: &ParseSess, first_sets: &FirstSets, - matcher: &[TokenTree], + matcher: &[quoted::TokenTree], follow: &TokenSet) -> TokenSet { - use print::pprust::token_to_string; + use self::quoted::TokenTree; let mut last = TokenSet::empty(); @@ -576,11 +579,11 @@ fn check_matcher_core(sess: &ParseSess, // First, update `last` so that it corresponds to the set // of NT tokens that might end the sequence `... token`. match *token { - TokenTree::Token(sp, ref tok) => { + TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => { let can_be_followed_by_any; - if let Err(bad_frag) = has_legal_fragment_specifier(tok) { + if let Err(bad_frag) = has_legal_fragment_specifier(token) { let msg = format!("invalid fragment specifier `{}`", bad_frag); - sess.span_diagnostic.struct_span_err(sp, &msg) + sess.span_diagnostic.struct_span_err(token.span(), &msg) .help("valid fragment specifiers are `ident`, `block`, \ `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \ and `item`") @@ -589,7 +592,7 @@ fn check_matcher_core(sess: &ParseSess, // from error messages.) 
can_be_followed_by_any = true; } else { - can_be_followed_by_any = token_can_be_followed_by_any(tok); + can_be_followed_by_any = token_can_be_followed_by_any(token); } if can_be_followed_by_any { @@ -599,13 +602,12 @@ fn check_matcher_core(sess: &ParseSess, // followed by anything against SUFFIX. continue 'each_token; } else { - last.replace_with((sp, tok.clone())); + last.replace_with(token.clone()); suffix_first = build_suffix_first(); } } TokenTree::Delimited(span, ref d) => { - let my_suffix = TokenSet::singleton((d.close_tt(span).span(), - Token::CloseDelim(d.delim))); + let my_suffix = TokenSet::singleton(d.close_tt(span)); check_matcher_core(sess, first_sets, &d.tts, &my_suffix); // don't track non NT tokens last.replace_with_irrelevant(); @@ -629,7 +631,7 @@ fn check_matcher_core(sess: &ParseSess, let mut new; let my_suffix = if let Some(ref u) = seq_rep.separator { new = suffix_first.clone(); - new.add_one_maybe((sp, u.clone())); + new.add_one_maybe(TokenTree::Token(sp, u.clone())); &new } else { &suffix_first @@ -655,12 +657,13 @@ fn check_matcher_core(sess: &ParseSess, // Now `last` holds the complete set of NT tokens that could // end the sequence before SUFFIX. Check that every one works with `suffix`. - 'each_last: for &(_sp, ref t) in &last.tokens { - if let MatchNt(ref name, ref frag_spec) = *t { - for &(sp, ref next_token) in &suffix_first.tokens { + 'each_last: for token in &last.tokens { + if let TokenTree::MetaVarDecl(_, ref name, ref frag_spec) = *token { + for next_token in &suffix_first.tokens { match is_in_follow(next_token, &frag_spec.name.as_str()) { Err((msg, help)) => { - sess.span_diagnostic.struct_span_err(sp, &msg).help(help).emit(); + sess.span_diagnostic.struct_span_err(next_token.span(), &msg) + .help(help).emit(); // don't bother reporting every source of // conflict for a particular element of `last`. 
continue 'each_last; @@ -676,12 +679,12 @@ fn check_matcher_core(sess: &ParseSess, }; sess.span_diagnostic.span_err( - sp, + next_token.span(), &format!("`${name}:{frag}` {may_be} followed by `{next}`, which \ is not allowed for `{frag}` fragments", name=name, frag=frag_spec, - next=token_to_string(next_token), + next=quoted_tt_to_string(next_token), may_be=may_be) ); } @@ -693,8 +696,8 @@ fn check_matcher_core(sess: &ParseSess, last } -fn token_can_be_followed_by_any(tok: &Token) -> bool { - if let &MatchNt(_, ref frag_spec) = tok { +fn token_can_be_followed_by_any(tok: &quoted::TokenTree) -> bool { + if let quoted::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok { frag_can_be_followed_by_any(&frag_spec.name.as_str()) } else { // (Non NT's can always be followed by anthing in matchers.) @@ -732,8 +735,10 @@ fn frag_can_be_followed_by_any(frag: &str) -> bool { /// break macros that were relying on that binary operator as a /// separator. // when changing this do not forget to update doc/book/macros.md! -fn is_in_follow(tok: &Token, frag: &str) -> Result<bool, (String, &'static str)> { - if let &CloseDelim(_) = tok { +fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'static str)> { + use self::quoted::TokenTree; + + if let TokenTree::Token(_, token::CloseDelim(_)) = *tok { // closing a token tree can never be matched by any fragment; // iow, we always require that `(` and `)` match, etc. 
Ok(true) @@ -749,27 +754,30 @@ fn is_in_follow(tok: &Token, frag: &str) -> Result<bool, (String, &'static str)> // maintain Ok(true) }, - "stmt" | "expr" => { - match *tok { + "stmt" | "expr" => match *tok { + TokenTree::Token(_, ref tok) => match *tok { FatArrow | Comma | Semi => Ok(true), _ => Ok(false) - } + }, + _ => Ok(false), }, - "pat" => { - match *tok { + "pat" => match *tok { + TokenTree::Token(_, ref tok) => match *tok { FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true), Ident(i) if i.name == "if" || i.name == "in" => Ok(true), _ => Ok(false) - } + }, + _ => Ok(false), }, - "path" | "ty" => { - match *tok { + "path" | "ty" => match *tok { + TokenTree::Token(_, ref tok) => match *tok { OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) | Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true), - MatchNt(_, ref frag) if frag.name == "block" => Ok(true), Ident(i) if i.name == "as" || i.name == "where" => Ok(true), _ => Ok(false) - } + }, + TokenTree::MetaVarDecl(_, _, frag) if frag.name == "block" => Ok(true), + _ => Ok(false), }, "ident" => { // being a single token, idents are harmless Ok(true) }, @@ -780,6 +788,7 @@ fn is_in_follow(tok: &Token, frag: &str) -> Result<bool, (String, &'static str)> // harmless Ok(true) }, + "" => Ok(true), // keywords::Invalid _ => Err((format!("invalid fragment specifier `{}`", frag), "valid fragment specifiers are `ident`, `block`, \ `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \ @@ -788,9 +797,9 @@ } } -fn has_legal_fragment_specifier(tok: &Token) -> Result<(), String> { +fn has_legal_fragment_specifier(tok: &quoted::TokenTree) -> Result<(), String> { debug!("has_legal_fragment_specifier({:?})", tok); - if let &MatchNt(_, ref frag_spec) = tok { + if let quoted::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok { let s = &frag_spec.name.as_str(); if !is_legal_fragment_specifier(s) { return Err(s.to_string()); } @@ 
-802,7 +811,15 @@ fn has_legal_fragment_specifier(tok: &Token) -> Result<(), String> { fn is_legal_fragment_specifier(frag: &str) -> bool { match frag { "item" | "block" | "stmt" | "expr" | "pat" | - "path" | "ty" | "ident" | "meta" | "tt" => true, + "path" | "ty" | "ident" | "meta" | "tt" | "" => true, _ => false, } } + +fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String { + match *tt { + quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok), + quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind), + _ => panic!("unexpected quoted::TokenTree::{Sequence or Delimited} in follow set checker"), + } +} diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs new file mode 100644 index 0000000000000..530824b28348a --- /dev/null +++ b/src/libsyntax/ext/tt/quoted.rs @@ -0,0 +1,234 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +use ast; +use ext::tt::macro_parser; +use parse::{ParseSess, token}; +use print::pprust; +use symbol::{keywords, Symbol}; +use syntax_pos::{DUMMY_SP, Span, BytePos}; +use tokenstream; + +use std::rc::Rc; + +#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] +pub struct Delimited { + pub delim: token::DelimToken, + pub tts: Vec<TokenTree>, +} + +impl Delimited { + pub fn open_token(&self) -> token::Token { + token::OpenDelim(self.delim) + } + + pub fn close_token(&self) -> token::Token { + token::CloseDelim(self.delim) + } + + pub fn open_tt(&self, span: Span) -> TokenTree { + let open_span = match span { + DUMMY_SP => DUMMY_SP, + _ => Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span }, + }; + TokenTree::Token(open_span, self.open_token()) + } + + pub fn close_tt(&self, span: Span) -> TokenTree { + let close_span = match span { + DUMMY_SP => DUMMY_SP, + _ => Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span }, + }; + TokenTree::Token(close_span, self.close_token()) + } +} + +#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] +pub struct SequenceRepetition { + /// The sequence of token trees + pub tts: Vec<TokenTree>, + /// The optional separator + pub separator: Option<token::Token>, + /// Whether the sequence can be repeated zero (*), or one or more times (+) + pub op: KleeneOp, + /// The number of `Match`s that appear in the sequence (and subsequences) + pub num_captures: usize, +} + +/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star) +/// for token sequences. +#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] +pub enum KleeneOp { + ZeroOrMore, + OneOrMore, +} + +/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)` +/// are "first-class" token trees. 
+#[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] +pub enum TokenTree { + Token(Span, token::Token), + Delimited(Span, Rc<Delimited>), + /// A kleene-style repetition sequence with a span + Sequence(Span, Rc<SequenceRepetition>), + /// Matches a nonterminal. This is only used in the left hand side of MBE macros. + MetaVarDecl(Span, ast::Ident /* name to bind */, ast::Ident /* kind of nonterminal */), +} + +impl TokenTree { + pub fn len(&self) -> usize { + match *self { + TokenTree::Delimited(_, ref delimed) => match delimed.delim { + token::NoDelim => delimed.tts.len(), + _ => delimed.tts.len() + 2, + }, + TokenTree::Sequence(_, ref seq) => seq.tts.len(), + _ => 0, + } + } + + pub fn get_tt(&self, index: usize) -> TokenTree { + match (self, index) { + (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => { + delimed.tts[index].clone() + } + (&TokenTree::Delimited(span, ref delimed), _) => { + if index == 0 { + return delimed.open_tt(span); + } + if index == delimed.tts.len() + 1 { + return delimed.close_tt(span); + } + delimed.tts[index - 1].clone() + } + (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(), + _ => panic!("Cannot expand a token tree"), + } + } + + /// Retrieve the TokenTree's span. 
+ pub fn span(&self) -> Span { + match *self { + TokenTree::Token(sp, _) | + TokenTree::MetaVarDecl(sp, _, _) | + TokenTree::Delimited(sp, _) | + TokenTree::Sequence(sp, _) => sp, + } + } +} + +pub fn parse(input: &[tokenstream::TokenTree], expect_matchers: bool, sess: &ParseSess) + -> Vec<TokenTree> { + let mut result = Vec::new(); + let mut trees = input.iter().cloned(); + while let Some(tree) = trees.next() { + let tree = parse_tree(tree, &mut trees, expect_matchers, sess); + match tree { + TokenTree::Token(start_sp, token::SubstNt(ident)) if expect_matchers => { + let span = match trees.next() { + Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() { + Some(tokenstream::TokenTree::Token(end_sp, token::Ident(kind))) => { + let span = Span { lo: start_sp.lo, ..end_sp }; + result.push(TokenTree::MetaVarDecl(span, ident, kind)); + continue + } + tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), + }, + tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp), + }; + sess.missing_fragment_specifiers.borrow_mut().insert(span); + result.push(TokenTree::MetaVarDecl(span, ident, keywords::Invalid.ident())); + } + _ => result.push(tree), + } + } + result +} + +fn parse_tree<I>(tree: tokenstream::TokenTree, + trees: &mut I, + expect_matchers: bool, + sess: &ParseSess) + -> TokenTree + where I: Iterator<Item = tokenstream::TokenTree>, +{ + match tree { + tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() { + Some(tokenstream::TokenTree::Delimited(span, ref delimited)) => { + if delimited.delim != token::Paren { + let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim)); + let msg = format!("expected `(`, found `{}`", tok); + sess.span_diagnostic.span_err(span, &msg); + } + let sequence = parse(&delimited.tts, expect_matchers, sess); + let (separator, op) = parse_sep_and_kleene_op(trees, span, sess); + let name_captures = macro_parser::count_names(&sequence); + 
TokenTree::Sequence(span, Rc::new(SequenceRepetition { + tts: sequence, + separator: separator, + op: op, + num_captures: name_captures, + })) + } + Some(tokenstream::TokenTree::Token(ident_span, token::Ident(ident))) => { + let span = Span { lo: span.lo, ..ident_span }; + if ident.name == keywords::Crate.name() { + let ident = ast::Ident { name: Symbol::intern("$crate"), ..ident }; + TokenTree::Token(span, token::Ident(ident)) + } else { + TokenTree::Token(span, token::SubstNt(ident)) + } + } + Some(tokenstream::TokenTree::Token(span, tok)) => { + let msg = format!("expected identifier, found `{}`", pprust::token_to_string(&tok)); + sess.span_diagnostic.span_err(span, &msg); + TokenTree::Token(span, token::SubstNt(keywords::Invalid.ident())) + } + None => TokenTree::Token(span, token::Dollar), + }, + tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok), + tokenstream::TokenTree::Delimited(span, delimited) => { + TokenTree::Delimited(span, Rc::new(Delimited { + delim: delimited.delim, + tts: parse(&delimited.tts, expect_matchers, sess), + })) + } + } +} + +fn parse_sep_and_kleene_op<I>(input: &mut I, span: Span, sess: &ParseSess) + -> (Option<token::Token>, KleeneOp) + where I: Iterator<Item = tokenstream::TokenTree>, +{ + fn kleene_op(token: &token::Token) -> Option<KleeneOp> { + match *token { + token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore), + token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore), + _ => None, + } + } + + let span = match input.next() { + Some(tokenstream::TokenTree::Token(span, tok)) => match kleene_op(&tok) { + Some(op) => return (None, op), + None => match input.next() { + Some(tokenstream::TokenTree::Token(span, tok2)) => match kleene_op(&tok2) { + Some(op) => return (Some(tok), op), + None => span, + }, + tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), + } + }, + tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), + }; + + sess.span_diagnostic.span_err(span, 
"expected `*` or `+`"); + (None, KleeneOp::ZeroOrMore) +} diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 38becbe7b1d30..90f64a5208f75 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -7,268 +7,249 @@ // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. -use self::LockstepIterSize::*; use ast::Ident; use errors::Handler; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; -use parse::token::{self, MatchNt, SubstNt, Token, NtIdent, NtTT}; +use ext::tt::quoted; +use parse::token::{self, SubstNt, Token, NtIdent, NtTT}; use syntax_pos::{Span, DUMMY_SP}; -use tokenstream::{self, TokenTree}; +use tokenstream::{TokenTree, Delimited}; use util::small_vector::SmallVector; use std::rc::Rc; +use std::mem; use std::ops::Add; use std::collections::HashMap; -///an unzipping of `TokenTree`s -#[derive(Clone)] -struct TtFrame { - forest: TokenTree, - idx: usize, - dotdotdoted: bool, - sep: Option<Token>, +// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`). 
+enum Frame { + Delimited { + forest: Rc<quoted::Delimited>, + idx: usize, + span: Span, + }, + Sequence { + forest: Rc<quoted::SequenceRepetition>, + idx: usize, + sep: Option<Token>, + }, } -#[derive(Clone)] -struct TtReader<'a> { - sp_diag: &'a Handler, - /// the unzipped tree: - stack: SmallVector<TtFrame>, - /* for MBE-style macro transcription */ - interpolations: HashMap<Ident, Rc<NamedMatch>>, +impl Frame { + fn new(tts: Vec<quoted::TokenTree>) -> Frame { + let forest = Rc::new(quoted::Delimited { delim: token::NoDelim, tts: tts }); + Frame::Delimited { forest: forest, idx: 0, span: DUMMY_SP } + } +} + +impl Iterator for Frame { + type Item = quoted::TokenTree; - repeat_idx: Vec<usize>, - repeat_len: Vec<usize>, + fn next(&mut self) -> Option<quoted::TokenTree> { + match *self { + Frame::Delimited { ref forest, ref mut idx, .. } => { + *idx += 1; + forest.tts.get(*idx - 1).cloned() + } + Frame::Sequence { ref forest, ref mut idx, .. } => { + *idx += 1; + forest.tts.get(*idx - 1).cloned() + } + } + } } /// This can do Macro-By-Example transcription. On the other hand, if -/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can +/// `src` contains no `TokenTree::{Sequence, Match}`s, or `SubstNt`s, `interp` can /// (and should) be None. pub fn transcribe(sp_diag: &Handler, interp: Option<HashMap<Ident, Rc<NamedMatch>>>, - src: Vec<tokenstream::TokenTree>) + src: Vec<quoted::TokenTree>) -> Vec<TokenTree> { - let mut r = TtReader { - sp_diag: sp_diag, - stack: SmallVector::one(TtFrame { - forest: TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition { - tts: src, - // doesn't matter. This merely holds the root unzipping. 
- separator: None, op: tokenstream::KleeneOp::ZeroOrMore, num_captures: 0 - })), - idx: 0, - dotdotdoted: false, - sep: None, - }), - interpolations: match interp { /* just a convenience */ - None => HashMap::new(), - Some(x) => x, - }, - repeat_idx: Vec::new(), - repeat_len: Vec::new(), - }; + let mut stack = SmallVector::one(Frame::new(src)); + let interpolations = interp.unwrap_or_else(HashMap::new); /* just a convenience */ + let mut repeats = Vec::new(); + let mut result = Vec::new(); + let mut result_stack = Vec::new(); - let mut tts = Vec::new(); - let mut prev_span = DUMMY_SP; - while let Some(tt) = tt_next_token(&mut r, prev_span) { - prev_span = tt.span(); - tts.push(tt); - } - tts -} - -fn lookup_cur_matched_by_matched(r: &TtReader, start: Rc<NamedMatch>) -> Rc<NamedMatch> { - r.repeat_idx.iter().fold(start, |ad, idx| { - match *ad { - MatchedNonterminal(_) => { - // end of the line; duplicate henceforth - ad.clone() + loop { + let tree = if let Some(tree) = stack.last_mut().unwrap().next() { + tree + } else { + if let Frame::Sequence { ref mut idx, ref sep, .. 
} = *stack.last_mut().unwrap() { + let (ref mut repeat_idx, repeat_len) = *repeats.last_mut().unwrap(); + *repeat_idx += 1; + if *repeat_idx < repeat_len { + *idx = 0; + if let Some(sep) = sep.clone() { + // repeat same span, I guess + let prev_span = result.last().map(TokenTree::span).unwrap_or(DUMMY_SP); + result.push(TokenTree::Token(prev_span, sep)); + } + continue + } } - MatchedSeq(ref ads, _) => ads[*idx].clone() - } - }) -} - -fn lookup_cur_matched(r: &TtReader, name: Ident) -> Option<Rc<NamedMatch>> { - let matched_opt = r.interpolations.get(&name).cloned(); - matched_opt.map(|s| lookup_cur_matched_by_matched(r, s)) -} - -#[derive(Clone)] -enum LockstepIterSize { - LisUnconstrained, - LisConstraint(usize, Ident), - LisContradiction(String), -} - -impl Add for LockstepIterSize { - type Output = LockstepIterSize; - fn add(self, other: LockstepIterSize) -> LockstepIterSize { - match self { - LisUnconstrained => other, - LisContradiction(_) => self, - LisConstraint(l_len, ref l_id) => match other { - LisUnconstrained => self.clone(), - LisContradiction(_) => other, - LisConstraint(r_len, _) if l_len == r_len => self.clone(), - LisConstraint(r_len, r_id) => { - LisContradiction(format!("inconsistent lockstep iteration: \ - '{}' has {} items, but '{}' has {}", - l_id, l_len, r_id, r_len)) + match stack.pop().unwrap() { + Frame::Sequence { .. } => { + repeats.pop(); + } + Frame::Delimited { forest, span, .. 
} => { + if result_stack.is_empty() { + return result; + } + let tree = TokenTree::Delimited(span, Rc::new(Delimited { + delim: forest.delim, + tts: result, + })); + result = result_stack.pop().unwrap(); + result.push(tree); } - }, - } - } -} - -fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize { - match *t { - TokenTree::Delimited(_, ref delimed) => { - delimed.tts.iter().fold(LisUnconstrained, |size, tt| { - size + lockstep_iter_size(tt, r) - }) - }, - TokenTree::Sequence(_, ref seq) => { - seq.tts.iter().fold(LisUnconstrained, |size, tt| { - size + lockstep_iter_size(tt, r) - }) - }, - TokenTree::Token(_, SubstNt(name)) | TokenTree::Token(_, MatchNt(name, _)) => - match lookup_cur_matched(r, name) { - Some(matched) => match *matched { - MatchedNonterminal(_) => LisUnconstrained, - MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name), - }, - _ => LisUnconstrained - }, - TokenTree::Token(..) => LisUnconstrained, - } -} - -/// Return the next token from the TtReader. -/// EFFECT: advances the reader's token field -fn tt_next_token(r: &mut TtReader, prev_span: Span) -> Option<TokenTree> { - loop { - let should_pop = if let Some(frame) = r.stack.last() { - if frame.idx < frame.forest.len() { - break; } - !frame.dotdotdoted || *r.repeat_idx.last().unwrap() == *r.repeat_len.last().unwrap() - 1 - } else { - return None; + continue }; - /* done with this set; pop or repeat? 
 */ - if should_pop { - let prev = r.stack.pop().unwrap(); - if let Some(frame) = r.stack.last_mut() { - frame.idx += 1; - } else { - return None; - } - if prev.dotdotdoted { - r.repeat_idx.pop(); - r.repeat_len.pop(); - } - } else { /* repeat */ - *r.repeat_idx.last_mut().unwrap() += 1; - r.stack.last_mut().unwrap().idx = 0; - if let Some(tk) = r.stack.last().unwrap().sep.clone() { - return Some(TokenTree::Token(prev_span, tk)); // repeat same span, I guess - } - } - } - loop { /* because it's easiest, this handles `TokenTree::Delimited` not starting - with a `TokenTree::Token`, even though it won't happen */ - let t = { - let frame = r.stack.last().unwrap(); - // FIXME(pcwalton): Bad copy. - frame.forest.get_tt(frame.idx) - }; - match t { - TokenTree::Sequence(sp, seq) => { + match tree { + quoted::TokenTree::Sequence(sp, seq) => { // FIXME(pcwalton): Bad copy. - match lockstep_iter_size(&TokenTree::Sequence(sp, seq.clone()), - r) { - LisUnconstrained => { - panic!(r.sp_diag.span_fatal( + match lockstep_iter_size(&quoted::TokenTree::Sequence(sp, seq.clone()), + &interpolations, + &repeats) { + LockstepIterSize::Unconstrained => { + panic!(sp_diag.span_fatal( sp.clone(), /* blame macro writer */ "attempted to repeat an expression \ containing no syntax \ variables matched as repeating at this depth")); } - LisContradiction(ref msg) => { + LockstepIterSize::Contradiction(ref msg) => { // FIXME #2887 blame macro invoker instead - panic!(r.sp_diag.span_fatal(sp.clone(), &msg[..])); + panic!(sp_diag.span_fatal(sp.clone(), &msg[..])); } - LisConstraint(len, _) => { + LockstepIterSize::Constraint(len, _) => { if len == 0 { - if seq.op == tokenstream::KleeneOp::OneOrMore { + if seq.op == quoted::KleeneOp::OneOrMore { // FIXME #2887 blame invoker - panic!(r.sp_diag.span_fatal(sp.clone(), - "this must repeat at least once")); + panic!(sp_diag.span_fatal(sp.clone(), + "this must repeat at least once")); } - - r.stack.last_mut().unwrap().idx += 1; - return tt_next_token(r, 
prev_span); + } else { + repeats.push((0, len)); + stack.push(Frame::Sequence { + idx: 0, + sep: seq.separator.clone(), + forest: seq, + }); } - r.repeat_len.push(len); - r.repeat_idx.push(0); - r.stack.push(TtFrame { - idx: 0, - dotdotdoted: true, - sep: seq.separator.clone(), - forest: TokenTree::Sequence(sp, seq), - }); } } } // FIXME #2887: think about span stuff here - TokenTree::Token(sp, SubstNt(ident)) => { - r.stack.last_mut().unwrap().idx += 1; - match lookup_cur_matched(r, ident) { - None => { - return Some(TokenTree::Token(sp, SubstNt(ident))); - // this can't be 0 length, just like TokenTree::Delimited - } + quoted::TokenTree::Token(sp, SubstNt(ident)) => { + match lookup_cur_matched(ident, &interpolations, &repeats) { + None => result.push(TokenTree::Token(sp, SubstNt(ident))), Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched { match **nt { // sidestep the interpolation tricks for ident because // (a) idents can be in lots of places, so it'd be a pain // (b) we actually can, since it's a token. NtIdent(ref sn) => { - return Some(TokenTree::Token(sn.span, token::Ident(sn.node))); + result.push(TokenTree::Token(sn.span, token::Ident(sn.node))); } - NtTT(ref tt) => return Some(tt.clone()), + NtTT(ref tt) => result.push(tt.clone()), _ => { // FIXME(pcwalton): Bad copy - return Some(TokenTree::Token(sp, token::Interpolated(nt.clone()))); + result.push(TokenTree::Token(sp, token::Interpolated(nt.clone()))); } } } else { - panic!(r.sp_diag.span_fatal( + panic!(sp_diag.span_fatal( sp, /* blame the macro writer */ &format!("variable '{}' is still repeating at this depth", ident))); } } } - // TokenTree::Delimited or any token that can be unzipped - seq @ TokenTree::Delimited(..) 
 | seq @ TokenTree::Token(_, MatchNt(..)) => { - // do not advance the idx yet - r.stack.push(TtFrame { - forest: seq, - idx: 0, - dotdotdoted: false, - sep: None - }); - // if this could be 0-length, we'd need to potentially recur here + quoted::TokenTree::Delimited(span, delimited) => { + stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span }); + result_stack.push(mem::replace(&mut result, Vec::new())); } - tt @ TokenTree::Token(..) => { - r.stack.last_mut().unwrap().idx += 1; - return Some(tt); + quoted::TokenTree::Token(span, tok) => result.push(TokenTree::Token(span, tok)), + quoted::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl"), + } + } +} + +fn lookup_cur_matched(ident: Ident, + interpolations: &HashMap<Ident, Rc<NamedMatch>>, + repeats: &[(usize, usize)]) + -> Option<Rc<NamedMatch>> { + interpolations.get(&ident).map(|matched| { + repeats.iter().fold(matched.clone(), |ad, &(idx, _)| { + match *ad { + MatchedNonterminal(_) => { + // end of the line; duplicate henceforth + ad.clone() + } + MatchedSeq(ref ads, _) => ads[idx].clone() } + }) + }) +} + +#[derive(Clone)] +enum LockstepIterSize { + Unconstrained, + Constraint(usize, Ident), + Contradiction(String), +} + +impl Add for LockstepIterSize { + type Output = LockstepIterSize; + + fn add(self, other: LockstepIterSize) -> LockstepIterSize { + match self { + LockstepIterSize::Unconstrained => other, + LockstepIterSize::Contradiction(_) => self, + LockstepIterSize::Constraint(l_len, ref l_id) => match other { + LockstepIterSize::Unconstrained => self.clone(), + LockstepIterSize::Contradiction(_) => other, + LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self.clone(), + LockstepIterSize::Constraint(r_len, r_id) => { + let msg = format!("inconsistent lockstep iteration: \ + '{}' has {} items, but '{}' has {}", + l_id, l_len, r_id, r_len); + LockstepIterSize::Contradiction(msg) + } + }, } } } + +fn lockstep_iter_size(tree: &quoted::TokenTree, + interpolations: 
&HashMap<Ident, Rc<NamedMatch>>, + repeats: &[(usize, usize)]) + -> LockstepIterSize { + use self::quoted::TokenTree; + match *tree { + TokenTree::Delimited(_, ref delimed) => { + delimed.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| { + size + lockstep_iter_size(tt, interpolations, repeats) + }) + }, + TokenTree::Sequence(_, ref seq) => { + seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| { + size + lockstep_iter_size(tt, interpolations, repeats) + }) + }, + TokenTree::Token(_, SubstNt(name)) | TokenTree::MetaVarDecl(_, name, _) => + match lookup_cur_matched(name, interpolations, repeats) { + Some(matched) => match *matched { + MatchedNonterminal(_) => LockstepIterSize::Unconstrained, + MatchedSeq(ref ads, _) => LockstepIterSize::Constraint(ads.len(), name), + }, + _ => LockstepIterSize::Unconstrained + }, + TokenTree::Token(..) => LockstepIterSize::Unconstrained, + } +} diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 1ee070cb92d9f..257b7efba5c8e 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -551,13 +551,6 @@ pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree { } )) }, - TokenTree::Sequence(span, ref seq) => - TokenTree::Sequence(fld.new_span(span), - Rc::new(SequenceRepetition { - tts: fld.fold_tts(&seq.tts), - separator: seq.separator.clone().map(|tok| fld.fold_token(tok)), - ..**seq - })), } } @@ -578,7 +571,6 @@ pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token token::Interpolated(Rc::new(fld.fold_interpolated(nt))) } token::SubstNt(ident) => token::SubstNt(fld.fold_ident(ident)), - token::MatchNt(name, kind) => token::MatchNt(fld.fold_ident(name), fld.fold_ident(kind)), _ => t } } diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 87a03adf6b77c..39a9aff48bf27 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -139,6 +139,7 @@ pub mod ext { pub mod transcribe; pub mod macro_parser; pub mod macro_rules; + pub mod quoted; 
} } diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index b7f6e6a2384f7..de8a87e3a2b32 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -1693,6 +1693,7 @@ mod tests { use feature_gate::UnstableFeatures; use parse::token; use std::cell::RefCell; + use std::collections::HashSet; use std::io; use std::rc::Rc; @@ -1704,6 +1705,7 @@ mod tests { config: CrateConfig::new(), included_mod_stack: RefCell::new(Vec::new()), code_map: cm, + missing_fragment_specifiers: RefCell::new(HashSet::new()), } } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 20e80afc115f5..6fec49b229abe 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -46,6 +46,7 @@ pub struct ParseSess { pub span_diagnostic: Handler, pub unstable_features: UnstableFeatures, pub config: CrateConfig, + pub missing_fragment_specifiers: RefCell<HashSet<Span>>, /// Used to determine and report recursive mod inclusions included_mod_stack: RefCell<Vec<PathBuf>>, code_map: Rc<CodeMap>, @@ -66,6 +67,7 @@ impl ParseSess { span_diagnostic: handler, unstable_features: UnstableFeatures::from_environment(), config: HashSet::new(), + missing_fragment_specifiers: RefCell::new(HashSet::new()), included_mod_stack: RefCell::new(vec![]), code_map: code_map } @@ -139,13 +141,9 @@ pub fn parse_stmt_from_source_str<'a>(name: String, source: String, sess: &'a Pa new_parser_from_source_str(sess, name, source).parse_stmt() } -// Warning: This parses with quote_depth > 0, which is not the default. pub fn parse_tts_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess) - -> PResult<'a, Vec<tokenstream::TokenTree>> { - let mut p = new_parser_from_source_str(sess, name, source); - p.quote_depth += 1; - // right now this is re-creating the token trees from ... token trees. 
- p.parse_all_token_trees() + -> Vec<tokenstream::TokenTree> { + filemap_to_tts(sess, sess.codemap().new_filemap(name, None, source)) } // Create a new parser from a source string @@ -986,7 +984,7 @@ mod tests { _ => panic!("not a macro"), }; - let span = tts.iter().rev().next().unwrap().get_span(); + let span = tts.iter().rev().next().unwrap().span(); match sess.codemap().span_to_snippet(span) { Ok(s) => assert_eq!(&s[..], "{ body }"), diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index b5b8a6bc0ef64..71274c4fdaa4e 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -43,19 +43,16 @@ use {ast, attr}; use codemap::{self, CodeMap, Spanned, spanned, respan}; use syntax_pos::{self, Span, Pos, BytePos, mk_sp}; use errors::{self, DiagnosticBuilder}; -use ext::tt::macro_parser; -use parse; -use parse::classify; +use parse::{self, classify, token}; use parse::common::SeqSep; use parse::lexer::TokenAndSpan; use parse::obsolete::ObsoleteSyntax; -use parse::token::{self, MatchNt, SubstNt}; use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership}; use util::parser::{AssocOp, Fixity}; use print::pprust; use ptr::P; use parse::PResult; -use tokenstream::{self, Delimited, SequenceRepetition, TokenTree}; +use tokenstream::{Delimited, TokenTree}; use symbol::{Symbol, keywords}; use util::ThinVec; @@ -168,8 +165,6 @@ pub struct Parser<'a> { /// the previous token kind prev_token_kind: PrevTokenKind, pub restrictions: Restrictions, - pub quote_depth: usize, // not (yet) related to the quasiquoter - parsing_token_tree: bool, /// The set of seen errors about obsolete syntax. 
Used to suppress /// extra detail when the same error is seen twice pub obsolete_set: HashSet<ObsoleteSyntax>, @@ -329,8 +324,6 @@ impl<'a> Parser<'a> { prev_span: syntax_pos::DUMMY_SP, prev_token_kind: PrevTokenKind::Other, restrictions: Restrictions::empty(), - quote_depth: 0, - parsing_token_tree: false, obsolete_set: HashSet::new(), directory: Directory { path: PathBuf::new(), ownership: DirectoryOwnership::Owned }, root_module_name: None, @@ -359,20 +352,11 @@ impl<'a> Parser<'a> { if i + 1 < tts.len() { self.tts.push((tts, i + 1)); } - // FIXME(jseyfried): remove after fixing #39390 in #39419. - if self.quote_depth > 0 { - if let TokenTree::Sequence(sp, _) = tt { - self.span_err(sp, "attempted to repeat an expression containing no \ - syntax variables matched as repeating at this depth"); - } - } - match tt { - TokenTree::Token(sp, tok) => TokenAndSpan { tok: tok, sp: sp }, - _ if tt.len() > 0 => { - self.tts.push((tt, 0)); - continue - } - _ => continue, + if let TokenTree::Token(sp, tok) = tt { + TokenAndSpan { tok: tok, sp: sp } + } else { + self.tts.push((tt, 0)); + continue } } else { TokenAndSpan { tok: token::Eof, sp: self.span } @@ -997,7 +981,6 @@ impl<'a> Parser<'a> { tok = match tts.get_tt(i) { TokenTree::Token(_, tok) => tok, TokenTree::Delimited(_, delimited) => token::OpenDelim(delimited.delim), - TokenTree::Sequence(..) => token::Dollar, }; } } @@ -1187,10 +1170,7 @@ impl<'a> Parser<'a> { self.expect(&token::Not)?; // eat a matched-delimiter token tree: - let delim = self.expect_open_delim()?; - let tts = self.parse_seq_to_end(&token::CloseDelim(delim), - SeqSep::none(), - |pp| pp.parse_token_tree())?; + let (delim, tts) = self.expect_delimited_token_tree()?; if delim != token::Brace { self.expect(&token::Semi)? 
} @@ -1448,10 +1428,7 @@ impl<'a> Parser<'a> { let path = self.parse_path(PathStyle::Type)?; if self.eat(&token::Not) { // MACRO INVOCATION - let delim = self.expect_open_delim()?; - let tts = self.parse_seq_to_end(&token::CloseDelim(delim), - SeqSep::none(), - |p| p.parse_token_tree())?; + let (_, tts) = self.expect_delimited_token_tree()?; let hi = self.span.hi; TyKind::Mac(spanned(lo, hi, Mac_ { path: path, tts: tts })) } else { @@ -2045,13 +2022,12 @@ impl<'a> Parser<'a> { }) } - fn expect_open_delim(&mut self) -> PResult<'a, token::DelimToken> { - self.expected_tokens.push(TokenType::Token(token::Gt)); + fn expect_delimited_token_tree(&mut self) -> PResult<'a, (token::DelimToken, Vec<TokenTree>)> { match self.token { - token::OpenDelim(delim) => { - self.bump(); - Ok(delim) - }, + token::OpenDelim(delim) => self.parse_token_tree().map(|tree| match tree { + TokenTree::Delimited(_, delimited) => (delim, delimited.tts.clone()), + _ => unreachable!(), + }), _ => Err(self.fatal("expected open delimiter")), } } @@ -2261,10 +2237,7 @@ impl<'a> Parser<'a> { // `!`, as an operator, is prefix, so we know this isn't that if self.eat(&token::Not) { // MACRO INVOCATION expression - let delim = self.expect_open_delim()?; - let tts = self.parse_seq_to_end(&token::CloseDelim(delim), - SeqSep::none(), - |p| p.parse_token_tree())?; + let (_, tts) = self.expect_delimited_token_tree()?; let hi = self.prev_span.hi; return Ok(self.mk_mac_expr(lo, hi, Mac_ { path: pth, tts: tts }, attrs)); } @@ -2586,139 +2559,22 @@ impl<'a> Parser<'a> { return Ok(e); } - // Parse unquoted tokens after a `$` in a token tree - fn parse_unquoted(&mut self) -> PResult<'a, TokenTree> { - let mut sp = self.span; - let name = match self.token { - token::Dollar => { - self.bump(); - - if self.token == token::OpenDelim(token::Paren) { - let Spanned { node: seq, span: seq_span } = self.parse_seq( - &token::OpenDelim(token::Paren), - &token::CloseDelim(token::Paren), - SeqSep::none(), - |p| 
p.parse_token_tree() - )?; - let (sep, repeat) = self.parse_sep_and_kleene_op()?; - let name_num = macro_parser::count_names(&seq); - return Ok(TokenTree::Sequence(mk_sp(sp.lo, seq_span.hi), - Rc::new(SequenceRepetition { - tts: seq, - separator: sep, - op: repeat, - num_captures: name_num - }))); - } else if self.token.is_keyword(keywords::Crate) { - let ident = match self.token { - token::Ident(id) => ast::Ident { name: Symbol::intern("$crate"), ..id }, - _ => unreachable!(), - }; - self.bump(); - return Ok(TokenTree::Token(sp, token::Ident(ident))); - } else { - sp = mk_sp(sp.lo, self.span.hi); - self.parse_ident().unwrap_or_else(|mut e| { - e.emit(); - keywords::Invalid.ident() - }) - } - } - token::SubstNt(name) => { - self.bump(); - name - } - _ => unreachable!() - }; - // continue by trying to parse the `:ident` after `$name` - if self.token == token::Colon && - self.look_ahead(1, |t| t.is_ident() && !t.is_any_keyword()) { - self.bump(); - sp = mk_sp(sp.lo, self.span.hi); - let nt_kind = self.parse_ident()?; - Ok(TokenTree::Token(sp, MatchNt(name, nt_kind))) - } else { - Ok(TokenTree::Token(sp, SubstNt(name))) - } - } - pub fn check_unknown_macro_variable(&mut self) { - if self.quote_depth == 0 && !self.parsing_token_tree { - match self.token { - token::SubstNt(name) => - self.fatal(&format!("unknown macro variable `{}`", name)).emit(), - _ => {} - } - } - } - - /// Parse an optional separator followed by a Kleene-style - /// repetition token (+ or *). - pub fn parse_sep_and_kleene_op(&mut self) - -> PResult<'a, (Option<token::Token>, tokenstream::KleeneOp)> { - fn parse_kleene_op<'a>(parser: &mut Parser<'a>) -> - PResult<'a, Option<tokenstream::KleeneOp>> { - match parser.token { - token::BinOp(token::Star) => { - parser.bump(); - Ok(Some(tokenstream::KleeneOp::ZeroOrMore)) - }, - token::BinOp(token::Plus) => { - parser.bump(); - Ok(Some(tokenstream::KleeneOp::OneOrMore)) - }, - _ => Ok(None) - } - }; - - if let Some(kleene_op) = parse_kleene_op(self)? 
{ - return Ok((None, kleene_op)); - } - - let separator = match self.token { - token::CloseDelim(..) => None, - _ => Some(self.bump_and_get()), - }; - match parse_kleene_op(self)? { - Some(zerok) => Ok((separator, zerok)), - None => return Err(self.fatal("expected `*` or `+`")) + if let token::SubstNt(name) = self.token { + self.fatal(&format!("unknown macro variable `{}`", name)).emit() } } /// parse a single token tree from the input. pub fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> { - // FIXME #6994: currently, this is too eager. It - // parses token trees but also identifies TokenType::Sequence's - // and token::SubstNt's; it's too early to know yet - // whether something will be a nonterminal or a seq - // yet. match self.token { - token::OpenDelim(delim) => { - if self.quote_depth == 0 && self.tts.last().map(|&(_, i)| i == 1).unwrap_or(false) { - let tt = self.tts.pop().unwrap().0; - self.bump(); - return Ok(tt); - } - - let parsing_token_tree = ::std::mem::replace(&mut self.parsing_token_tree, true); - let lo = self.span.lo; - self.bump(); - let tts = self.parse_seq_to_before_tokens(&[&token::CloseDelim(token::Brace), - &token::CloseDelim(token::Paren), - &token::CloseDelim(token::Bracket)], - SeqSep::none(), - |p| p.parse_token_tree(), - |mut e| e.emit()); - self.parsing_token_tree = parsing_token_tree; + token::OpenDelim(..) => { + let tt = self.tts.pop().unwrap().0; + self.span = tt.span(); self.bump(); - - Ok(TokenTree::Delimited(Span { lo: lo, ..self.prev_span }, Rc::new(Delimited { - delim: delim, - tts: tts, - }))) + return Ok(tt); }, - token::CloseDelim(..) | token::Eof => Ok(TokenTree::Token(self.span, token::Eof)), - token::Dollar | token::SubstNt(..) 
if self.quote_depth > 0 => self.parse_unquoted(), + token::CloseDelim(_) | token::Eof => unreachable!(), _ => Ok(TokenTree::Token(self.span, self.bump_and_get())), } } @@ -3528,10 +3384,7 @@ impl<'a> Parser<'a> { token::Not if qself.is_none() => { // Parse macro invocation self.bump(); - let delim = self.expect_open_delim()?; - let tts = self.parse_seq_to_end(&token::CloseDelim(delim), - SeqSep::none(), - |p| p.parse_token_tree())?; + let (_, tts) = self.expect_delimited_token_tree()?; let mac = spanned(lo, self.prev_span.hi, Mac_ { path: path, tts: tts }); pat = PatKind::Mac(mac); } @@ -3831,12 +3684,7 @@ impl<'a> Parser<'a> { }, }; - let tts = self.parse_unspanned_seq( - &token::OpenDelim(delim), - &token::CloseDelim(delim), - SeqSep::none(), - |p| p.parse_token_tree() - )?; + let (_, tts) = self.expect_delimited_token_tree()?; let hi = self.prev_span.hi; let style = if delim == token::Brace { @@ -4744,10 +4592,7 @@ impl<'a> Parser<'a> { self.expect(&token::Not)?; // eat a matched-delimiter token tree: - let delim = self.expect_open_delim()?; - let tts = self.parse_seq_to_end(&token::CloseDelim(delim), - SeqSep::none(), - |p| p.parse_token_tree())?; + let (delim, tts) = self.expect_delimited_token_tree()?; if delim != token::Brace { self.expect(&token::Semi)? 
} @@ -5893,10 +5738,7 @@ impl<'a> Parser<'a> { keywords::Invalid.ident() // no special identifier }; // eat a matched-delimiter token tree: - let delim = self.expect_open_delim()?; - let tts = self.parse_seq_to_end(&token::CloseDelim(delim), - SeqSep::none(), - |p| p.parse_token_tree())?; + let (delim, tts) = self.expect_delimited_token_tree()?; if delim != token::Brace { if !self.eat(&token::Semi) { let prev_span = self.prev_span; diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 0f0c6d0ca83f5..5b65aac92b81c 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -50,8 +50,8 @@ pub enum DelimToken { } impl DelimToken { - pub fn len(&self) -> u32 { - if *self == NoDelim { 0 } else { 1 } + pub fn len(self) -> usize { + if self == NoDelim { 0 } else { 1 } } } @@ -152,9 +152,6 @@ pub enum Token { // Can be expanded into several tokens. /// Doc comment DocComment(ast::Name), - // In left-hand-sides of MBE macros: - /// Parse a nonterminal (name to bind, name of NT) - MatchNt(ast::Ident, ast::Ident), // In right-hand-sides of MBE macros: /// A syntactic variable that will be filled in by macro expansion. 
SubstNt(ast::Ident), diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index f8f1820d0b97e..ec962d03458d1 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -271,7 +271,6 @@ pub fn token_to_string(tok: &Token) -> String { /* Other */ token::DocComment(s) => s.to_string(), token::SubstNt(s) => format!("${}", s), - token::MatchNt(s, t) => format!("${}:{}", s, t), token::Eof => "<eof>".to_string(), token::Whitespace => " ".to_string(), token::Comment => "/* */".to_string(), @@ -1475,20 +1474,6 @@ impl<'a> State<'a> { space(&mut self.s)?; word(&mut self.s, &token_to_string(&delimed.close_token())) }, - TokenTree::Sequence(_, ref seq) => { - word(&mut self.s, "$(")?; - for tt_elt in &seq.tts { - self.print_tt(tt_elt)?; - } - word(&mut self.s, ")")?; - if let Some(ref tk) = seq.separator { - word(&mut self.s, &token_to_string(tk))?; - } - match seq.op { - tokenstream::KleeneOp::ZeroOrMore => word(&mut self.s, "*"), - tokenstream::KleeneOp::OneOrMore => word(&mut self.s, "+"), - } - } } } diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index 86b0fcebeb21e..6665404672133 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -12,9 +12,7 @@ //! //! TokenStreams represent syntactic objects before they are converted into ASTs. //! A `TokenStream` is, roughly speaking, a sequence (eg stream) of `TokenTree`s, -//! which are themselves either a single Token, a Delimited subsequence of tokens, -//! or a SequenceRepetition specifier (for the purpose of sequence generation during macro -//! expansion). +//! which are themselves a single `Token` or a `Delimited` subsequence of tokens. //! //! ## Ownership //! 
TokenStreams are persistent data structures constructed as ropes with reference @@ -28,10 +26,10 @@ use ast::{self, AttrStyle, LitKind}; use syntax_pos::{BytePos, Span, DUMMY_SP}; use codemap::Spanned; use ext::base; -use ext::tt::macro_parser; +use ext::tt::{macro_parser, quoted}; use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use parse::{self, Directory}; -use parse::token::{self, Token, Lit, Nonterminal}; +use parse::token::{self, Token, Lit}; use print::pprust; use serialize::{Decoder, Decodable, Encoder, Encodable}; use symbol::Symbol; @@ -64,7 +62,7 @@ impl Delimited { pub fn open_tt(&self, span: Span) -> TokenTree { let open_span = match span { DUMMY_SP => DUMMY_SP, - _ => Span { hi: span.lo + BytePos(self.delim.len()), ..span }, + _ => Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span }, }; TokenTree::Token(open_span, self.open_token()) } @@ -73,7 +71,7 @@ impl Delimited { pub fn close_tt(&self, span: Span) -> TokenTree { let close_span = match span { DUMMY_SP => DUMMY_SP, - _ => Span { lo: span.hi - BytePos(self.delim.len()), ..span }, + _ => Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span }, }; TokenTree::Token(close_span, self.close_token()) } @@ -84,27 +82,6 @@ impl Delimited { } } -/// A sequence of token trees -#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] -pub struct SequenceRepetition { - /// The sequence of token trees - pub tts: Vec<TokenTree>, - /// The optional separator - pub separator: Option<token::Token>, - /// Whether the sequence can be repeated zero (*), or one or more times (+) - pub op: KleeneOp, - /// The number of `MatchNt`s that appear in the sequence (and subsequences) - pub num_captures: usize, -} - -/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star) -/// for token sequences. 
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] -pub enum KleeneOp { - ZeroOrMore, - OneOrMore, -} - /// When the main rust parser encounters a syntax-extension invocation, it /// parses the arguments to the invocation as a token-tree. This is a very /// loose structure, such that all sorts of different AST-fragments can @@ -123,10 +100,6 @@ pub enum TokenTree { Token(Span, token::Token), /// A delimited sequence of token trees Delimited(Span, Rc<Delimited>), - - // This only makes sense in MBE macros. - /// A kleene-style repetition sequence with a span - Sequence(Span, Rc<SequenceRepetition>), } impl TokenTree { @@ -138,15 +111,10 @@ impl TokenTree { AttrStyle::Inner => 3, } } - TokenTree::Token(_, token::Interpolated(ref nt)) => { - if let Nonterminal::NtTT(..) = **nt { 1 } else { 0 } - }, - TokenTree::Token(_, token::MatchNt(..)) => 3, TokenTree::Delimited(_, ref delimed) => match delimed.delim { token::NoDelim => delimed.tts.len(), _ => delimed.tts.len() + 2, }, - TokenTree::Sequence(_, ref seq) => seq.tts.len(), TokenTree::Token(..) => 0, } } @@ -197,30 +165,12 @@ impl TokenTree { } delimed.tts[index - 1].clone() } - (&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => { - let v = [TokenTree::Token(sp, token::SubstNt(name)), - TokenTree::Token(sp, token::Colon), - TokenTree::Token(sp, token::Ident(kind))]; - v[index].clone() - } - (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(), _ => panic!("Cannot expand a token tree"), } } - /// Returns the `Span` corresponding to this token tree. - pub fn get_span(&self) -> Span { - match *self { - TokenTree::Token(span, _) => span, - TokenTree::Delimited(span, _) => span, - TokenTree::Sequence(span, _) => span, - } - } - /// Use this token tree as a matcher to parse given tts. 
- pub fn parse(cx: &base::ExtCtxt, - mtch: &[TokenTree], - tts: &[TokenTree]) + pub fn parse(cx: &base::ExtCtxt, mtch: &[quoted::TokenTree], tts: &[TokenTree]) -> macro_parser::NamedParseResult { // `None` is because we're not interpolating let directory = Directory { @@ -252,9 +202,7 @@ impl TokenTree { /// Retrieve the TokenTree's span. pub fn span(&self) -> Span { match *self { - TokenTree::Token(sp, _) | - TokenTree::Delimited(sp, _) | - TokenTree::Sequence(sp, _) => sp, + TokenTree::Token(sp, _) | TokenTree::Delimited(sp, _) => sp, } } diff --git a/src/libsyntax_ext/lib.rs b/src/libsyntax_ext/lib.rs index 7533171b08556..f92cde4019f67 100644 --- a/src/libsyntax_ext/lib.rs +++ b/src/libsyntax_ext/lib.rs @@ -79,7 +79,6 @@ pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver, quote_pat: expand_quote_pat, quote_arm: expand_quote_arm, quote_stmt: expand_quote_stmt, - quote_matcher: expand_quote_matcher, quote_attr: expand_quote_attr, quote_arg: expand_quote_arg, quote_block: expand_quote_block, diff --git a/src/test/compile-fail-fulldeps/gated-quote.rs b/src/test/compile-fail-fulldeps/gated-quote.rs index 726af9864b482..63e1c6f16b3e6 100644 --- a/src/test/compile-fail-fulldeps/gated-quote.rs +++ b/src/test/compile-fail-fulldeps/gated-quote.rs @@ -54,8 +54,6 @@ pub fn main() { //~^ ERROR cannot find macro `quote_arm!` in this scope let x = quote_stmt!(ecx, 3); //~^ ERROR cannot find macro `quote_stmt!` in this scope - let x = quote_matcher!(ecx, 3); - //~^ ERROR cannot find macro `quote_matcher!` in this scope let x = quote_attr!(ecx, 3); //~^ ERROR cannot find macro `quote_attr!` in this scope let x = quote_arg!(ecx, 3); diff --git a/src/test/compile-fail/issue-35450.rs b/src/test/compile-fail/issue-35450.rs index d890d02a91047..5f54f269c6c55 100644 --- a/src/test/compile-fail/issue-35450.rs +++ b/src/test/compile-fail/issue-35450.rs @@ -8,9 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-macro_rules! m { ($t:tt) => { $t } } +macro_rules! m { ($($t:tt)*) => { $($t)* } } fn main() { - m!($t); //~ ERROR unknown macro variable - //~| ERROR expected expression + m!($t); //~ ERROR expected expression } diff --git a/src/test/compile-fail/issue-39404.rs b/src/test/compile-fail/issue-39404.rs new file mode 100644 index 0000000000000..0168ae7d91017 --- /dev/null +++ b/src/test/compile-fail/issue-39404.rs @@ -0,0 +1,18 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![deny(missing_fragment_specifier)] //~ NOTE lint level defined here + +macro_rules! m { ($i) => {} } +//~^ ERROR missing fragment specifier +//~| WARN previously accepted +//~| NOTE issue #40107 + +fn main() {} diff --git a/src/test/compile-fail/macro-error.rs b/src/test/compile-fail/macro-error.rs index 78f95e365c44b..82a5aa4872913 100644 --- a/src/test/compile-fail/macro-error.rs +++ b/src/test/compile-fail/macro-error.rs @@ -9,7 +9,7 @@ // except according to those terms. macro_rules! foo { - ($a:expr) => $a; //~ ERROR macro rhs must be delimited + ($a:expr) => a; //~ ERROR macro rhs must be delimited } fn main() { diff --git a/src/test/compile-fail/macro-tt-matchers.rs b/src/test/compile-fail/macro-tt-matchers.rs index 969f1500717d7..7255e7d00b611 100644 --- a/src/test/compile-fail/macro-tt-matchers.rs +++ b/src/test/compile-fail/macro-tt-matchers.rs @@ -17,16 +17,5 @@ macro_rules! foo { foo!(Box); -macro_rules! bar { - ($x:tt) => { - macro_rules! 
baz { - ($x:tt, $y:tt) => { ($x, $y) } - } - } -} - #[rustc_error] -fn main() { //~ ERROR compilation successful - bar!($y); - let _: (i8, i16) = baz!(0i8, 0i16); -} +fn main() {} //~ ERROR compilation successful diff --git a/src/test/compile-fail/malformed_macro_lhs.rs b/src/test/compile-fail/malformed_macro_lhs.rs index 5d81e21f05684..0b437be5393ed 100644 --- a/src/test/compile-fail/malformed_macro_lhs.rs +++ b/src/test/compile-fail/malformed_macro_lhs.rs @@ -9,7 +9,7 @@ // except according to those terms. macro_rules! my_precioooous { - $($t:tt)* => (1); //~ ERROR invalid macro matcher + t => (1); //~ ERROR invalid macro matcher } fn main() { diff --git a/src/test/parse-fail/issue-33569.rs b/src/test/parse-fail/issue-33569.rs index e3c17af82aab4..15d491719a6d5 100644 --- a/src/test/parse-fail/issue-33569.rs +++ b/src/test/parse-fail/issue-33569.rs @@ -12,7 +12,9 @@ macro_rules! foo { { $+ } => { //~ ERROR expected identifier, found `+` + //~^ ERROR missing fragment specifier $(x)(y) //~ ERROR expected `*` or `+` - //~^ ERROR no rules expected the token `)` } } + +foo!(); diff --git a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs index 2b3857048f367..3db69f2167cc6 100644 --- a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs +++ b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs @@ -23,6 +23,7 @@ use syntax::ast::{Ident, Pat}; use syntax::tokenstream::{TokenTree}; use syntax::ext::base::{ExtCtxt, MacResult, MacEager}; use syntax::ext::build::AstBuilder; +use syntax::ext::tt::quoted; use syntax::ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal}; use syntax::ext::tt::macro_parser::{Success, Failure, Error}; use syntax::ext::tt::macro_parser::parse_failure_msg; @@ -33,7 +34,8 @@ use rustc_plugin::Registry; fn expand_mbe_matches(cx: &mut ExtCtxt, _: Span, args: &[TokenTree]) -> Box<MacResult + 'static> { - let mbe_matcher = quote_matcher!(cx, 
$matched:expr, $($pat:pat)|+); + let mbe_matcher = quote_tokens!(cx, $$matched:expr, $$($$pat:pat)|+); + let mbe_matcher = quoted::parse(&mbe_matcher, true, cx.parse_sess); let map = match TokenTree::parse(cx, &mbe_matcher, args) { Success(map) => map, Failure(_, tok) => { diff --git a/src/test/run-pass-fulldeps/mbe_matching_test_macro.rs b/src/test/run-pass-fulldeps/mbe_matching_test_macro.rs index 5383b11cf5363..822b2c9b93b4a 100644 --- a/src/test/run-pass-fulldeps/mbe_matching_test_macro.rs +++ b/src/test/run-pass-fulldeps/mbe_matching_test_macro.rs @@ -14,11 +14,7 @@ #![feature(plugin)] #![plugin(procedural_mbe_matching)] -#[no_link] -extern crate procedural_mbe_matching; - pub fn main() { - let abc = 123u32; assert_eq!(matches!(Some(123), None | Some(0)), false); assert_eq!(matches!(Some(123), None | Some(123)), true); assert_eq!(matches!(true, true), true); diff --git a/src/test/run-pass-fulldeps/proc-macro/auxiliary/issue-39889.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/issue-39889.rs new file mode 100644 index 0000000000000..9094310fb3e76 --- /dev/null +++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/issue-39889.rs @@ -0,0 +1,27 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// force-host +// no-prefer-dynamic + +#![crate_type = "proc-macro"] + +extern crate proc_macro; +use proc_macro::TokenStream; + +#[proc_macro_derive(Issue39889)] +pub fn f(_input: TokenStream) -> TokenStream { + let rules = r#" + macro_rules! 
id { + ($($tt:tt)*) => { $($tt)* }; + } + "#; + rules.parse().unwrap() +} diff --git a/src/test/run-pass-fulldeps/proc-macro/issue-39889.rs b/src/test/run-pass-fulldeps/proc-macro/issue-39889.rs new file mode 100644 index 0000000000000..05610116ad6bf --- /dev/null +++ b/src/test/run-pass-fulldeps/proc-macro/issue-39889.rs @@ -0,0 +1,22 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// aux-build:issue-39889.rs + +#![feature(proc_macro)] +#![allow(unused)] + +extern crate issue_39889; +use issue_39889::Issue39889; + +#[derive(Issue39889)] +struct S; + +fn main() {} diff --git a/src/test/run-pass-fulldeps/quote-tokens.rs b/src/test/run-pass-fulldeps/quote-tokens.rs index 9e9b7ce5bf29d..8e6a69cb58479 100644 --- a/src/test/run-pass-fulldeps/quote-tokens.rs +++ b/src/test/run-pass-fulldeps/quote-tokens.rs @@ -37,7 +37,6 @@ fn syntax_extension(cx: &ExtCtxt) { let _l: P<syntax::ast::Ty> = quote_ty!(cx, &isize); - let _m: Vec<syntax::tokenstream::TokenTree> = quote_matcher!(cx, $($foo:tt,)* bar); let _n: syntax::ast::Attribute = quote_attr!(cx, #![cfg(foo, bar = "baz")]); let _o: Option<P<syntax::ast::Item>> = quote_item!(cx, fn foo<T: ?Sized>() {}); diff --git a/src/test/compile-fail/issue-39709.rs b/src/test/run-pass/issue-39709.rs similarity index 78% rename from src/test/compile-fail/issue-39709.rs rename to src/test/run-pass/issue-39709.rs index 0f66fe8439336..ebca9312a64b0 100644 --- a/src/test/compile-fail/issue-39709.rs +++ b/src/test/run-pass/issue-39709.rs @@ -9,7 +9,6 @@ // except according to those terms. 
fn main() { - println!("{}", { macro_rules! x { ($()*) => {} } 33 }); - //~^ ERROR no syntax variables matched as repeating at this depth + println!("{}", { macro_rules! x { ($(t:tt)*) => {} } 33 }); } diff --git a/src/test/rustdoc/assoc-consts.rs b/src/test/rustdoc/assoc-consts.rs index 8d3f9b59bb2ee..d4119f5d351c1 100644 --- a/src/test/rustdoc/assoc-consts.rs +++ b/src/test/rustdoc/assoc-consts.rs @@ -13,14 +13,16 @@ pub trait Foo { // @has assoc_consts/trait.Foo.html '//*[@class="rust trait"]' \ // 'const FOO: usize;' - // @has - '//*[@id="associatedconstant.FOO"]' 'const FOO' - const FOO: usize; + // @has - '//*[@id="associatedconstant.FOO"]' 'const FOO: usize' + // @has - '//*[@class="docblock"]' 'FOO: usize = 12' + const FOO: usize = 12; } pub struct Bar; impl Bar { // @has assoc_consts/struct.Bar.html '//*[@id="associatedconstant.BAR"]' \ - // 'const BAR: usize = 3' + // 'const BAR: usize' + // @has - '//*[@class="docblock"]' 'BAR: usize = 3' pub const BAR: usize = 3; } diff --git a/src/test/rustdoc/issue-28478.rs b/src/test/rustdoc/issue-28478.rs index 0db92a491ed18..493c08693e94a 100644 --- a/src/test/rustdoc/issue-28478.rs +++ b/src/test/rustdoc/issue-28478.rs @@ -16,7 +16,8 @@ pub trait Bar { // @has - '//*[@id="associatedtype.Bar"]' 'type Bar = ()' // @has - '//*[@href="#associatedtype.Bar"]' 'Bar' type Bar = (); - // @has - '//*[@id="associatedconstant.Baz"]' 'const Baz: usize = 7' + // @has - '//*[@id="associatedconstant.Baz"]' 'const Baz: usize' + // @has - '//*[@class="docblock"]' 'Baz: usize = 7' // @has - '//*[@href="#associatedconstant.Baz"]' 'Baz' const Baz: usize = 7; // @has - '//*[@id="tymethod.bar"]' 'fn bar' diff --git a/src/test/rustdoc/issue-33302.rs b/src/test/rustdoc/issue-33302.rs index c6da6b0575b87..a34ee908ef295 100644 --- a/src/test/rustdoc/issue-33302.rs +++ b/src/test/rustdoc/issue-33302.rs @@ -28,18 +28,40 @@ macro_rules! 
make { fn ignore(_: &X) {} const C: X; // @has issue_33302/trait.T.html \ - // '//*[@class="rust trait"]' 'const D: i32 = 4 * 4;' - // @has - '//*[@id="associatedconstant.D"]' 'const D: i32 = 4 * 4' + // '//*[@class="rust trait"]' 'const D: i32' + // @has - '//*[@class="docblock"]' 'D: i32 = 4 * 4' + // @has - '//*[@id="associatedconstant.D"]' 'const D: i32' const D: i32 = ($n * $n); } // @has issue_33302/struct.S.html \ // '//h3[@class="impl"]' 'impl T<[i32; 16]> for S' - // @has - '//*[@id="associatedconstant.C"]' 'const C: [i32; 16] = [0; 4 * 4]' - // @has - '//*[@id="associatedconstant.D"]' 'const D: i32 = 4 * 4' + // @has - '//*[@id="associatedconstant.C"]' 'const C: [i32; 16]' + // @has - '//*[@id="associatedconstant.D"]' 'const D: i32' + // @has - '//*[@class="docblock"]' 'C: [i32; 16] = [0; 4 * 4]' impl T<[i32; ($n * $n)]> for S { const C: [i32; ($n * $n)] = [0; ($n * $n)]; } + + // @has issue_33302/struct.S.html \ + // '//h3[@class="impl"]' 'impl T<[i32; 16]> for S' + // @has - '//*[@id="associatedconstant.C-1"]' 'const C: (i32,)' + // @has - '//*[@id="associatedconstant.D-1"]' 'const D: i32' + // @has - '//*[@class="docblock"]' 'C: (i32,) = (4,)' + impl T<(i32,)> for S { + const C: (i32,) = ($n,); + } + + // @has issue_33302/struct.S.html \ + // '//h3[@class="impl"]' 'impl T<(i32, i32)> for S' + // @has - '//*[@id="associatedconstant.C-2"]' 'const C: (i32, i32)' + // @has - '//*[@id="associatedconstant.D-2"]' 'const D: i32' + // @has - '//*[@class="docblock"]' 'C: (i32, i32) = (4, 4)' + // @has - '//*[@class="docblock"]' 'D: i32 = 4 / 4' + impl T<(i32, i32)> for S { + const C: (i32, i32) = ($n, $n); + const D: i32 = ($n / $n); + } } }