diff --git a/src/libcore/benches/fmt.rs b/src/libcore/benches/fmt.rs new file mode 100644 index 0000000000000..92f10c760c6d2 --- /dev/null +++ b/src/libcore/benches/fmt.rs @@ -0,0 +1,110 @@ +use std::io::{self, Write as IoWrite}; +use std::fmt::{self, Write as FmtWrite}; +use test::Bencher; + +#[bench] +fn write_vec_value(bh: &mut Bencher) { + bh.iter(|| { + let mut mem = Vec::new(); + for _ in 0..1000 { + mem.write_all("abc".as_bytes()).unwrap(); + } + }); +} + +#[bench] +fn write_vec_ref(bh: &mut Bencher) { + bh.iter(|| { + let mut mem = Vec::new(); + let wr = &mut mem as &mut dyn io::Write; + for _ in 0..1000 { + wr.write_all("abc".as_bytes()).unwrap(); + } + }); +} + +#[bench] +fn write_vec_macro1(bh: &mut Bencher) { + bh.iter(|| { + let mut mem = Vec::new(); + let wr = &mut mem as &mut dyn io::Write; + for _ in 0..1000 { + write!(wr, "abc").unwrap(); + } + }); +} + +#[bench] +fn write_vec_macro2(bh: &mut Bencher) { + bh.iter(|| { + let mut mem = Vec::new(); + let wr = &mut mem as &mut dyn io::Write; + for _ in 0..1000 { + write!(wr, "{}", "abc").unwrap(); + } + }); +} + +#[bench] +fn write_vec_macro_debug(bh: &mut Bencher) { + bh.iter(|| { + let mut mem = Vec::new(); + let wr = &mut mem as &mut dyn io::Write; + for _ in 0..1000 { + write!(wr, "{:?}", "☃").unwrap(); + } + }); +} + +#[bench] +fn write_str_value(bh: &mut Bencher) { + bh.iter(|| { + let mut mem = String::new(); + for _ in 0..1000 { + mem.write_str("abc").unwrap(); + } + }); +} + +#[bench] +fn write_str_ref(bh: &mut Bencher) { + bh.iter(|| { + let mut mem = String::new(); + let wr = &mut mem as &mut dyn fmt::Write; + for _ in 0..1000 { + wr.write_str("abc").unwrap(); + } + }); +} + +#[bench] +fn write_str_macro1(bh: &mut Bencher) { + bh.iter(|| { + let mut mem = String::new(); + for _ in 0..1000 { + write!(mem, "abc").unwrap(); + } + }); +} + +#[bench] +fn write_str_macro2(bh: &mut Bencher) { + bh.iter(|| { + let mut mem = String::new(); + let wr = &mut mem as &mut dyn fmt::Write; + for _ in 0..1000 { + write!(wr, "{}", "abc").unwrap(); + } + }); +} + +#[bench] +fn write_str_macro_debug(bh: &mut Bencher) { + bh.iter(|| { + let mut mem = String::new(); + let wr = &mut mem as &mut dyn fmt::Write; + for _ in 0..1000 { + write!(wr, "{:?}", "☃").unwrap(); + } + }); +} diff --git a/src/libcore/benches/lib.rs b/src/libcore/benches/lib.rs index 5b4971c81dd92..48572af611a5b 100644 --- a/src/libcore/benches/lib.rs +++ b/src/libcore/benches/lib.rs @@ -11,3 +11,4 @@ mod iter; mod num; mod ops; mod slice; +mod fmt; diff --git a/src/libcore/fmt/mod.rs b/src/libcore/fmt/mod.rs index 214b5d3a84f24..fe9a8e1f6fd6e 100644 --- a/src/libcore/fmt/mod.rs +++ b/src/libcore/fmt/mod.rs @@ -997,28 +997,30 @@ pub fn write(output: &mut dyn Write, args: Arguments) -> Result { curarg: args.args.iter(), }; - let mut pieces = args.pieces.iter(); + let mut idx = 0; match args.fmt { None => { // We can use default formatting parameters for all arguments. - for (arg, piece) in args.args.iter().zip(pieces.by_ref()) { + for (arg, piece) in args.args.iter().zip(args.pieces.iter()) { formatter.buf.write_str(*piece)?; (arg.formatter)(arg.value, &mut formatter)?; + idx += 1; } } Some(fmt) => { // Every spec has a corresponding argument that is preceded by // a string piece. - for (arg, piece) in fmt.iter().zip(pieces.by_ref()) { + for (arg, piece) in fmt.iter().zip(args.pieces.iter()) { formatter.buf.write_str(*piece)?; formatter.run(arg)?; + idx += 1; } } } // There can be only one trailing string piece left. 
-    if let Some(piece) = pieces.next() {
+    if let Some(piece) = args.pieces.get(idx) {
         formatter.buf.write_str(*piece)?;
     }
diff --git a/src/libcore/iter/iterator.rs b/src/libcore/iter/iterator.rs
index 640af74817282..0ad29afbadeac 100644
--- a/src/libcore/iter/iterator.rs
+++ b/src/libcore/iter/iterator.rs
@@ -2358,7 +2358,7 @@ pub trait Iterator {
     ///
     /// ```
     /// fn factorial(n: u32) -> u32 {
-    ///     (1..).take_while(|&i| i <= n).product()
+    ///     (1..=n).product()
     /// }
     /// assert_eq!(factorial(0), 1);
     /// assert_eq!(factorial(1), 1);
diff --git a/src/libcore/iter/mod.rs b/src/libcore/iter/mod.rs
index 1ef5428a789cf..974906b682d21 100644
--- a/src/libcore/iter/mod.rs
+++ b/src/libcore/iter/mod.rs
@@ -329,6 +329,8 @@ pub use self::sources::{RepeatWith, repeat_with};
 pub use self::sources::{Empty, empty};
 #[stable(feature = "iter_once", since = "1.2.0")]
 pub use self::sources::{Once, once};
+#[unstable(feature = "iter_once_with", issue = "57581")]
+pub use self::sources::{OnceWith, once_with};
 #[unstable(feature = "iter_unfold", issue = "55977")]
 pub use self::sources::{Unfold, unfold, Successors, successors};

diff --git a/src/libcore/iter/sources.rs b/src/libcore/iter/sources.rs
index 2a39089a8a229..2590fa6023a53 100644
--- a/src/libcore/iter/sources.rs
+++ b/src/libcore/iter/sources.rs
@@ -377,6 +377,119 @@ pub fn once<T>(value: T) -> Once<T> {
     Once { inner: Some(value).into_iter() }
 }

+/// An iterator that yields a single element of type `A` by
+/// applying the provided closure `F: FnOnce() -> A`.
+///
+/// This `struct` is created by the [`once_with`] function.
+/// See its documentation for more.
+///
+/// [`once_with`]: fn.once_with.html
+#[derive(Copy, Clone, Debug)]
+#[unstable(feature = "iter_once_with", issue = "57581")]
+pub struct OnceWith<F> {
+    gen: Option<F>,
+}
+
+#[unstable(feature = "iter_once_with", issue = "57581")]
+impl<A, F: FnOnce() -> A> Iterator for OnceWith<F> {
+    type Item = A;
+
+    #[inline]
+    fn next(&mut self) -> Option<A> {
+        self.gen.take().map(|f| f())
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.gen.iter().size_hint()
+    }
+}
+
+#[unstable(feature = "iter_once_with", issue = "57581")]
+impl<A, F: FnOnce() -> A> DoubleEndedIterator for OnceWith<F> {
+    fn next_back(&mut self) -> Option<A> {
+        self.next()
+    }
+}
+
+#[unstable(feature = "iter_once_with", issue = "57581")]
+impl<A, F: FnOnce() -> A> ExactSizeIterator for OnceWith<F> {
+    fn len(&self) -> usize {
+        self.gen.iter().len()
+    }
+}
+
+#[unstable(feature = "iter_once_with", issue = "57581")]
+impl<A, F: FnOnce() -> A> FusedIterator for OnceWith<F> {}
+
+#[unstable(feature = "iter_once_with", issue = "57581")]
+unsafe impl<A, F: FnOnce() -> A> TrustedLen for OnceWith<F> {}
+
+/// Creates an iterator that lazily generates a value exactly once by invoking
+/// the provided closure.
+///
+/// This is commonly used to adapt a single value generator into a [`chain`] of
+/// other kinds of iteration. Maybe you have an iterator that covers almost
+/// everything, but you need an extra special case. Maybe you have a function
+/// which works on iterators, but you only need to process one value.
+///
+/// Unlike [`once`], this function will lazily generate the value on request.
+///
+/// [`once`]: fn.once.html
+/// [`chain`]: trait.Iterator.html#method.chain
+///
+/// # Examples
+///
+/// Basic usage:
+///
+/// ```
+/// #![feature(iter_once_with)]
+///
+/// use std::iter;
+///
+/// // one is the loneliest number
+/// let mut one = iter::once_with(|| 1);
+///
+/// assert_eq!(Some(1), one.next());
+///
+/// // just one, that's all we get
+/// assert_eq!(None, one.next());
+/// ```
+///
+/// Chaining together with another iterator. Let's say that we want to iterate
+/// over each file of the `.foo` directory, but also a configuration file,
+/// `.foorc`:
+///
+/// ```no_run
+/// #![feature(iter_once_with)]
+///
+/// use std::iter;
+/// use std::fs;
+/// use std::path::PathBuf;
+///
+/// let dirs = fs::read_dir(".foo").unwrap();
+///
+/// // we need to convert from an iterator of DirEntry-s to an iterator of
+/// // PathBufs, so we use map
+/// let dirs = dirs.map(|file| file.unwrap().path());
+///
+/// // now, our iterator just for our config file
+/// let config = iter::once_with(|| PathBuf::from(".foorc"));
+///
+/// // chain the two iterators together into one big iterator
+/// let files = dirs.chain(config);
+///
+/// // this will give us all of the files in .foo as well as .foorc
+/// for f in files {
+///     println!("{:?}", f);
+/// }
+/// ```
+#[inline]
+#[unstable(feature = "iter_once_with", issue = "57581")]
+pub fn once_with<A, F: FnOnce() -> A>(gen: F) -> OnceWith<F> {
+    OnceWith { gen: Some(gen) }
+}
+
 /// Creates a new iterator where each iteration calls the provided closure
 /// `F: FnMut(&mut St) -> Option<T>`.
 ///
diff --git a/src/libcore/lib.rs b/src/libcore/lib.rs
index 8879112fcf0b9..33c0da8a54049 100644
--- a/src/libcore/lib.rs
+++ b/src/libcore/lib.rs
@@ -79,6 +79,7 @@
 #![feature(extern_types)]
 #![feature(fundamental)]
 #![feature(intrinsics)]
+#![feature(iter_once_with)]
 #![feature(lang_items)]
 #![feature(link_llvm_intrinsics)]
 #![feature(never_type)]
diff --git a/src/libcore/tests/iter.rs b/src/libcore/tests/iter.rs
index cf19851c17b35..3944bc749d029 100644
--- a/src/libcore/tests/iter.rs
+++ b/src/libcore/tests/iter.rs
@@ -1,3 +1,4 @@
+use core::cell::Cell;
 use core::iter::*;
 use core::{i8, i16, isize};
 use core::usize;
@@ -1906,6 +1907,23 @@ fn test_once() {
     assert_eq!(it.next(), None);
 }

+#[test]
+fn test_once_with() {
+    let count = Cell::new(0);
+    let mut it = once_with(|| {
+        count.set(count.get() + 1);
+        42
+    });
+
+    assert_eq!(count.get(), 0);
+    assert_eq!(it.next(), Some(42));
+    assert_eq!(count.get(), 1);
+    assert_eq!(it.next(), None);
+    assert_eq!(count.get(), 1);
+    assert_eq!(it.next(), None);
+    assert_eq!(count.get(), 1);
+}
+
 #[test]
 fn test_empty() {
     let mut it = empty::<i32>();
diff --git a/src/libcore/tests/lib.rs b/src/libcore/tests/lib.rs
index 72846daf16a6b..a9b8decfd0262 100644
--- a/src/libcore/tests/lib.rs
+++ b/src/libcore/tests/lib.rs
@@ -12,6 +12,7 @@
 #![feature(hashmap_internals)]
 #![feature(iter_copied)]
 #![feature(iter_nth_back)]
+#![feature(iter_once_with)]
 #![feature(iter_unfold)]
 #![feature(pattern)]
 #![feature(range_is_empty)]
diff --git a/src/libfmt_macros/lib.rs b/src/libfmt_macros/lib.rs
index 32ae878909f30..da440cdd72f80 100644
--- a/src/libfmt_macros/lib.rs
+++ b/src/libfmt_macros/lib.rs
@@ -72,6 +72,15 @@ pub enum Position<'a> {
     ArgumentNamed(&'a str),
 }

+impl Position<'_> {
+    pub fn index(&self) -> Option<usize> {
+        match self {
+            ArgumentIs(i) | ArgumentImplicitlyIs(i) => Some(*i),
+            _ => None,
+        }
+    }
+}
+
 /// Enum of alignments which are supported.
#[derive(Copy, Clone, PartialEq)] pub enum Alignment { diff --git a/src/librustc/dep_graph/dep_node.rs b/src/librustc/dep_graph/dep_node.rs index 427fe51e6ff9c..d1067b70778ee 100644 --- a/src/librustc/dep_graph/dep_node.rs +++ b/src/librustc/dep_graph/dep_node.rs @@ -476,6 +476,9 @@ define_dep_nodes!( <'tcx> [] CheckModLoops(DefId), [] CheckModUnstableApiUsage(DefId), [] CheckModItemTypes(DefId), + [] CheckModPrivacy(DefId), + [] CheckModIntrinsics(DefId), + [] CheckModLiveness(DefId), [] CollectModItemTypes(DefId), [] Reachability, diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs index 869baef1f5afc..d9ca37c937bc7 100644 --- a/src/librustc/hir/map/mod.rs +++ b/src/librustc/hir/map/mod.rs @@ -509,6 +509,21 @@ impl<'hir> Map<'hir> { &self.forest.krate.attrs } + pub fn get_module(&self, module: DefId) -> (&'hir Mod, Span, NodeId) + { + let node_id = self.as_local_node_id(module).unwrap(); + self.read(node_id); + match self.find_entry(node_id).unwrap().node { + Node::Item(&Item { + span, + node: ItemKind::Mod(ref m), + .. + }) => (m, span, node_id), + Node::Crate => (&self.forest.krate.module, self.forest.krate.span, node_id), + _ => panic!("not a module") + } + } + pub fn visit_item_likes_in_module(&self, module: DefId, visitor: &mut V) where V: ItemLikeVisitor<'hir> { diff --git a/src/librustc/ich/impls_syntax.rs b/src/librustc/ich/impls_syntax.rs index 70ec72d73bc6c..de567183a3c05 100644 --- a/src/librustc/ich/impls_syntax.rs +++ b/src/librustc/ich/impls_syntax.rs @@ -258,7 +258,7 @@ for tokenstream::TokenTree { tokenstream::TokenTree::Delimited(span, delim, ref tts) => { span.hash_stable(hcx, hasher); std_hash::Hash::hash(&delim, hasher); - for sub_tt in tts.stream().trees() { + for sub_tt in tts.trees() { sub_tt.hash_stable(hcx, hasher); } } diff --git a/src/librustc/middle/intrinsicck.rs b/src/librustc/middle/intrinsicck.rs index 1716daaa107c4..a0f7954eb0c55 100644 --- a/src/librustc/middle/intrinsicck.rs +++ b/src/librustc/middle/intrinsicck.rs @@ -2,6 +2,7 @@ use hir::def::Def; use hir::def_id::DefId; use ty::{self, Ty, TyCtxt}; use ty::layout::{LayoutError, Pointer, SizeSkeleton, VariantIdx}; +use ty::query::{Providers, queries}; use rustc_target::spec::abi::Abi::RustIntrinsic; use rustc_data_structures::indexed_vec::Idx; @@ -10,10 +11,23 @@ use hir::intravisit::{self, Visitor, NestedVisitorMap}; use hir; pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { - let mut visitor = ItemVisitor { - tcx, + for &module in tcx.hir().krate().modules.keys() { + queries::check_mod_intrinsics::ensure(tcx, tcx.hir().local_def_id(module)); + } +} + +fn check_mod_intrinsics<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, module_def_id: DefId) { + tcx.hir().visit_item_likes_in_module( + module_def_id, + &mut ItemVisitor { tcx }.as_deep_visitor() + ); +} + +pub fn provide(providers: &mut Providers<'_>) { + *providers = Providers { + check_mod_intrinsics, + ..*providers }; - tcx.hir().krate().visit_all_item_likes(&mut visitor.as_deep_visitor()); } struct ItemVisitor<'a, 'tcx: 'a> { diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index a78cf1a471b4b..0a1802a4e12c9 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -100,6 +100,7 @@ use self::VarKind::*; use hir::def::*; use hir::Node; use ty::{self, TyCtxt}; +use ty::query::{Providers, queries}; use lint; use errors::Applicability; use util::nodemap::{NodeMap, HirIdMap, HirIdSet}; @@ -114,8 +115,9 @@ use syntax::ptr::P; use syntax::symbol::keywords; use syntax_pos::Span; 
-use hir::{Expr, HirId}; use hir; +use hir::{Expr, HirId}; +use hir::def_id::DefId; use hir::intravisit::{self, Visitor, FnKind, NestedVisitorMap}; /// For use with `propagate_through_loop`. @@ -179,11 +181,24 @@ impl<'a, 'tcx> Visitor<'tcx> for IrMaps<'a, 'tcx> { fn visit_arm(&mut self, a: &'tcx hir::Arm) { visit_arm(self, a); } } +fn check_mod_liveness<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, module_def_id: DefId) { + tcx.hir().visit_item_likes_in_module(module_def_id, &mut IrMaps::new(tcx).as_deep_visitor()); +} + pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { - tcx.hir().krate().visit_all_item_likes(&mut IrMaps::new(tcx).as_deep_visitor()); + for &module in tcx.hir().krate().modules.keys() { + queries::check_mod_liveness::ensure(tcx, tcx.hir().local_def_id(module)); + } tcx.sess.abort_if_errors(); } +pub fn provide(providers: &mut Providers<'_>) { + *providers = Providers { + check_mod_liveness, + ..*providers + }; +} + impl fmt::Debug for LiveNode { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "ln({})", self.get()) diff --git a/src/librustc/ty/query/config.rs b/src/librustc/ty/query/config.rs index ca5d1f6bd3203..c20846aebb877 100644 --- a/src/librustc/ty/query/config.rs +++ b/src/librustc/ty/query/config.rs @@ -109,6 +109,33 @@ impl<'tcx> QueryDescription<'tcx> for queries::check_mod_item_types<'tcx> { } } +impl<'tcx> QueryDescription<'tcx> for queries::check_mod_privacy<'tcx> { + fn describe( + tcx: TyCtxt<'_, '_, '_>, + key: DefId, + ) -> Cow<'static, str> { + format!("checking privacy in {}", key.describe_as_module(tcx)).into() + } +} + +impl<'tcx> QueryDescription<'tcx> for queries::check_mod_intrinsics<'tcx> { + fn describe( + tcx: TyCtxt<'_, '_, '_>, + key: DefId, + ) -> Cow<'static, str> { + format!("checking intrinsics in {}", key.describe_as_module(tcx)).into() + } +} + +impl<'tcx> QueryDescription<'tcx> for queries::check_mod_liveness<'tcx> { + fn describe( + tcx: TyCtxt<'_, '_, '_>, + key: DefId, + ) -> Cow<'static, str> { + format!("checking liveness of variables in {}", key.describe_as_module(tcx)).into() + } +} + impl<'tcx> QueryDescription<'tcx> for queries::collect_mod_item_types<'tcx> { fn describe( tcx: TyCtxt<'_, '_, '_>, diff --git a/src/librustc/ty/query/mod.rs b/src/librustc/ty/query/mod.rs index 39d76ceed9507..88c20547a2108 100644 --- a/src/librustc/ty/query/mod.rs +++ b/src/librustc/ty/query/mod.rs @@ -264,6 +264,12 @@ define_queries! { <'tcx> [] fn check_mod_item_types: CheckModItemTypes(DefId) -> (), + [] fn check_mod_privacy: CheckModPrivacy(DefId) -> (), + + [] fn check_mod_intrinsics: CheckModIntrinsics(DefId) -> (), + + [] fn check_mod_liveness: CheckModLiveness(DefId) -> (), + [] fn collect_mod_item_types: CollectModItemTypes(DefId) -> (), /// Caches CoerceUnsized kinds for impls on custom types. 
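The three new queries defined above (`check_mod_privacy`, `check_mod_intrinsics`, `check_mod_liveness`) follow the compiler's existing per-module query pattern, as `check_mod_loops` and `check_mod_item_types` already do: the crate-wide entry point only forces the query for each module, and the actual visiting moves into a provider keyed by the module's `DefId`, which lets incremental compilation skip unchanged modules. A minimal sketch of that shape as it would sit inside `librustc`; `check_mod_example` and `ExampleVisitor` are illustrative placeholder names, not part of this patch:

    use hir::def_id::DefId;
    use hir::intravisit::Visitor;
    use ty::TyCtxt;
    use ty::query::{queries, Providers};

    // Crate-wide entry point: just force the per-module query for every
    // module of the local crate.
    pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
        for &module in tcx.hir().krate().modules.keys() {
            queries::check_mod_example::ensure(tcx, tcx.hir().local_def_id(module));
        }
    }

    // Per-module provider: visit only the item-likes owned by this module.
    // `ExampleVisitor` stands in for whatever HIR visitor the pass defines.
    fn check_mod_example<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, module_def_id: DefId) {
        tcx.hir().visit_item_likes_in_module(
            module_def_id,
            &mut ExampleVisitor { tcx }.as_deep_visitor(),
        );
    }

    // Hook the provider into the query system.
    pub fn provide(providers: &mut Providers<'_>) {
        *providers = Providers {
            check_mod_example,
            ..*providers
        };
    }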
diff --git a/src/librustc/ty/query/plumbing.rs b/src/librustc/ty/query/plumbing.rs index 562cd75a75ff4..17ed89d140696 100644 --- a/src/librustc/ty/query/plumbing.rs +++ b/src/librustc/ty/query/plumbing.rs @@ -1281,6 +1281,9 @@ pub fn force_from_dep_node<'a, 'gcx, 'lcx>(tcx: TyCtxt<'a, 'gcx, 'lcx>, DepKind::CheckModLoops => { force!(check_mod_loops, def_id!()); } DepKind::CheckModUnstableApiUsage => { force!(check_mod_unstable_api_usage, def_id!()); } DepKind::CheckModItemTypes => { force!(check_mod_item_types, def_id!()); } + DepKind::CheckModPrivacy => { force!(check_mod_privacy, def_id!()); } + DepKind::CheckModIntrinsics => { force!(check_mod_intrinsics, def_id!()); } + DepKind::CheckModLiveness => { force!(check_mod_liveness, def_id!()); } DepKind::CollectModItemTypes => { force!(collect_mod_item_types, def_id!()); } DepKind::Reachability => { force!(reachable_set, LOCAL_CRATE); } DepKind::MirKeys => { force!(mir_keys, LOCAL_CRATE); } diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 3b7de37ae4b3f..960faabb9b00c 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -1170,6 +1170,8 @@ pub fn default_provide(providers: &mut ty::query::Providers) { ty::provide(providers); traits::provide(providers); stability::provide(providers); + middle::intrinsicck::provide(providers); + middle::liveness::provide(providers); reachable::provide(providers); rustc_passes::provide(providers); rustc_traits::provide(providers); diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index 72bcf8edfdd21..3cc2e8057b8e5 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -1540,7 +1540,7 @@ impl KeywordIdents { _ => {}, } TokenTree::Delimited(_, _, tts) => { - self.check_tokens(cx, tts.stream()) + self.check_tokens(cx, tts) }, } } diff --git a/src/librustc_privacy/lib.rs b/src/librustc_privacy/lib.rs index 4890369e13f20..10ac1caa692ca 100644 --- a/src/librustc_privacy/lib.rs +++ b/src/librustc_privacy/lib.rs @@ -22,12 +22,12 @@ use rustc::lint; use rustc::middle::privacy::{AccessLevel, AccessLevels}; use rustc::ty::{self, TyCtxt, Ty, TraitRef, TypeFoldable, GenericParamDefKind}; use rustc::ty::fold::TypeVisitor; -use rustc::ty::query::Providers; +use rustc::ty::query::{Providers, queries}; use rustc::ty::subst::Substs; use rustc::util::nodemap::NodeSet; use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::sync::Lrc; -use syntax::ast::{self, CRATE_NODE_ID, Ident}; +use syntax::ast::{self, DUMMY_NODE_ID, Ident}; use syntax::attr; use syntax::symbol::keywords; use syntax_pos::Span; @@ -782,6 +782,10 @@ impl<'a, 'tcx> Visitor<'tcx> for NamePrivacyVisitor<'a, 'tcx> { NestedVisitorMap::All(&self.tcx.hir()) } + fn visit_mod(&mut self, _m: &'tcx hir::Mod, _s: Span, _n: ast::NodeId) { + // Don't visit modules inside + } + fn visit_nested_body(&mut self, body: hir::BodyId) { let orig_tables = mem::replace(&mut self.tables, self.tcx.body_tables(body)); let body = self.tcx.hir().body(body); @@ -917,6 +921,10 @@ impl<'a, 'tcx> Visitor<'tcx> for TypePrivacyVisitor<'a, 'tcx> { NestedVisitorMap::All(&self.tcx.hir()) } + fn visit_mod(&mut self, _m: &'tcx hir::Mod, _s: Span, _n: ast::NodeId) { + // Don't visit modules inside + } + fn visit_nested_body(&mut self, body: hir::BodyId) { let orig_tables = mem::replace(&mut self.tables, self.tcx.body_tables(body)); let orig_in_body = mem::replace(&mut self.in_body, true); @@ -1659,6 +1667,7 @@ impl<'a, 'tcx> Visitor<'tcx> for PrivateItemsInPublicInterfacesVisitor<'a, 
'tcx> pub fn provide(providers: &mut Providers) { *providers = Providers { privacy_access_levels, + check_mod_privacy, ..*providers }; } @@ -1667,34 +1676,43 @@ pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Lrc { tcx.privacy_access_levels(LOCAL_CRATE) } -fn privacy_access_levels<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - krate: CrateNum) - -> Lrc { - assert_eq!(krate, LOCAL_CRATE); - - let krate = tcx.hir().krate(); +fn check_mod_privacy<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, module_def_id: DefId) { let empty_tables = ty::TypeckTables::empty(None); // Check privacy of names not checked in previous compilation stages. let mut visitor = NamePrivacyVisitor { tcx, tables: &empty_tables, - current_item: CRATE_NODE_ID, + current_item: DUMMY_NODE_ID, empty_tables: &empty_tables, }; - intravisit::walk_crate(&mut visitor, krate); + let (module, span, node_id) = tcx.hir().get_module(module_def_id); + intravisit::walk_mod(&mut visitor, module, node_id); // Check privacy of explicitly written types and traits as well as // inferred types of expressions and patterns. let mut visitor = TypePrivacyVisitor { tcx, tables: &empty_tables, - current_item: DefId::local(CRATE_DEF_INDEX), + current_item: module_def_id, in_body: false, - span: krate.span, + span, empty_tables: &empty_tables, }; - intravisit::walk_crate(&mut visitor, krate); + intravisit::walk_mod(&mut visitor, module, node_id); +} + +fn privacy_access_levels<'tcx>( + tcx: TyCtxt<'_, 'tcx, 'tcx>, + krate: CrateNum, +) -> Lrc { + assert_eq!(krate, LOCAL_CRATE); + + let krate = tcx.hir().krate(); + + for &module in tcx.hir().krate().modules.keys() { + queries::check_mod_privacy::ensure(tcx, tcx.hir().local_def_id(module)); + } // Build up a set of all exported items in the AST. This is a set of all // items which are reachable from external crates based on visibility. diff --git a/src/librustc_typeck/coherence/mod.rs b/src/librustc_typeck/coherence/mod.rs index ce71be07efd42..8053ed130e91b 100644 --- a/src/librustc_typeck/coherence/mod.rs +++ b/src/librustc_typeck/coherence/mod.rs @@ -171,13 +171,23 @@ fn check_impl_overlap<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, node_id: ast::NodeI // This is something like impl Trait1 for Trait2. Illegal // if Trait1 is a supertrait of Trait2 or Trait2 is not object safe. - if let Some(principal_def_id) = data.principal_def_id() { - if !tcx.is_object_safe(principal_def_id) { + let component_def_ids = data.iter().flat_map(|predicate| { + match predicate.skip_binder() { + ty::ExistentialPredicate::Trait(tr) => Some(tr.def_id), + ty::ExistentialPredicate::AutoTrait(def_id) => Some(*def_id), + // An associated type projection necessarily comes with + // an additional `Trait` requirement. + ty::ExistentialPredicate::Projection(..) => None, + } + }); + + for component_def_id in component_def_ids { + if !tcx.is_object_safe(component_def_id) { // This is an error, but it will be reported by wfcheck. Ignore it here. // This is tested by `coherence-impl-trait-for-trait-object-safe.rs`. } else { let mut supertrait_def_ids = - traits::supertrait_def_ids(tcx, principal_def_id); + traits::supertrait_def_ids(tcx, component_def_id); if supertrait_def_ids.any(|d| d == trait_def_id) { let sp = tcx.sess.source_map().def_span(tcx.span_of_impl(impl_def_id).unwrap()); struct_span_err!(tcx.sess, @@ -193,6 +203,5 @@ fn check_impl_overlap<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, node_id: ast::NodeI } } } - // FIXME: also check auto-trait def-ids? (e.g. `impl Sync for Foo+Sync`)? 
} } diff --git a/src/librustdoc/html/static/main.js b/src/librustdoc/html/static/main.js index 9db1c68609039..ceadbd5afac5f 100644 --- a/src/librustdoc/html/static/main.js +++ b/src/librustdoc/html/static/main.js @@ -2431,7 +2431,7 @@ if (!DOMTokenList.prototype.remove) { return; } var crates_text = []; - if (crates.length > 1) { + if (Object.keys(crates).length > 1) { for (var crate in crates) { if (crates.hasOwnProperty(crate)) { crates_text.push(crate); diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index e3a8980a975c1..1e91f4adc36d7 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -15,7 +15,7 @@ use rustc_target::spec::abi::Abi; use source_map::{dummy_spanned, respan, Spanned}; use symbol::{keywords, Symbol}; use syntax_pos::{Span, DUMMY_SP}; -use tokenstream::{ThinTokenStream, TokenStream}; +use tokenstream::TokenStream; use ThinVec; use rustc_data_structures::fx::FxHashSet; @@ -1216,7 +1216,7 @@ pub type Mac = Spanned; pub struct Mac_ { pub path: Path, pub delim: MacDelimiter, - pub tts: ThinTokenStream, + pub tts: TokenStream, } #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)] @@ -1228,13 +1228,13 @@ pub enum MacDelimiter { impl Mac_ { pub fn stream(&self) -> TokenStream { - self.tts.stream() + self.tts.clone() } } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct MacroDef { - pub tokens: ThinTokenStream, + pub tokens: TokenStream, pub legacy: bool, } diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index d03563f8891aa..0f8ca5e7b9982 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -565,7 +565,7 @@ impl MetaItemKind { } Some(TokenTree::Delimited(_, delim, ref tts)) if delim == token::Paren => { tokens.next(); - tts.stream() + tts.clone() } _ => return Some(MetaItemKind::Word), }; diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index c3124144009ab..c01e7f538b90d 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -748,7 +748,7 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec { let mut stmts = statements_mk_tt(cx, &TokenTree::open_tt(span.open, delim), false); - stmts.extend(statements_mk_tts(cx, tts.stream())); + stmts.extend(statements_mk_tts(cx, tts.clone())); stmts.extend(statements_mk_tt(cx, &TokenTree::close_tt(span.close, delim), false)); stmts } diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 8ac103856dcd1..a4c3b38f691ed 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -598,7 +598,7 @@ pub fn noop_fold_tt(tt: TokenTree, fld: &mut T) -> TokenTree { TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited( DelimSpan::from_pair(fld.new_span(span.open), fld.new_span(span.close)), delim, - fld.fold_tts(tts.stream()).into(), + fld.fold_tts(tts).into(), ), } } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index ea205530ca5cc..ddb350faa546b 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -817,7 +817,7 @@ mod tests { ) if name_macro_rules.name == "macro_rules" && name_zip.name == "zip" => { - let tts = ¯o_tts.stream().trees().collect::>(); + let tts = ¯o_tts.trees().collect::>(); match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) { ( 3, @@ -826,7 +826,7 @@ mod tests { Some(&TokenTree::Delimited(_, second_delim, ref second_tts)), ) if macro_delim == token::Paren => { - let tts = &first_tts.stream().trees().collect::>(); + let tts = &first_tts.trees().collect::>(); match (tts.len(), tts.get(0), tts.get(1)) { ( 2, @@ 
-836,7 +836,7 @@ mod tests { if first_delim == token::Paren && ident.name == "a" => {}, _ => panic!("value 3: {:?} {:?}", first_delim, first_tts), } - let tts = &second_tts.stream().trees().collect::>(); + let tts = &second_tts.trees().collect::>(); match (tts.len(), tts.get(0), tts.get(1)) { ( 2, diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 823c786bded26..8073c24c7dbee 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -46,7 +46,7 @@ use print::pprust; use ptr::P; use parse::PResult; use ThinVec; -use tokenstream::{self, DelimSpan, ThinTokenStream, TokenTree, TokenStream}; +use tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint}; use symbol::{Symbol, keywords}; use std::borrow::Cow; @@ -280,17 +280,17 @@ struct TokenCursorFrame { /// on the parser. #[derive(Clone)] enum LastToken { - Collecting(Vec), - Was(Option), + Collecting(Vec), + Was(Option), } impl TokenCursorFrame { - fn new(sp: DelimSpan, delim: DelimToken, tts: &ThinTokenStream) -> Self { + fn new(sp: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self { TokenCursorFrame { delim: delim, span: sp, open_delim: delim == token::NoDelim, - tree_cursor: tts.stream().into_trees(), + tree_cursor: tts.clone().into_trees(), close_delim: delim == token::NoDelim, last_token: LastToken::Was(None), } @@ -2330,7 +2330,7 @@ impl<'a> Parser<'a> { }) } - fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, ThinTokenStream)> { + fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> { let delim = match self.token { token::OpenDelim(delim) => delim, _ => { @@ -2350,7 +2350,7 @@ impl<'a> Parser<'a> { token::Brace => MacDelimiter::Brace, token::NoDelim => self.bug("unexpected no delimiter"), }; - Ok((delim, tts.stream().into())) + Ok((delim, tts.into())) } /// At the bottom (top?) of the precedence hierarchy, @@ -4641,7 +4641,7 @@ impl<'a> Parser<'a> { let ident = self.parse_ident()?; let tokens = if self.check(&token::OpenDelim(token::Brace)) { match self.parse_token_tree() { - TokenTree::Delimited(_, _, tts) => tts.stream(), + TokenTree::Delimited(_, _, tts) => tts, _ => unreachable!(), } } else if self.check(&token::OpenDelim(token::Paren)) { @@ -7766,7 +7766,7 @@ impl<'a> Parser<'a> { &mut self.token_cursor.stack[prev].last_token }; - // Pull our the toekns that we've collected from the call to `f` above + // Pull out the tokens that we've collected from the call to `f` above. let mut collected_tokens = match *last_token { LastToken::Collecting(ref mut v) => mem::replace(v, Vec::new()), LastToken::Was(_) => panic!("our vector went away?"), @@ -7785,10 +7785,9 @@ impl<'a> Parser<'a> { // call. In that case we need to record all the tokens we collected in // our parent list as well. To do that we push a clone of our stream // onto the previous list. 
- let stream = collected_tokens.into_iter().collect::(); match prev_collecting { Some(mut list) => { - list.push(stream.clone()); + list.extend(collected_tokens.iter().cloned()); list.extend(extra_token); *last_token = LastToken::Collecting(list); } @@ -7797,7 +7796,7 @@ impl<'a> Parser<'a> { } } - Ok((ret?, stream)) + Ok((ret?, TokenStream::new(collected_tokens))) } pub fn parse_item(&mut self) -> PResult<'a, Option>> { diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 2ad3d3a6d6487..c53594032a00a 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -807,7 +807,7 @@ pub trait PrintState<'a> { TokenTree::Delimited(_, delim, tts) => { self.writer().word(token_to_string(&token::OpenDelim(delim)))?; self.writer().space()?; - self.print_tts(tts.stream())?; + self.print_tts(tts)?; self.writer().space()?; self.writer().word(token_to_string(&token::CloseDelim(delim))) }, diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index fb72ef9c956ce..f5d2d6f18ee87 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -41,7 +41,7 @@ pub enum TokenTree { /// A single token Token(Span, token::Token), /// A delimited sequence of token trees - Delimited(DelimSpan, DelimToken, ThinTokenStream), + Delimited(DelimSpan, DelimToken, TokenStream), } impl TokenTree { @@ -62,8 +62,7 @@ impl TokenTree { (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2, (&TokenTree::Delimited(_, delim, ref tts), &TokenTree::Delimited(_, delim2, ref tts2)) => { - delim == delim2 && - tts.stream().eq_unspanned(&tts2.stream()) + delim == delim2 && tts.eq_unspanned(&tts2) } (_, _) => false, } @@ -81,8 +80,7 @@ impl TokenTree { } (&TokenTree::Delimited(_, delim, ref tts), &TokenTree::Delimited(_, delim2, ref tts2)) => { - delim == delim2 && - tts.stream().probably_equal_for_proc_macro(&tts2.stream()) + delim == delim2 && tts.probably_equal_for_proc_macro(&tts2) } (_, _) => false, } @@ -113,7 +111,7 @@ impl TokenTree { } pub fn joint(self) -> TokenStream { - TokenStream::Tree(self, Joint) + TokenStream::new(vec![(self, Joint)]) } /// Returns the opening delimiter as a token tree. @@ -143,18 +141,19 @@ impl TokenTree { /// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s /// instead of a representation of the abstract syntax tree. /// Today's `TokenTree`s can still contain AST via `Token::Interpolated` for back-compat. +/// +/// The use of `Option` is an optimization that avoids the need for an +/// allocation when the stream is empty. However, it is not guaranteed that an +/// empty stream is represented with `None`; it may be represented as a `Some` +/// around an empty `Vec`. #[derive(Clone, Debug)] -pub enum TokenStream { - Empty, - Tree(TokenTree, IsJoint), - Stream(Lrc>), -} +pub struct TokenStream(Option>>); pub type TreeAndJoint = (TokenTree, IsJoint); // `TokenStream` is used a lot. Make sure it doesn't unintentionally get bigger. #[cfg(target_arch = "x86_64")] -static_assert!(MEM_SIZE_OF_TOKEN_STREAM: mem::size_of::() == 32); +static_assert!(MEM_SIZE_OF_TOKEN_STREAM: mem::size_of::() == 8); #[derive(Clone, Copy, Debug, PartialEq)] pub enum IsJoint { @@ -169,7 +168,7 @@ impl TokenStream { /// separating the two arguments with a comma for diagnostic suggestions. 
pub(crate) fn add_comma(&self) -> Option<(TokenStream, Span)> { // Used to suggest if a user writes `foo!(a b);` - if let TokenStream::Stream(ref stream) = self { + if let Some(ref stream) = self.0 { let mut suggestion = None; let mut iter = stream.iter().enumerate().peekable(); while let Some((pos, ts)) = iter.next() { @@ -201,7 +200,7 @@ impl TokenStream { impl From for TokenStream { fn from(tree: TokenTree) -> TokenStream { - TokenStream::Tree(tree, NonJoint) + TokenStream::new(vec![(tree, NonJoint)]) } } @@ -233,7 +232,7 @@ impl PartialEq for TokenStream { impl TokenStream { pub fn len(&self) -> usize { - if let TokenStream::Stream(ref slice) = self { + if let Some(ref slice) = self.0 { slice.len() } else { 0 @@ -241,13 +240,13 @@ impl TokenStream { } pub fn empty() -> TokenStream { - TokenStream::Empty + TokenStream(None) } pub fn is_empty(&self) -> bool { - match self { - TokenStream::Empty => true, - _ => false, + match self.0 { + None => true, + Some(ref stream) => stream.is_empty(), } } @@ -258,10 +257,9 @@ impl TokenStream { _ => { let mut vec = vec![]; for stream in streams { - match stream { - TokenStream::Empty => {}, - TokenStream::Tree(tree, is_joint) => vec.push((tree, is_joint)), - TokenStream::Stream(stream2) => vec.extend(stream2.iter().cloned()), + match stream.0 { + None => {}, + Some(stream2) => vec.extend(stream2.iter().cloned()), } } TokenStream::new(vec) @@ -269,22 +267,16 @@ impl TokenStream { } } - pub fn new(mut streams: Vec) -> TokenStream { + pub fn new(streams: Vec) -> TokenStream { match streams.len() { - 0 => TokenStream::empty(), - 1 => { - let (tree, is_joint) = streams.pop().unwrap(); - TokenStream::Tree(tree, is_joint) - } - _ => TokenStream::Stream(Lrc::new(streams)), + 0 => TokenStream(None), + _ => TokenStream(Some(Lrc::new(streams))), } } pub fn append_to_tree_and_joint_vec(self, vec: &mut Vec) { - match self { - TokenStream::Empty => {} - TokenStream::Tree(tree, is_joint) => vec.push((tree, is_joint)), - TokenStream::Stream(stream) => vec.extend(stream.iter().cloned()), + if let Some(stream) = self.0 { + vec.extend(stream.iter().cloned()); } } @@ -349,51 +341,36 @@ impl TokenStream { } pub fn map_enumerated TokenTree>(self, mut f: F) -> TokenStream { - match self { - TokenStream::Empty => TokenStream::Empty, - TokenStream::Tree(tree, is_joint) => TokenStream::Tree(f(0, tree), is_joint), - TokenStream::Stream(stream) => TokenStream::Stream(Lrc::new( + TokenStream(self.0.map(|stream| { + Lrc::new( stream .iter() .enumerate() .map(|(i, (tree, is_joint))| (f(i, tree.clone()), *is_joint)) - .collect() - )), - } + .collect()) + })) } pub fn map TokenTree>(self, mut f: F) -> TokenStream { - match self { - TokenStream::Empty => TokenStream::Empty, - TokenStream::Tree(tree, is_joint) => TokenStream::Tree(f(tree), is_joint), - TokenStream::Stream(stream) => TokenStream::Stream(Lrc::new( + TokenStream(self.0.map(|stream| { + Lrc::new( stream .iter() .map(|(tree, is_joint)| (f(tree.clone()), *is_joint)) - .collect() - )), - } + .collect()) + })) } - fn first_tree_and_joint(&self) -> Option<(TokenTree, IsJoint)> { - match self { - TokenStream::Empty => None, - TokenStream::Tree(ref tree, is_joint) => Some((tree.clone(), *is_joint)), - TokenStream::Stream(ref stream) => Some(stream.first().unwrap().clone()) - } + fn first_tree_and_joint(&self) -> Option { + self.0.as_ref().map(|stream| { + stream.first().unwrap().clone() + }) } fn last_tree_if_joint(&self) -> Option { - match self { - TokenStream::Empty => None, - TokenStream::Tree(ref tree, is_joint) => { - if 
*is_joint == Joint { - Some(tree.clone()) - } else { - None - } - } - TokenStream::Stream(ref stream) => { + match self.0 { + None => None, + Some(ref stream) => { if let (tree, Joint) = stream.last().unwrap() { Some(tree.clone()) } else { @@ -422,7 +399,7 @@ impl TokenStreamBuilder { self.push_all_but_last_tree(&last_stream); let glued_span = last_span.to(span); let glued_tt = TokenTree::Token(glued_span, glued_tok); - let glued_tokenstream = TokenStream::Tree(glued_tt, is_joint); + let glued_tokenstream = TokenStream::new(vec![(glued_tt, is_joint)]); self.0.push(glued_tokenstream); self.push_all_but_first_tree(&stream); return @@ -437,23 +414,21 @@ impl TokenStreamBuilder { } fn push_all_but_last_tree(&mut self, stream: &TokenStream) { - if let TokenStream::Stream(ref streams) = stream { + if let Some(ref streams) = stream.0 { let len = streams.len(); match len { 1 => {} - 2 => self.0.push(TokenStream::Tree(streams[0].0.clone(), streams[0].1)), - _ => self.0.push(TokenStream::Stream(Lrc::new(streams[0 .. len - 1].to_vec()))), + _ => self.0.push(TokenStream(Some(Lrc::new(streams[0 .. len - 1].to_vec())))), } } } fn push_all_but_first_tree(&mut self, stream: &TokenStream) { - if let TokenStream::Stream(ref streams) = stream { + if let Some(ref streams) = stream.0 { let len = streams.len(); match len { 1 => {} - 2 => self.0.push(TokenStream::Tree(streams[1].0.clone(), streams[1].1)), - _ => self.0.push(TokenStream::Stream(Lrc::new(streams[1 .. len].to_vec()))), + _ => self.0.push(TokenStream(Some(Lrc::new(streams[1 .. len].to_vec())))), } } } @@ -479,17 +454,9 @@ impl Cursor { } pub fn next_with_joint(&mut self) -> Option { - match self.stream { - TokenStream::Empty => None, - TokenStream::Tree(ref tree, ref is_joint) => { - if self.index == 0 { - self.index = 1; - Some((tree.clone(), *is_joint)) - } else { - None - } - } - TokenStream::Stream(ref stream) => { + match self.stream.0 { + None => None, + Some(ref stream) => { if self.index < stream.len() { self.index += 1; Some(stream[self.index - 1].clone()) @@ -505,63 +472,19 @@ impl Cursor { return; } let index = self.index; - let stream = mem::replace(&mut self.stream, TokenStream::Empty); + let stream = mem::replace(&mut self.stream, TokenStream(None)); *self = TokenStream::from_streams(vec![stream, new_stream]).into_trees(); self.index = index; } pub fn look_ahead(&self, n: usize) -> Option { - match self.stream { - TokenStream::Empty => None, - TokenStream::Tree(ref tree, _) => { - if n == 0 && self.index == 0 { - Some(tree.clone()) - } else { - None - } - } - TokenStream::Stream(ref stream) => - stream[self.index ..].get(n).map(|(tree, _)| tree.clone()), + match self.stream.0 { + None => None, + Some(ref stream) => stream[self.index ..].get(n).map(|(tree, _)| tree.clone()), } } } -/// The `TokenStream` type is large enough to represent a single `TokenTree` without allocation. -/// `ThinTokenStream` is smaller, but needs to allocate to represent a single `TokenTree`. -/// We must use `ThinTokenStream` in `TokenTree::Delimited` to avoid infinite size due to recursion. 
-#[derive(Debug, Clone)] -pub struct ThinTokenStream(Option>>); - -impl ThinTokenStream { - pub fn stream(&self) -> TokenStream { - self.clone().into() - } -} - -impl From for ThinTokenStream { - fn from(stream: TokenStream) -> ThinTokenStream { - ThinTokenStream(match stream { - TokenStream::Empty => None, - TokenStream::Tree(tree, is_joint) => Some(Lrc::new(vec![(tree, is_joint)])), - TokenStream::Stream(stream) => Some(stream), - }) - } -} - -impl From for TokenStream { - fn from(stream: ThinTokenStream) -> TokenStream { - stream.0.map(TokenStream::Stream).unwrap_or_else(TokenStream::empty) - } -} - -impl Eq for ThinTokenStream {} - -impl PartialEq for ThinTokenStream { - fn eq(&self, other: &ThinTokenStream) -> bool { - TokenStream::from(self.clone()) == TokenStream::from(other.clone()) - } -} - impl fmt::Display for TokenStream { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(&pprust::tokens_to_string(self.clone())) @@ -580,18 +503,6 @@ impl Decodable for TokenStream { } } -impl Encodable for ThinTokenStream { - fn encode(&self, encoder: &mut E) -> Result<(), E::Error> { - TokenStream::from(self.clone()).encode(encoder) - } -} - -impl Decodable for ThinTokenStream { - fn decode(decoder: &mut D) -> Result { - TokenStream::decode(decoder).map(Into::into) - } -} - #[derive(Debug, Copy, Clone, PartialEq, RustcEncodable, RustcDecodable)] pub struct DelimSpan { pub open: Span, diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index 156546bbba94a..8cbd47ca70fde 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -832,7 +832,7 @@ pub fn walk_attribute<'a, V: Visitor<'a>>(visitor: &mut V, attr: &'a Attribute) pub fn walk_tt<'a, V: Visitor<'a>>(visitor: &mut V, tt: TokenTree) { match tt { TokenTree::Token(_, tok) => visitor.visit_token(tok), - TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts.stream()), + TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts), } } diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index 3e3bca7080fb6..215df4224c5f1 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -493,7 +493,10 @@ impl<'a, 'b> Context<'a, 'b> { let fill = arg.format.fill.unwrap_or(' '); - if *arg != simple_arg || fill != ' ' { + let pos_simple = + arg.position.index() == simple_arg.position.index(); + + if !pos_simple || arg.format != simple_arg.format || fill != ' ' { self.all_pieces_simple = false; } diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs index 158cbc791ef50..7de9b9343a8fa 100644 --- a/src/libsyntax_ext/proc_macro_server.rs +++ b/src/libsyntax_ext/proc_macro_server.rs @@ -269,7 +269,7 @@ impl ToInternal for TokenTree { }; let tree = tokenstream::TokenTree::Token(span, token); - TokenStream::Tree(tree, if joint { Joint } else { NonJoint }) + TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })]) } } diff --git a/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-negative.rs b/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-negative.rs new file mode 100644 index 0000000000000..5ea69190951e1 --- /dev/null +++ b/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-negative.rs @@ -0,0 +1,29 @@ +#![feature(optin_builtin_traits)] + +// Test for issue #56934 - that it is impossible to redundantly +// implement an auto-trait for a trait object type that contains it. + +// Negative impl variant. 
+ +auto trait Marker1 {} +auto trait Marker2 {} + +trait Object: Marker1 {} + +// A supertrait marker is illegal... +impl !Marker1 for dyn Object + Marker2 { } //~ ERROR E0371 +// ...and also a direct component. +impl !Marker2 for dyn Object + Marker2 { } //~ ERROR E0371 + +// But implementing a marker if it is not present is OK. +impl !Marker2 for dyn Object {} // OK + +// A non-principal trait-object type is orphan even in its crate. +impl !Send for dyn Marker2 {} //~ ERROR E0117 + +// And impl'ing a remote marker for a local trait object is forbidden +// by one of these special orphan-like rules. +impl !Send for dyn Object {} //~ ERROR E0321 +impl !Send for dyn Object + Marker2 {} //~ ERROR E0321 + +fn main() { } diff --git a/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-negative.stderr b/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-negative.stderr new file mode 100644 index 0000000000000..c8a146cdd4456 --- /dev/null +++ b/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-negative.stderr @@ -0,0 +1,37 @@ +error[E0371]: the object type `(dyn Object + Marker2 + 'static)` automatically implements the trait `Marker1` + --> $DIR/coherence-impl-trait-for-marker-trait-negative.rs:14:1 + | +LL | impl !Marker1 for dyn Object + Marker2 { } //~ ERROR E0371 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `(dyn Object + Marker2 + 'static)` automatically implements trait `Marker1` + +error[E0371]: the object type `(dyn Object + Marker2 + 'static)` automatically implements the trait `Marker2` + --> $DIR/coherence-impl-trait-for-marker-trait-negative.rs:16:1 + | +LL | impl !Marker2 for dyn Object + Marker2 { } //~ ERROR E0371 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `(dyn Object + Marker2 + 'static)` automatically implements trait `Marker2` + +error[E0117]: only traits defined in the current crate can be implemented for arbitrary types + --> $DIR/coherence-impl-trait-for-marker-trait-negative.rs:22:1 + | +LL | impl !Send for dyn Marker2 {} //~ ERROR E0117 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ impl doesn't use types inside crate + | + = note: the impl does not reference only types defined in this crate + = note: define and implement a trait or new type instead + +error[E0321]: cross-crate traits with a default impl, like `std::marker::Send`, can only be implemented for a struct/enum type, not `(dyn Object + 'static)` + --> $DIR/coherence-impl-trait-for-marker-trait-negative.rs:26:1 + | +LL | impl !Send for dyn Object {} //~ ERROR E0321 + | ^^^^^^^^^^^^^^^^^^^^^^^^^ can't implement cross-crate trait with a default impl for non-struct/enum type + +error[E0321]: cross-crate traits with a default impl, like `std::marker::Send`, can only be implemented for a struct/enum type, not `(dyn Object + Marker2 + 'static)` + --> $DIR/coherence-impl-trait-for-marker-trait-negative.rs:27:1 + | +LL | impl !Send for dyn Object + Marker2 {} //~ ERROR E0321 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ can't implement cross-crate trait with a default impl for non-struct/enum type + +error: aborting due to 5 previous errors + +Some errors occurred: E0117, E0321, E0371. +For more information about an error, try `rustc --explain E0117`. 
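For orientation between the two new test files: the coherence change above (in `check_impl_overlap`) now checks every component of a trait-object type, the principal trait plus any explicitly listed auto traits, against E0371, where previously only the principal and its supertraits were considered. A small self-contained illustration of the case that is newly rejected; it uses the same unstable `optin_builtin_traits` feature as the tests and is an explanatory sketch, not part of the patch:

    #![feature(optin_builtin_traits)]

    auto trait Marker {}

    // Note that `Marker` is *not* a supertrait of `Object` here.
    trait Object {}

    // Newly rejected with E0371: the old check only looked at the principal
    // trait (`Object`) and its supertraits, so this impl slipped through even
    // though `dyn Object + Marker` implements `Marker` by construction.
    impl Marker for dyn Object + Marker {}

    // Still accepted: the object type does not already include `Marker`.
    impl Marker for dyn Object {}

    fn main() {}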
diff --git a/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-positive.rs b/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-positive.rs new file mode 100644 index 0000000000000..6b5689e8260f0 --- /dev/null +++ b/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-positive.rs @@ -0,0 +1,29 @@ +#![feature(optin_builtin_traits)] + +// Test for issue #56934 - that it is impossible to redundantly +// implement an auto-trait for a trait object type that contains it. + +// Positive impl variant. + +auto trait Marker1 {} +auto trait Marker2 {} + +trait Object: Marker1 {} + +// A supertrait marker is illegal... +impl Marker1 for dyn Object + Marker2 { } //~ ERROR E0371 +// ...and also a direct component. +impl Marker2 for dyn Object + Marker2 { } //~ ERROR E0371 + +// But implementing a marker if it is not present is OK. +impl Marker2 for dyn Object {} // OK + +// A non-principal trait-object type is orphan even in its crate. +unsafe impl Send for dyn Marker2 {} //~ ERROR E0117 + +// And impl'ing a remote marker for a local trait object is forbidden +// by one of these special orphan-like rules. +unsafe impl Send for dyn Object {} //~ ERROR E0321 +unsafe impl Send for dyn Object + Marker2 {} //~ ERROR E0321 + +fn main() { } diff --git a/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-positive.stderr b/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-positive.stderr new file mode 100644 index 0000000000000..78ca2f5279d63 --- /dev/null +++ b/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-positive.stderr @@ -0,0 +1,37 @@ +error[E0371]: the object type `(dyn Object + Marker2 + 'static)` automatically implements the trait `Marker1` + --> $DIR/coherence-impl-trait-for-marker-trait-positive.rs:14:1 + | +LL | impl Marker1 for dyn Object + Marker2 { } //~ ERROR E0371 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `(dyn Object + Marker2 + 'static)` automatically implements trait `Marker1` + +error[E0371]: the object type `(dyn Object + Marker2 + 'static)` automatically implements the trait `Marker2` + --> $DIR/coherence-impl-trait-for-marker-trait-positive.rs:16:1 + | +LL | impl Marker2 for dyn Object + Marker2 { } //~ ERROR E0371 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `(dyn Object + Marker2 + 'static)` automatically implements trait `Marker2` + +error[E0117]: only traits defined in the current crate can be implemented for arbitrary types + --> $DIR/coherence-impl-trait-for-marker-trait-positive.rs:22:1 + | +LL | unsafe impl Send for dyn Marker2 {} //~ ERROR E0117 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ impl doesn't use types inside crate + | + = note: the impl does not reference only types defined in this crate + = note: define and implement a trait or new type instead + +error[E0321]: cross-crate traits with a default impl, like `std::marker::Send`, can only be implemented for a struct/enum type, not `(dyn Object + 'static)` + --> $DIR/coherence-impl-trait-for-marker-trait-positive.rs:26:1 + | +LL | unsafe impl Send for dyn Object {} //~ ERROR E0321 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ can't implement cross-crate trait with a default impl for non-struct/enum type + +error[E0321]: cross-crate traits with a default impl, like `std::marker::Send`, can only be implemented for a struct/enum type, not `(dyn Object + Marker2 + 'static)` + --> $DIR/coherence-impl-trait-for-marker-trait-positive.rs:27:1 + | +LL | unsafe impl Send for dyn Object + Marker2 {} //~ ERROR E0321 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ can't implement cross-crate trait 
with a default impl for non-struct/enum type + +error: aborting due to 5 previous errors + +Some errors occurred: E0117, E0321, E0371. +For more information about an error, try `rustc --explain E0117`. diff --git a/src/test/ui/pattern/const-pat-ice.rs b/src/test/ui/pattern/const-pat-ice.rs index 6496a2ab69f5c..865c54be1ad7b 100644 --- a/src/test/ui/pattern/const-pat-ice.rs +++ b/src/test/ui/pattern/const-pat-ice.rs @@ -1,4 +1,5 @@ // failure-status: 101 +// rustc-env:RUST_BACKTRACE=0 // This is a repro test for an ICE in our pattern handling of constants.
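As background for the `libsyntax/tokenstream.rs` changes earlier in this diff: `ThinTokenStream` is deleted and `TokenStream` itself becomes the thin, pointer-sized representation, an `Option` around a reference-counted vector of `(TokenTree, IsJoint)` pairs (hence the new `size_of` assertion of 8 bytes). A stripped-down model of that layout, using `std::rc::Rc` in place of rustc's `Lrc` and a one-variant placeholder `TokenTree`; it mirrors only the shape of the type, not its full API:

    use std::rc::Rc;

    // Placeholder standing in for libsyntax's real `TokenTree` enum.
    #[derive(Clone, Debug)]
    enum TokenTree {
        Token(String),
    }

    // In the real code this is `(TokenTree, IsJoint)`; a bool stands in here.
    type TreeAndJoint = (TokenTree, bool);

    // One pointer-sized field: `None` covers the common empty case without
    // allocating, otherwise the trees sit behind a shared vector. Cloning is
    // cheap, and `TokenTree::Delimited` can now embed a `TokenStream` directly
    // without making the type infinitely sized.
    #[derive(Clone, Debug)]
    struct TokenStream(Option<Rc<Vec<TreeAndJoint>>>);

    impl TokenStream {
        fn empty() -> TokenStream {
            TokenStream(None)
        }

        fn new(trees: Vec<TreeAndJoint>) -> TokenStream {
            if trees.is_empty() {
                TokenStream(None)
            } else {
                TokenStream(Some(Rc::new(trees)))
            }
        }

        fn len(&self) -> usize {
            self.0.as_ref().map_or(0, |trees| trees.len())
        }
    }

    fn main() {
        let stream = TokenStream::new(vec![(TokenTree::Token("fn".to_string()), false)]);
        assert_eq!(stream.len(), 1);
        assert_eq!(TokenStream::empty().len(), 0);
        println!("{:?}", stream);
    }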