From 88b031e6efa417bb8cc0eecb18db13bfa93be19b Mon Sep 17 00:00:00 2001 From: Andrea Pretto Date: Thu, 13 Oct 2016 17:40:58 +0200 Subject: [PATCH 01/17] save-analysis: dump data only if get_path_data doesn't fail to resolve a path. Fixes #37126. --- src/librustc_save_analysis/dump_visitor.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/librustc_save_analysis/dump_visitor.rs b/src/librustc_save_analysis/dump_visitor.rs index 6d9cd88afb17b..1c60ccb976588 100644 --- a/src/librustc_save_analysis/dump_visitor.rs +++ b/src/librustc_save_analysis/dump_visitor.rs @@ -854,9 +854,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { let path_data = match path_data { Some(pd) => pd, None => { - span_bug!(path.span, - "Unexpected def kind while looking up path in `{}`", - self.span.snippet(path.span)) + return; } }; From 4a9364868949a5390d85d26af4d6562bc4a18fb3 Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Sun, 23 Oct 2016 21:43:41 +0000 Subject: [PATCH 02/17] Support `use *;` and `use ::*;`. --- src/libsyntax/parse/parser.rs | 13 +++++++++---- src/test/run-pass/import-glob-crate.rs | 12 +++++++++--- 2 files changed, 18 insertions(+), 7 deletions(-) diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 463ec334cc567..cd62ecd4e9777 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -6113,15 +6113,20 @@ impl<'a> Parser<'a> { /// MOD_SEP? LBRACE item_seq RBRACE fn parse_view_path(&mut self) -> PResult<'a, P> { let lo = self.span.lo; - if self.check(&token::OpenDelim(token::Brace)) || self.is_import_coupler() { - // `{foo, bar}` or `::{foo, bar}` + if self.check(&token::OpenDelim(token::Brace)) || self.check(&token::BinOp(token::Star)) || + self.is_import_coupler() { + // `{foo, bar}`, `::{foo, bar}`, `*`, or `::*`. let prefix = ast::Path { global: self.eat(&token::ModSep), segments: Vec::new(), span: mk_sp(lo, self.span.hi), }; - let items = self.parse_path_list_items()?; - Ok(P(spanned(lo, self.span.hi, ViewPathList(prefix, items)))) + let view_path_kind = if self.eat(&token::BinOp(token::Star)) { + ViewPathGlob(prefix) + } else { + ViewPathList(prefix, self.parse_path_list_items()?) + }; + Ok(P(spanned(lo, self.span.hi, view_path_kind))) } else { let prefix = self.parse_path(PathStyle::Mod)?; if self.is_import_coupler() { diff --git a/src/test/run-pass/import-glob-crate.rs b/src/test/run-pass/import-glob-crate.rs index b2a9b08b01b86..fec46c7e1f82d 100644 --- a/src/test/run-pass/import-glob-crate.rs +++ b/src/test/run-pass/import-glob-crate.rs @@ -8,9 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. - -#![allow(dead_assignment)] - use std::mem::*; pub fn main() { @@ -20,3 +17,12 @@ pub fn main() { assert_eq!(x, 2); assert_eq!(y, 1); } + +#[allow(unused)] +fn f() { + mod foo { pub use *; } + mod bar { pub use ::*; } + + foo::main(); + bar::main(); +} From 592d7bfb3af75ded9c5233ee243cc6c751531671 Mon Sep 17 00:00:00 2001 From: Raph Levien Date: Mon, 24 Oct 2016 16:42:57 -0700 Subject: [PATCH 03/17] Add support for kernel randomness for Fuchsia Wire up cprng syscall as provider for rand::os::OsRng on Fuchsia. 
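For illustration only, a minimal usage sketch of what this enables, written
against the external `rand` crate's `OsRng` API (nothing below is added by
this patch; it merely exercises the new Fuchsia code path):

    extern crate rand;

    use rand::os::OsRng;
    use rand::Rng;

    fn main() {
        // On Fuchsia, OsRng now draws entropy from the kernel via mx_cprng_draw.
        let mut rng = OsRng::new().expect("failed to open the OS RNG");
        let mut buf = [0u8; 16];
        rng.fill_bytes(&mut buf);
        println!("random bytes: {:?}", buf);
    }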
--- src/libstd/build.rs | 2 ++ src/libstd/sys/unix/rand.rs | 54 ++++++++++++++++++++++++++++++++++++- 2 files changed, 55 insertions(+), 1 deletion(-) diff --git a/src/libstd/build.rs b/src/libstd/build.rs index c5732278db961..d1ca183fc3e70 100644 --- a/src/libstd/build.rs +++ b/src/libstd/build.rs @@ -58,6 +58,8 @@ fn main() { println!("cargo:rustc-link-lib=ws2_32"); println!("cargo:rustc-link-lib=userenv"); println!("cargo:rustc-link-lib=shell32"); + } else if target.contains("fuchsia") { + println!("cargo:rustc-link-lib=magenta"); } } diff --git a/src/libstd/sys/unix/rand.rs b/src/libstd/sys/unix/rand.rs index f28a6ad33750a..3aebb8c18ec86 100644 --- a/src/libstd/sys/unix/rand.rs +++ b/src/libstd/sys/unix/rand.rs @@ -27,7 +27,8 @@ fn next_u64(mut fill_buf: &mut FnMut(&mut [u8])) -> u64 { #[cfg(all(unix, not(target_os = "ios"), not(target_os = "openbsd"), - not(target_os = "freebsd")))] + not(target_os = "freebsd"), + not(target_os = "fuchsia")))] mod imp { use self::OsRngInner::*; use super::{next_u32, next_u64}; @@ -339,3 +340,54 @@ mod imp { } } } + +#[cfg(target_os = "fuchsia")] +mod imp { + use super::{next_u32, next_u64}; + + use io; + use rand::Rng; + + #[link(name = "magenta")] + extern { + fn mx_cprng_draw(buffer: *mut u8, len: usize) -> isize; + } + + fn getrandom(buf: &mut [u8]) -> isize { + unsafe { mx_cprng_draw(buf.as_mut_ptr(), buf.len()) } + } + + pub struct OsRng { + // dummy field to ensure that this struct cannot be constructed outside + // of this module + _dummy: (), + } + + impl OsRng { + /// Create a new `OsRng`. + pub fn new() -> io::Result { + Ok(OsRng { _dummy: () }) + } + } + + impl Rng for OsRng { + fn next_u32(&mut self) -> u32 { + next_u32(&mut |v| self.fill_bytes(v)) + } + fn next_u64(&mut self) -> u64 { + next_u64(&mut |v| self.fill_bytes(v)) + } + fn fill_bytes(&mut self, v: &mut [u8]) { + let mut buf = v; + while !buf.is_empty() { + let ret = getrandom(buf); + if ret < 0 { + panic!("kernel mx_cprng_draw call failed! (returned {}, buf.len() {})", + ret, buf.len()); + } + let move_buf = buf; + buf = &mut move_buf[(ret as usize)..]; + } + } + } +} From c4651dba5f8fcbdd9b678db87405aaa643f82600 Mon Sep 17 00:00:00 2001 From: Raph Levien Date: Mon, 24 Oct 2016 11:54:48 -0700 Subject: [PATCH 04/17] Support for aarch64 architecture on Fuchsia This patch adds support for the aarch64-unknown-fuchsia target. Also updates src/liblibc submodule to include required libc change. 
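As a sketch of how downstream code can gate on the new target (this distils
the cfg condition added to src/libstd/os/raw.rs below; it is illustrative,
not part of the patch):

    // On aarch64-unknown-fuchsia, `c_char` is the unsigned variant.
    #[cfg(all(target_os = "fuchsia", target_arch = "aarch64"))]
    pub type c_char = u8;

    #[cfg(not(all(target_os = "fuchsia", target_arch = "aarch64")))]
    pub type c_char = i8;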
--- mk/cfg/aarch64-unknown-fuchsia.mk | 1 + src/liblibc | 2 +- .../target/aarch64_unknown_fuchsia.rs | 28 +++++++++++++++++++ src/librustc_back/target/mod.rs | 1 + src/libstd/os/raw.rs | 6 ++-- 5 files changed, 35 insertions(+), 3 deletions(-) create mode 100644 mk/cfg/aarch64-unknown-fuchsia.mk create mode 100644 src/librustc_back/target/aarch64_unknown_fuchsia.rs diff --git a/mk/cfg/aarch64-unknown-fuchsia.mk b/mk/cfg/aarch64-unknown-fuchsia.mk new file mode 100644 index 0000000000000..34aee77ae2107 --- /dev/null +++ b/mk/cfg/aarch64-unknown-fuchsia.mk @@ -0,0 +1 @@ +# rustbuild-only target diff --git a/src/liblibc b/src/liblibc index c95defce07a82..7d9b71f0971f8 160000 --- a/src/liblibc +++ b/src/liblibc @@ -1 +1 @@ -Subproject commit c95defce07a82f2f759f140c937dabd43a4f3d97 +Subproject commit 7d9b71f0971f8fa196d864d7071f216a59036d6e diff --git a/src/librustc_back/target/aarch64_unknown_fuchsia.rs b/src/librustc_back/target/aarch64_unknown_fuchsia.rs new file mode 100644 index 0000000000000..a93a46d140238 --- /dev/null +++ b/src/librustc_back/target/aarch64_unknown_fuchsia.rs @@ -0,0 +1,28 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use target::{Target, TargetResult}; + +pub fn target() -> TargetResult { + let mut base = super::fuchsia_base::opts(); + base.max_atomic_width = Some(128); + + Ok(Target { + llvm_target: "aarch64-unknown-fuchsia".to_string(), + target_endian: "little".to_string(), + target_pointer_width: "64".to_string(), + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".to_string(), + arch: "aarch64".to_string(), + target_os: "fuchsia".to_string(), + target_env: "".to_string(), + target_vendor: "unknown".to_string(), + options: base, + }) +} diff --git a/src/librustc_back/target/mod.rs b/src/librustc_back/target/mod.rs index 931080daef271..99129ca807033 100644 --- a/src/librustc_back/target/mod.rs +++ b/src/librustc_back/target/mod.rs @@ -176,6 +176,7 @@ supported_targets! 
{ ("x86_64-apple-darwin", x86_64_apple_darwin), ("i686-apple-darwin", i686_apple_darwin), + ("aarch64-unknown-fuchsia", aarch64_unknown_fuchsia), ("x86_64-unknown-fuchsia", x86_64_unknown_fuchsia), ("i386-apple-ios", i386_apple_ios), diff --git a/src/libstd/os/raw.rs b/src/libstd/os/raw.rs index 6c5c1b90a4a92..2a918d8aeb7b8 100644 --- a/src/libstd/os/raw.rs +++ b/src/libstd/os/raw.rs @@ -18,7 +18,8 @@ target_arch = "arm", target_arch = "powerpc", target_arch = "powerpc64", - target_arch = "s390x"))))] + target_arch = "s390x")), + all(target_os = "fuchsia", target_arch = "aarch64")))] #[stable(feature = "raw_os", since = "1.1.0")] pub type c_char = u8; #[cfg(not(any(target_os = "android", target_os = "emscripten", @@ -26,7 +27,8 @@ target_arch = "arm", target_arch = "powerpc", target_arch = "powerpc64", - target_arch = "s390x")))))] + target_arch = "s390x")), + all(target_os = "fuchsia", target_arch = "aarch64"))))] #[stable(feature = "raw_os", since = "1.1.0")] pub type c_char = i8; #[stable(feature = "raw_os", since = "1.1.0")] pub type c_schar = i8; #[stable(feature = "raw_os", since = "1.1.0")] pub type c_uchar = u8; From 4bb6d4e740ffc15af337d43d842b036f35577124 Mon Sep 17 00:00:00 2001 From: Taylor Cramer Date: Mon, 24 Oct 2016 17:05:58 -0700 Subject: [PATCH 05/17] rustc_typeck: Allow reification from fn item to unsafe ptr --- src/librustc_typeck/check/coercion.rs | 35 ++++++++++++------- .../run-pass/typeck-fn-to-unsafe-fn-ptr.rs | 21 +++++++++++ 2 files changed, 44 insertions(+), 12 deletions(-) create mode 100644 src/test/run-pass/typeck-fn-to-unsafe-fn-ptr.rs diff --git a/src/librustc_typeck/check/coercion.rs b/src/librustc_typeck/check/coercion.rs index 5be77cb12e9ef..02a65e249c826 100644 --- a/src/librustc_typeck/check/coercion.rs +++ b/src/librustc_typeck/check/coercion.rs @@ -196,6 +196,8 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { // Function items are coercible to any closure // type; function pointers are not (that would // require double indirection). + // Additionally, we permit coercin of function + // items to drop the unsafe qualifier. 
self.coerce_from_fn_item(a, a_f, b) } ty::TyFnPtr(a_f) => { @@ -504,6 +506,24 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { Ok((target, AdjustDerefRef(adjustment))) } + fn coerce_from_safe_fn(&self, + a: Ty<'tcx>, + fn_ty_a: &'tcx ty::BareFnTy<'tcx>, + b: Ty<'tcx>) + -> CoerceResult<'tcx> { + if let ty::TyFnPtr(fn_ty_b) = b.sty { + match (fn_ty_a.unsafety, fn_ty_b.unsafety) { + (hir::Unsafety::Normal, hir::Unsafety::Unsafe) => { + let unsafe_a = self.tcx.safe_to_unsafe_fn_ty(fn_ty_a); + return self.unify_and_identity(unsafe_a, b) + .map(|(ty, _)| (ty, AdjustUnsafeFnPointer)); + } + _ => {} + } + } + self.unify_and_identity(a, b) + } + fn coerce_from_fn_pointer(&self, a: Ty<'tcx>, fn_ty_a: &'tcx ty::BareFnTy<'tcx>, @@ -516,17 +536,7 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { let b = self.shallow_resolve(b); debug!("coerce_from_fn_pointer(a={:?}, b={:?})", a, b); - if let ty::TyFnPtr(fn_ty_b) = b.sty { - match (fn_ty_a.unsafety, fn_ty_b.unsafety) { - (hir::Unsafety::Normal, hir::Unsafety::Unsafe) => { - let unsafe_a = self.tcx.safe_to_unsafe_fn_ty(fn_ty_a); - return self.unify_and_identity(unsafe_a, b) - .map(|(ty, _)| (ty, AdjustUnsafeFnPointer)); - } - _ => {} - } - } - self.unify_and_identity(a, b) + self.coerce_from_safe_fn(a, fn_ty_a, b) } fn coerce_from_fn_item(&self, @@ -544,7 +554,8 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { match b.sty { ty::TyFnPtr(_) => { let a_fn_pointer = self.tcx.mk_fn_ptr(fn_ty_a); - self.unify_and_identity(a_fn_pointer, b).map(|(ty, _)| (ty, AdjustReifyFnPointer)) + self.coerce_from_safe_fn(a_fn_pointer, fn_ty_a, b) + .map(|(ty, _)| (ty, AdjustReifyFnPointer)) } _ => self.unify_and_identity(a, b), } diff --git a/src/test/run-pass/typeck-fn-to-unsafe-fn-ptr.rs b/src/test/run-pass/typeck-fn-to-unsafe-fn-ptr.rs new file mode 100644 index 0000000000000..323705f3f955f --- /dev/null +++ b/src/test/run-pass/typeck-fn-to-unsafe-fn-ptr.rs @@ -0,0 +1,21 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// This tests reification from safe function to `unsafe fn` pointer + +fn do_nothing() -> () {} + +unsafe fn call_unsafe(func: unsafe fn() -> ()) -> () { + func() +} + +pub fn main() { + unsafe { call_unsafe(do_nothing); } +} From ab6119a38fa5791858610b672e56831421135d8c Mon Sep 17 00:00:00 2001 From: Taylor Cramer Date: Mon, 24 Oct 2016 17:33:41 -0700 Subject: [PATCH 06/17] Fix coercin -> coercion typo --- src/librustc_typeck/check/coercion.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/librustc_typeck/check/coercion.rs b/src/librustc_typeck/check/coercion.rs index 02a65e249c826..ccc944813ff18 100644 --- a/src/librustc_typeck/check/coercion.rs +++ b/src/librustc_typeck/check/coercion.rs @@ -196,7 +196,7 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { // Function items are coercible to any closure // type; function pointers are not (that would // require double indirection). - // Additionally, we permit coercin of function + // Additionally, we permit coercion of function // items to drop the unsafe qualifier. 
self.coerce_from_fn_item(a, a_f, b) } From 0a16a11c392d227ca845e65905c06599579e2828 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Fri, 21 Oct 2016 19:25:16 +1100 Subject: [PATCH 07/17] Use `SmallVector` for the stack in `macro_parser::parse`. This avoids 800,000 heap allocations when compiling html5ever. --- src/libsyntax/ext/tt/macro_parser.rs | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 74def68b18504..dacc519195565 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -92,6 +92,7 @@ use parse::token; use print::pprust; use ptr::P; use tokenstream::{self, TokenTree}; +use util::small_vector::SmallVector; use std::mem; use std::rc::Rc; @@ -284,12 +285,9 @@ pub fn parse(sess: &ParseSess, mut rdr: TtReader, ms: &[TokenTree]) -> NamedParseResult { - let mut cur_eis = Vec::new(); - cur_eis.push(initial_matcher_pos(Rc::new(ms.iter() - .cloned() - .collect()), - None, - rdr.peek().sp.lo)); + let mut cur_eis = SmallVector::one(initial_matcher_pos(Rc::new(ms.to_owned()), + None, + rdr.peek().sp.lo)); loop { let mut bb_eis = Vec::new(); // black-box parsed by parser.rs From 3fd90d8aa53d73456b5df476a2bd6cc2caf473c6 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Fri, 21 Oct 2016 19:25:53 +1100 Subject: [PATCH 08/17] Use `SmallVector` for `TtReader::stack`. This avoids 800,000 heap allocations when compiling html5ever. It requires tweaking `SmallVector` a little. --- src/libsyntax/ext/tt/transcribe.rs | 7 +++-- src/libsyntax/lib.rs | 1 + src/libsyntax/util/small_vector.rs | 50 ++++++++++++++++++++++++------ 3 files changed, 45 insertions(+), 13 deletions(-) diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 38a926b6e87c9..8a6a8e53a3e4c 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -10,14 +10,15 @@ use self::LockstepIterSize::*; use ast::Ident; -use syntax_pos::{Span, DUMMY_SP}; use errors::{Handler, DiagnosticBuilder}; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; use parse::token::{DocComment, MatchNt, SubstNt}; use parse::token::{Token, Interpolated, NtIdent, NtTT}; use parse::token; use parse::lexer::TokenAndSpan; +use syntax_pos::{Span, DUMMY_SP}; use tokenstream::{self, TokenTree}; +use util::small_vector::SmallVector; use std::rc::Rc; use std::ops::Add; @@ -36,7 +37,7 @@ struct TtFrame { pub struct TtReader<'a> { pub sp_diag: &'a Handler, /// the unzipped tree: - stack: Vec, + stack: SmallVector, /* for MBE-style macro transcription */ interpolations: HashMap>, @@ -74,7 +75,7 @@ pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler, -> TtReader { let mut r = TtReader { sp_diag: sp_diag, - stack: vec!(TtFrame { + stack: SmallVector::one(TtFrame { forest: TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition { tts: src, // doesn't matter. This merely holds the root unzipping. 
diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 6e671c9efdcf8..169388d72b6a6 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -35,6 +35,7 @@ #![feature(rustc_diagnostic_macros)] #![feature(specialization)] +extern crate core; extern crate serialize; extern crate term; extern crate libc; diff --git a/src/libsyntax/util/small_vector.rs b/src/libsyntax/util/small_vector.rs index 373dfc4ddfac5..57258c76335a1 100644 --- a/src/libsyntax/util/small_vector.rs +++ b/src/libsyntax/util/small_vector.rs @@ -11,6 +11,7 @@ use self::SmallVectorRepr::*; use self::IntoIterRepr::*; +use core::ops; use std::iter::{IntoIterator, FromIterator}; use std::mem; use std::slice; @@ -19,10 +20,12 @@ use std::vec; use util::move_map::MoveMap; /// A vector type optimized for cases where the size is almost always 0 or 1 +#[derive(Clone)] pub struct SmallVector { repr: SmallVectorRepr, } +#[derive(Clone)] enum SmallVectorRepr { Zero, One(T), @@ -75,16 +78,11 @@ impl SmallVector { } pub fn as_slice(&self) -> &[T] { - match self.repr { - Zero => { - let result: &[T] = &[]; - result - } - One(ref v) => { - unsafe { slice::from_raw_parts(v, 1) } - } - Many(ref vs) => vs - } + self + } + + pub fn as_mut_slice(&mut self) -> &mut [T] { + self } pub fn pop(&mut self) -> Option { @@ -163,6 +161,38 @@ impl SmallVector { } } +impl ops::Deref for SmallVector { + type Target = [T]; + + fn deref(&self) -> &[T] { + match self.repr { + Zero => { + let result: &[T] = &[]; + result + } + One(ref v) => { + unsafe { slice::from_raw_parts(v, 1) } + } + Many(ref vs) => vs + } + } +} + +impl ops::DerefMut for SmallVector { + fn deref_mut(&mut self) -> &mut [T] { + match self.repr { + Zero => { + let result: &mut [T] = &mut []; + result + } + One(ref mut v) => { + unsafe { slice::from_raw_parts_mut(v, 1) } + } + Many(ref mut vs) => vs + } + } +} + impl IntoIterator for SmallVector { type Item = T; type IntoIter = IntoIter; From c440a7ae654fb641e68a9ee53b03bf3f7133c2fe Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Fri, 21 Oct 2016 21:51:15 +1100 Subject: [PATCH 09/17] Don't use `Rc` in `TokenTreeOrTokenTreeVec`. This avoids 800,000 allocations when compiling html5ever. 
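For context, a small self-contained sketch of why dropping the `Rc` wrapper
saves an allocation per matcher position (illustrative only; the types below
are stand-ins, not the compiler's TokenTree):

    use std::rc::Rc;

    fn main() {
        let tts = vec![1, 2, 3];
        // Rc::new performs its own heap allocation for the refcount box,
        // on top of the Vec's buffer allocation.
        let wrapped: Rc<Vec<i32>> = Rc::new(tts.clone());
        // Storing the Vec directly needs only the buffer allocation.
        let plain: Vec<i32> = tts;
        assert_eq!(&*wrapped, &plain);
    }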
--- src/libsyntax/ext/tt/macro_parser.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index dacc519195565..91675065eb86e 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -105,7 +105,7 @@ use std::collections::hash_map::Entry::{Vacant, Occupied}; #[derive(Clone)] enum TokenTreeOrTokenTreeVec { Tt(tokenstream::TokenTree), - TtSeq(Rc>), + TtSeq(Vec), } impl TokenTreeOrTokenTreeVec { @@ -162,7 +162,7 @@ pub fn count_names(ms: &[TokenTree]) -> usize { }) } -pub fn initial_matcher_pos(ms: Rc>, sep: Option, lo: BytePos) +pub fn initial_matcher_pos(ms: Vec, sep: Option, lo: BytePos) -> Box { let match_idx_hi = count_names(&ms[..]); let matches: Vec<_> = (0..match_idx_hi).map(|_| Vec::new()).collect(); @@ -285,7 +285,7 @@ pub fn parse(sess: &ParseSess, mut rdr: TtReader, ms: &[TokenTree]) -> NamedParseResult { - let mut cur_eis = SmallVector::one(initial_matcher_pos(Rc::new(ms.to_owned()), + let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), None, rdr.peek().sp.lo)); From 15a95866b4dd7a64875a8388aee296d319fc497d Mon Sep 17 00:00:00 2001 From: Ulrik Sverdrup Date: Tue, 25 Oct 2016 15:50:52 +0200 Subject: [PATCH 10/17] Special case .fold() for VecDeque's iterators --- src/libcollections/vec_deque.rs | 74 ++++++++++++++++++++++++--------- 1 file changed, 54 insertions(+), 20 deletions(-) diff --git a/src/libcollections/vec_deque.rs b/src/libcollections/vec_deque.rs index cfed647f5d81c..5397193cab40f 100644 --- a/src/libcollections/vec_deque.rs +++ b/src/libcollections/vec_deque.rs @@ -743,16 +743,8 @@ impl VecDeque { #[stable(feature = "deque_extras_15", since = "1.5.0")] pub fn as_slices(&self) -> (&[T], &[T]) { unsafe { - let contiguous = self.is_contiguous(); let buf = self.buffer_as_slice(); - if contiguous { - let (empty, buf) = buf.split_at(0); - (&buf[self.tail..self.head], empty) - } else { - let (mid, right) = buf.split_at(self.tail); - let (left, _) = mid.split_at(self.head); - (right, left) - } + RingSlices::ring_slices(buf, self.head, self.tail) } } @@ -780,20 +772,10 @@ impl VecDeque { #[stable(feature = "deque_extras_15", since = "1.5.0")] pub fn as_mut_slices(&mut self) -> (&mut [T], &mut [T]) { unsafe { - let contiguous = self.is_contiguous(); let head = self.head; let tail = self.tail; let buf = self.buffer_as_mut_slice(); - - if contiguous { - let (empty, buf) = buf.split_at_mut(0); - (&mut buf[tail..head], empty) - } else { - let (mid, right) = buf.split_at_mut(tail); - let (left, _) = mid.split_at_mut(head); - - (right, left) - } + RingSlices::ring_slices(buf, head, tail) } } @@ -1829,6 +1811,42 @@ fn wrap_index(index: usize, size: usize) -> usize { index & (size - 1) } +/// Returns the two slices that cover the VecDeque's valid range +trait RingSlices : Sized { + fn slice(self, from: usize, to: usize) -> Self; + fn split_at(self, i: usize) -> (Self, Self); + + fn ring_slices(buf: Self, head: usize, tail: usize) -> (Self, Self) { + let contiguous = tail <= head; + if contiguous { + let (empty, buf) = buf.split_at(0); + (buf.slice(tail, head), empty) + } else { + let (mid, right) = buf.split_at(tail); + let (left, _) = mid.split_at(head); + (right, left) + } + } +} + +impl<'a, T> RingSlices for &'a [T] { + fn slice(self, from: usize, to: usize) -> Self { + &self[from..to] + } + fn split_at(self, i: usize) -> (Self, Self) { + (*self).split_at(i) + } +} + +impl<'a, T> RingSlices for &'a mut [T] { + fn slice(self, from: 
usize, to: usize) -> Self { + &mut self[from..to] + } + fn split_at(self, i: usize) -> (Self, Self) { + (*self).split_at_mut(i) + } +} + /// Calculate the number of elements left to be read in the buffer #[inline] fn count(tail: usize, head: usize, size: usize) -> usize { @@ -1875,6 +1893,14 @@ impl<'a, T> Iterator for Iter<'a, T> { let len = count(self.tail, self.head, self.ring.len()); (len, Some(len)) } + + fn fold(self, mut accum: Acc, mut f: F) -> Acc + where F: FnMut(Acc, Self::Item) -> Acc, + { + let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail); + accum = front.iter().fold(accum, &mut f); + back.iter().fold(accum, &mut f) + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -1927,6 +1953,14 @@ impl<'a, T> Iterator for IterMut<'a, T> { let len = count(self.tail, self.head, self.ring.len()); (len, Some(len)) } + + fn fold(self, mut accum: Acc, mut f: F) -> Acc + where F: FnMut(Acc, Self::Item) -> Acc, + { + let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail); + accum = front.iter_mut().fold(accum, &mut f); + back.iter_mut().fold(accum, &mut f) + } } #[stable(feature = "rust1", since = "1.0.0")] From 780acda325772b15f12f08f60ca2d4ba558cee51 Mon Sep 17 00:00:00 2001 From: Ulrik Sverdrup Date: Tue, 25 Oct 2016 15:50:52 +0200 Subject: [PATCH 11/17] iter: Implement .fold() for .cloned() and .map() Implement .fold() specifically for .map() and .cloned() so that any inner fold improvements are available through map and cloned. --- src/libcore/iter/mod.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/src/libcore/iter/mod.rs b/src/libcore/iter/mod.rs index 9eeb2608071c2..2c3b8864a115e 100644 --- a/src/libcore/iter/mod.rs +++ b/src/libcore/iter/mod.rs @@ -399,6 +399,12 @@ impl<'a, I, T: 'a> Iterator for Cloned fn size_hint(&self) -> (usize, Option) { self.it.size_hint() } + + fn fold(self, init: Acc, mut f: F) -> Acc + where F: FnMut(Acc, Self::Item) -> Acc, + { + self.it.fold(init, move |acc, elt| f(acc, elt.clone())) + } } #[stable(feature = "iter_cloned", since = "1.1.0")] @@ -939,6 +945,13 @@ impl Iterator for Map where F: FnMut(I::Item) -> B { fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } + + fn fold(self, init: Acc, mut g: G) -> Acc + where G: FnMut(Acc, Self::Item) -> Acc, + { + let mut f = self.f; + self.iter.fold(init, move |acc, elt| g(acc, f(elt))) + } } #[stable(feature = "rust1", since = "1.0.0")] From b3e8c4c2be593562085fca03a973fb2d917e5184 Mon Sep 17 00:00:00 2001 From: Peter Atashian Date: Tue, 25 Oct 2016 10:00:16 -0400 Subject: [PATCH 12/17] Print out the error when HeapFree failures do occur --- src/liballoc_system/lib.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs index b380ba180f40e..a4fabb5a2c96d 100644 --- a/src/liballoc_system/lib.rs +++ b/src/liballoc_system/lib.rs @@ -166,6 +166,7 @@ mod imp { fn HeapAlloc(hHeap: HANDLE, dwFlags: DWORD, dwBytes: SIZE_T) -> LPVOID; fn HeapReAlloc(hHeap: HANDLE, dwFlags: DWORD, lpMem: LPVOID, dwBytes: SIZE_T) -> LPVOID; fn HeapFree(hHeap: HANDLE, dwFlags: DWORD, lpMem: LPVOID) -> BOOL; + fn GetLastError() -> DWORD; } #[repr(C)] @@ -230,11 +231,11 @@ mod imp { pub unsafe fn deallocate(ptr: *mut u8, _old_size: usize, align: usize) { if align <= MIN_ALIGN { let err = HeapFree(GetProcessHeap(), 0, ptr as LPVOID); - debug_assert!(err != 0); + debug_assert!(err != 0, "Failed to free heap memory: {}", GetLastError()); } else { let header = get_header(ptr); let err = 
HeapFree(GetProcessHeap(), 0, header.0 as LPVOID); - debug_assert!(err != 0); + debug_assert!(err != 0, "Failed to free heap memory: {}", GetLastError()); } } From 892a05d694960c30a5568b173a25a4ed9f56c283 Mon Sep 17 00:00:00 2001 From: Srinivas Reddy Thatiparthy Date: Sat, 22 Oct 2016 18:44:32 +0530 Subject: [PATCH 13/17] run rustfmt on librustc_metadata folder --- src/librustc_metadata/encoder.rs | 318 ++++++++++++------------- src/librustc_metadata/index.rs | 16 +- src/librustc_metadata/index_builder.rs | 10 +- src/librustc_metadata/lib.rs | 6 +- src/librustc_metadata/locator.rs | 289 ++++++++++++---------- src/librustc_metadata/schema.rs | 66 +++-- 6 files changed, 363 insertions(+), 342 deletions(-) diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index 21630dde5f52b..8d2646560f595 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -117,7 +117,8 @@ impl<'a, 'tcx> SpecializedEncoder> for EncodeContext<'a, 'tcx> { } impl<'a, 'tcx> SpecializedEncoder> for EncodeContext<'a, 'tcx> { - fn specialized_encode(&mut self, predicates: &ty::GenericPredicates<'tcx>) + fn specialized_encode(&mut self, + predicates: &ty::GenericPredicates<'tcx>) -> Result<(), Self::Error> { predicates.parent.encode(self)?; predicates.predicates.len().encode(self)?; @@ -142,13 +143,13 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { r } - fn emit_lazy_distance(&mut self, position: usize, min_size: usize) + fn emit_lazy_distance(&mut self, + position: usize, + min_size: usize) -> Result<(), ::Error> { let min_end = position + min_size; let distance = match self.lazy_state { - LazyState::NoNode => { - bug!("emit_lazy_distance: outside of a metadata node") - } + LazyState::NoNode => bug!("emit_lazy_distance: outside of a metadata node"), LazyState::NodeStart(start) => { assert!(min_end <= start); start - min_end @@ -172,7 +173,9 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { } fn lazy_seq(&mut self, iter: I) -> LazySeq - where I: IntoIterator, T: Encodable { + where I: IntoIterator, + T: Encodable + { self.emit_node(|ecx, pos| { let len = iter.into_iter().map(|value| value.encode(ecx).unwrap()).count(); @@ -182,7 +185,9 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { } fn lazy_seq_ref<'b, I, T>(&mut self, iter: I) -> LazySeq - where I: IntoIterator, T: 'b + Encodable { + where I: IntoIterator, + T: 'b + Encodable + { self.emit_node(|ecx, pos| { let len = iter.into_iter().map(|value| value.encode(ecx).unwrap()).count(); @@ -192,11 +197,15 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { } /// Encode the given value or a previously cached shorthand. - fn encode_with_shorthand(&mut self, value: &T, variant: &U, map: M) + fn encode_with_shorthand(&mut self, + value: &T, + variant: &U, + map: M) -> Result<(), ::Error> - where M: for<'b> Fn(&'b mut Self) -> &'b mut FnvHashMap, - T: Clone + Eq + Hash, - U: Encodable { + where M: for<'b> Fn(&'b mut Self) -> &'b mut FnvHashMap, + T: Clone + Eq + Hash, + U: Encodable + { let existing_shorthand = map(self).get(value).cloned(); if let Some(shorthand) = existing_shorthand { return self.emit_usize(shorthand); @@ -208,9 +217,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { // The shorthand encoding uses the same usize as the // discriminant, with an offset so they can't conflict. 
- let discriminant = unsafe { - intrinsics::discriminant_value(variant) - }; + let discriminant = unsafe { intrinsics::discriminant_value(variant) }; assert!(discriminant < SHORTHAND_OFFSET as u64); let shorthand = start + SHORTHAND_OFFSET; @@ -250,8 +257,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { /// the right to access any information in the adt-def (including, /// e.g., the length of the various vectors). fn encode_enum_variant_info(&mut self, - (enum_did, Untracked(index)): - (DefId, Untracked)) -> Entry<'tcx> { + (enum_did, Untracked(index)): (DefId, Untracked)) + -> Entry<'tcx> { let tcx = self.tcx; let def = tcx.lookup_adt_def(enum_did); let variant = &def.variants[index]; @@ -260,7 +267,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { let data = VariantData { ctor_kind: variant.ctor_kind, disr: variant.disr_val.to_u64_unchecked(), - struct_ctor: None + struct_ctor: None, }; let enum_id = tcx.map.as_local_node_id(enum_did).unwrap(); @@ -285,24 +292,23 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { predicates: Some(self.encode_predicates(def_id)), ast: None, - mir: None + mir: None, } } fn encode_info_for_mod(&mut self, - FromId(id, (md, attrs, vis)): - FromId<(&hir::Mod, &[ast::Attribute], &hir::Visibility)>) + FromId(id, (md, attrs, vis)): FromId<(&hir::Mod, + &[ast::Attribute], + &hir::Visibility)>) -> Entry<'tcx> { let tcx = self.tcx; let def_id = tcx.map.local_def_id(id); let data = ModData { reexports: match self.reexports.get(&id) { - Some(exports) if *vis == hir::Public => { - self.lazy_seq_ref(exports) - } - _ => LazySeq::empty() - } + Some(exports) if *vis == hir::Public => self.lazy_seq_ref(exports), + _ => LazySeq::empty(), + }, }; Entry { @@ -353,8 +359,7 @@ impl Visibility for ty::Visibility { } impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> { - fn encode_fields(&mut self, - adt_def_id: DefId) { + fn encode_fields(&mut self, adt_def_id: DefId) { let def = self.tcx.lookup_adt_def(adt_def_id); for (variant_index, variant) in def.variants.iter().enumerate() { for (field_index, field) in variant.fields.iter().enumerate() { @@ -374,8 +379,10 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { /// the adt-def (including, e.g., the length of the various /// vectors). 
fn encode_field(&mut self, - (adt_def_id, Untracked((variant_index, field_index))): - (DefId, Untracked<(usize, usize)>)) -> Entry<'tcx> { + (adt_def_id, Untracked((variant_index, field_index))): (DefId, + Untracked<(usize, + usize)>)) + -> Entry<'tcx> { let tcx = self.tcx; let variant = &tcx.lookup_adt_def(adt_def_id).variants[variant_index]; let field = &variant.fields[field_index]; @@ -400,19 +407,18 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { predicates: Some(self.encode_predicates(def_id)), ast: None, - mir: None + mir: None, } } - fn encode_struct_ctor(&mut self, (adt_def_id, def_id): (DefId, DefId)) - -> Entry<'tcx> { + fn encode_struct_ctor(&mut self, (adt_def_id, def_id): (DefId, DefId)) -> Entry<'tcx> { let tcx = self.tcx; let variant = tcx.lookup_adt_def(adt_def_id).struct_variant(); let data = VariantData { ctor_kind: variant.ctor_kind, disr: variant.disr_val.to_u64_unchecked(), - struct_ctor: Some(def_id.index) + struct_ctor: Some(def_id.index), }; let struct_id = tcx.map.as_local_node_id(adt_def_id).unwrap(); @@ -434,7 +440,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { predicates: Some(self.encode_predicates(def_id)), ast: None, - mir: None + mir: None, } } @@ -469,7 +475,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { let fn_data = if let hir::MethodTraitItem(ref sig, _) = ast_item.node { FnData { constness: hir::Constness::NotConst, - arg_names: self.encode_fn_arg_names(&sig.decl) + arg_names: self.encode_fn_arg_names(&sig.decl), } } else { bug!() @@ -477,13 +483,11 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { let data = MethodData { fn_data: fn_data, container: container(method_ty.has_body), - explicit_self: self.lazy(&method_ty.explicit_self) + explicit_self: self.lazy(&method_ty.explicit_self), }; EntryKind::Method(self.lazy(&data)) } - ty::TypeTraitItem(_) => { - EntryKind::AssociatedType(container(false)) - } + ty::TypeTraitItem(_) => EntryKind::AssociatedType(container(false)), }; Entry { @@ -497,9 +501,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { ty: match trait_item { ty::ConstTraitItem(_) | - ty::MethodTraitItem(_) => { - Some(self.encode_item_type(def_id)) - } + ty::MethodTraitItem(_) => Some(self.encode_item_type(def_id)), ty::TypeTraitItem(ref associated_type) => { associated_type.ty.map(|ty| self.lazy(&ty)) } @@ -515,7 +517,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { } else { None }, - mir: self.encode_mir(def_id) + mir: self.encode_mir(def_id), } } @@ -527,18 +529,16 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { let container = match ast_item.defaultness { hir::Defaultness::Default => AssociatedContainer::ImplDefault, - hir::Defaultness::Final => AssociatedContainer::ImplFinal + hir::Defaultness::Final => AssociatedContainer::ImplFinal, }; let kind = match impl_item { - ty::ConstTraitItem(_) => { - EntryKind::AssociatedConst(container) - } + ty::ConstTraitItem(_) => EntryKind::AssociatedConst(container), ty::MethodTraitItem(ref method_ty) => { let fn_data = if let hir::ImplItemKind::Method(ref sig, _) = ast_item.node { FnData { constness: sig.constness, - arg_names: self.encode_fn_arg_names(&sig.decl) + arg_names: self.encode_fn_arg_names(&sig.decl), } } else { bug!() @@ -546,13 +546,11 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { let data = MethodData { fn_data: fn_data, container: container, - explicit_self: self.lazy(&method_ty.explicit_self) + explicit_self: self.lazy(&method_ty.explicit_self), }; EntryKind::Method(self.lazy(&data)) } - ty::TypeTraitItem(_) => { - EntryKind::AssociatedType(container) - } + ty::TypeTraitItem(_) => 
EntryKind::AssociatedType(container), }; let (ast, mir) = if let ty::ConstTraitItem(_) = impl_item { @@ -578,9 +576,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { ty: match impl_item { ty::ConstTraitItem(_) | - ty::MethodTraitItem(_) => { - Some(self.encode_item_type(def_id)) - } + ty::MethodTraitItem(_) => Some(self.encode_item_type(def_id)), ty::TypeTraitItem(ref associated_type) => { associated_type.ty.map(|ty| self.lazy(&ty)) } @@ -595,11 +591,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { } else { None }, - mir: if mir { - self.encode_mir(def_id) - } else { - None - } + mir: if mir { self.encode_mir(def_id) } else { None }, } } @@ -638,8 +630,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { self.tcx.lookup_deprecation(def_id).map(|depr| self.lazy(&depr)) } - fn encode_info_for_item(&mut self, - (def_id, item): (DefId, &hir::Item)) -> Entry<'tcx> { + fn encode_info_for_item(&mut self, (def_id, item): (DefId, &hir::Item)) -> Entry<'tcx> { let tcx = self.tcx; debug!("encoding info for item at {}", @@ -652,7 +643,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { hir::ItemFn(ref decl, _, constness, ..) => { let data = FnData { constness: constness, - arg_names: self.encode_fn_arg_names(&decl) + arg_names: self.encode_fn_arg_names(&decl), }; EntryKind::Fn(self.lazy(&data)) @@ -666,9 +657,9 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { hir::ItemStruct(ref struct_def, _) => { let variant = tcx.lookup_adt_def(def_id).struct_variant(); - /* Encode def_ids for each field and method - for methods, write all the stuff get_trait_method - needs to know*/ + // Encode def_ids for each field and method + // for methods, write all the stuff get_trait_method + // needs to know let struct_ctor = if !struct_def.is_struct() { Some(tcx.map.local_def_id(struct_def.id()).index) } else { @@ -677,7 +668,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { EntryKind::Struct(self.lazy(&VariantData { ctor_kind: variant.ctor_kind, disr: variant.disr_val.to_u64_unchecked(), - struct_ctor: struct_ctor + struct_ctor: struct_ctor, })) } hir::ItemUnion(..) => { @@ -686,7 +677,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { EntryKind::Union(self.lazy(&VariantData { ctor_kind: variant.ctor_kind, disr: variant.disr_val.to_u64_unchecked(), - struct_ctor: None + struct_ctor: None, })) } hir::ItemDefaultImpl(..) 
=> { @@ -694,7 +685,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { polarity: hir::ImplPolarity::Positive, parent_impl: None, coerce_unsized_kind: None, - trait_ref: tcx.impl_trait_ref(def_id).map(|trait_ref| self.lazy(&trait_ref)) + trait_ref: tcx.impl_trait_ref(def_id).map(|trait_ref| self.lazy(&trait_ref)), }; EntryKind::DefaultImpl(self.lazy(&data)) @@ -716,9 +707,11 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { let data = ImplData { polarity: polarity, parent_impl: parent, - coerce_unsized_kind: tcx.custom_coerce_unsized_kinds.borrow() - .get(&def_id).cloned(), - trait_ref: trait_ref.map(|trait_ref| self.lazy(&trait_ref)) + coerce_unsized_kind: tcx.custom_coerce_unsized_kinds + .borrow() + .get(&def_id) + .cloned(), + trait_ref: trait_ref.map(|trait_ref| self.lazy(&trait_ref)), }; EntryKind::Impl(self.lazy(&data)) @@ -730,14 +723,13 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { paren_sugar: trait_def.paren_sugar, has_default_impl: tcx.trait_has_default_impl(def_id), trait_ref: self.lazy(&trait_def.trait_ref), - super_predicates: self.lazy(&tcx.lookup_super_predicates(def_id)) + super_predicates: self.lazy(&tcx.lookup_super_predicates(def_id)), }; EntryKind::Trait(self.lazy(&data)) } - hir::ItemExternCrate(_) | hir::ItemUse(_) => { - bug!("cannot encode info for item {:?}", item) - } + hir::ItemExternCrate(_) | + hir::ItemUse(_) => bug!("cannot encode info for item {:?}", item), }; Entry { @@ -747,9 +739,9 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { attributes: self.encode_attributes(&item.attrs), children: match item.node { hir::ItemForeignMod(ref fm) => { - self.lazy_seq(fm.items.iter().map(|foreign_item| { - tcx.map.local_def_id(foreign_item.id).index - })) + self.lazy_seq(fm.items + .iter() + .map(|foreign_item| tcx.map.local_def_id(foreign_item.id).index)) } hir::ItemEnum(..) => { let def = self.tcx.lookup_adt_def(def_id); @@ -773,7 +765,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { def_id.index })) } - _ => LazySeq::empty() + _ => LazySeq::empty(), }, stability: self.encode_stability(def_id), deprecation: self.encode_deprecation(def_id), @@ -786,20 +778,16 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { hir::ItemEnum(..) | hir::ItemStruct(..) | hir::ItemUnion(..) | - hir::ItemImpl(..) => { - Some(self.encode_item_type(def_id)) - } - _ => None + hir::ItemImpl(..) => Some(self.encode_item_type(def_id)), + _ => None, }, inherent_impls: self.encode_inherent_implementations(def_id), variances: match item.node { hir::ItemEnum(..) | hir::ItemStruct(..) | hir::ItemUnion(..) | - hir::ItemTrait(..) => { - self.encode_item_variances(def_id) - } - _ => LazySeq::empty() + hir::ItemTrait(..) => self.encode_item_variances(def_id), + _ => LazySeq::empty(), }, generics: match item.node { hir::ItemStatic(..) | @@ -810,10 +798,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { hir::ItemStruct(..) | hir::ItemUnion(..) | hir::ItemImpl(..) | - hir::ItemTrait(..) => { - Some(self.encode_generics(def_id)) - } - _ => None + hir::ItemTrait(..) => Some(self.encode_generics(def_id)), + _ => None, }, predicates: match item.node { hir::ItemStatic(..) | @@ -824,10 +810,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { hir::ItemStruct(..) | hir::ItemUnion(..) | hir::ItemImpl(..) | - hir::ItemTrait(..) => { - Some(self.encode_predicates(def_id)) - } - _ => None + hir::ItemTrait(..) => Some(self.encode_predicates(def_id)), + _ => None, }, ast: match item.node { @@ -835,12 +819,10 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { hir::ItemFn(_, _, hir::Constness::Const, ..) 
=> { Some(self.encode_inlined_item(InlinedItemRef::Item(def_id, item))) } - _ => None + _ => None, }, mir: match item.node { - hir::ItemConst(..) => { - self.encode_mir(def_id) - } + hir::ItemConst(..) => self.encode_mir(def_id), hir::ItemFn(_, _, constness, _, ref generics, _) => { let tps_len = generics.ty_params.len(); let needs_inline = tps_len > 0 || attr::requests_inline(&item.attrs); @@ -850,8 +832,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { None } } - _ => None - } + _ => None, + }, } } } @@ -861,8 +843,7 @@ impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> { /// encode some sub-items. Usually we want some info from the item /// so it's easier to do that here then to wait until we would encounter /// normally in the visitor walk. - fn encode_addl_info_for_item(&mut self, - item: &hir::Item) { + fn encode_addl_info_for_item(&mut self, item: &hir::Item) { let def_id = self.tcx.map.local_def_id(item.id); match item.node { hir::ItemStatic(..) | @@ -930,12 +911,12 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { hir::ForeignItemFn(ref fndecl, _) => { let data = FnData { constness: hir::Constness::NotConst, - arg_names: self.encode_fn_arg_names(&fndecl) + arg_names: self.encode_fn_arg_names(&fndecl), }; EntryKind::ForeignFn(self.lazy(&data)) } hir::ForeignItemStatic(_, true) => EntryKind::ForeignMutStatic, - hir::ForeignItemStatic(_, false) => EntryKind::ForeignImmStatic + hir::ForeignItemStatic(_, false) => EntryKind::ForeignImmStatic, }; Entry { @@ -954,7 +935,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { predicates: Some(self.encode_predicates(def_id)), ast: None, - mir: None + mir: None, } } } @@ -972,10 +953,9 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for EncodeVisitor<'a, 'b, 'tcx> { intravisit::walk_item(self, item); let def_id = self.index.tcx.map.local_def_id(item.id); match item.node { - hir::ItemExternCrate(_) | hir::ItemUse(_) => (), // ignore these - _ => self.index.record(def_id, - EncodeContext::encode_info_for_item, - (def_id, item)), + hir::ItemExternCrate(_) | + hir::ItemUse(_) => (), // ignore these + _ => self.index.record(def_id, EncodeContext::encode_info_for_item, (def_id, item)), } self.index.encode_addl_info_for_item(item); } @@ -996,9 +976,7 @@ impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> { fn encode_info_for_ty(&mut self, ty: &hir::Ty) { if let hir::TyImplTrait(_) = ty.node { let def_id = self.tcx.map.local_def_id(ty.id); - self.record(def_id, - EncodeContext::encode_info_for_anon_ty, - def_id); + self.record(def_id, EncodeContext::encode_info_for_anon_ty, def_id); } } @@ -1006,11 +984,9 @@ impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> { match expr.node { hir::ExprClosure(..) 
=> { let def_id = self.tcx.map.local_def_id(expr.id); - self.record(def_id, - EncodeContext::encode_info_for_closure, - def_id); + self.record(def_id, EncodeContext::encode_info_for_closure, def_id); } - _ => { } + _ => {} } } } @@ -1033,7 +1009,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { predicates: Some(self.encode_predicates(def_id)), ast: None, - mir: None + mir: None, } } @@ -1042,7 +1018,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { let data = ClosureData { kind: tcx.closure_kind(def_id), - ty: self.lazy(&tcx.tables.borrow().closure_tys[&def_id]) + ty: self.lazy(&tcx.tables.borrow().closure_tys[&def_id]), }; Entry { @@ -1061,7 +1037,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { predicates: None, ast: None, - mir: self.encode_mir(def_id) + mir: self.encode_mir(def_id), } } @@ -1071,9 +1047,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { index.record(DefId::local(CRATE_DEF_INDEX), EncodeContext::encode_info_for_mod, FromId(CRATE_NODE_ID, (&krate.module, &krate.attrs, &hir::Public))); - let mut visitor = EncodeVisitor { - index: index, - }; + let mut visitor = EncodeVisitor { index: index }; krate.visit_all_items(&mut visitor); visitor.index.into_items() } @@ -1083,8 +1057,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { } fn encode_crate_deps(&mut self) -> LazySeq { - fn get_ordered_deps(cstore: &cstore::CStore) - -> Vec<(CrateNum, Rc)> { + fn get_ordered_deps(cstore: &cstore::CStore) -> Vec<(CrateNum, Rc)> { // Pull the cnums and name,vers,hash out of cstore let mut deps = Vec::new(); cstore.iter_crate_data(|cnum, val| { @@ -1113,13 +1086,12 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { CrateDep { name: syntax::parse::token::intern(dep.name()), hash: dep.hash(), - explicitly_linked: dep.explicitly_linked.get() + explicitly_linked: dep.explicitly_linked.get(), } })) } - fn encode_lang_items(&mut self) - -> (LazySeq<(DefIndex, usize)>, LazySeq) { + fn encode_lang_items(&mut self) -> (LazySeq<(DefIndex, usize)>, LazySeq) { let tcx = self.tcx; let lang_items = tcx.lang_items.items().iter(); (self.lazy_seq(lang_items.enumerate().filter_map(|(i, &opt_def_id)| { @@ -1129,7 +1101,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { } } None - })), self.lazy_seq_ref(&tcx.lang_items.missing)) + })), + self.lazy_seq_ref(&tcx.lang_items.missing)) } fn encode_native_libraries(&mut self) -> LazySeq<(NativeLibraryKind, String)> { @@ -1137,9 +1110,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { self.lazy_seq(used_libraries.into_iter().filter_map(|(lib, kind)| { match kind { cstore::NativeStatic => None, // these libraries are not propagated - cstore::NativeFramework | cstore::NativeUnknown => { - Some((kind, lib)) - } + cstore::NativeFramework | cstore::NativeUnknown => Some((kind, lib)), } })) } @@ -1147,13 +1118,15 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { fn encode_codemap(&mut self) -> LazySeq { let codemap = self.tcx.sess.codemap(); let all_filemaps = codemap.files.borrow(); - self.lazy_seq_ref(all_filemaps.iter().filter(|filemap| { - // No need to export empty filemaps, as they can't contain spans - // that need translation. - // Also no need to re-export imported filemaps, as any downstream - // crate will import them from their original source. - !filemap.lines.borrow().is_empty() && !filemap.is_imported() - }).map(|filemap| &**filemap)) + self.lazy_seq_ref(all_filemaps.iter() + .filter(|filemap| { + // No need to export empty filemaps, as they can't contain spans + // that need translation. 
+ // Also no need to re-export imported filemaps, as any downstream + // crate will import them from their original source. + !filemap.lines.borrow().is_empty() && !filemap.is_imported() + }) + .map(|filemap| &**filemap)) } /// Serialize the text of the exported macros @@ -1164,15 +1137,15 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { name: def.name, attrs: def.attrs.to_vec(), span: def.span, - body: ::syntax::print::pprust::tts_to_string(&def.body) + body: ::syntax::print::pprust::tts_to_string(&def.body), } })) } } -struct ImplVisitor<'a, 'tcx:'a> { +struct ImplVisitor<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, - impls: FnvHashMap> + impls: FnvHashMap>, } impl<'a, 'tcx, 'v> Visitor<'v> for ImplVisitor<'a, 'tcx> { @@ -1180,7 +1153,8 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ImplVisitor<'a, 'tcx> { if let hir::ItemImpl(..) = item.node { let impl_id = self.tcx.map.local_def_id(item.id); if let Some(trait_ref) = self.tcx.impl_trait_ref(impl_id) { - self.impls.entry(trait_ref.def_id) + self.impls + .entry(trait_ref.def_id) .or_insert(vec![]) .push(impl_id.index); } @@ -1193,16 +1167,19 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { fn encode_impls(&mut self) -> LazySeq { let mut visitor = ImplVisitor { tcx: self.tcx, - impls: FnvHashMap() + impls: FnvHashMap(), }; self.tcx.map.krate().visit_all_items(&mut visitor); - let all_impls: Vec<_> = visitor.impls.into_iter().map(|(trait_def_id, impls)| { - TraitImpls { - trait_id: (trait_def_id.krate.as_u32(), trait_def_id.index), - impls: self.lazy_seq(impls) - } - }).collect(); + let all_impls: Vec<_> = visitor.impls + .into_iter() + .map(|(trait_def_id, impls)| { + TraitImpls { + trait_id: (trait_def_id.krate.as_u32(), trait_def_id.index), + impls: self.lazy_seq(impls), + } + }) + .collect(); self.lazy_seq(all_impls) } @@ -1232,7 +1209,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { } })) } - None => LazySeq::empty() + None => LazySeq::empty(), } } @@ -1291,9 +1268,10 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { hash: link_meta.crate_hash, disambiguator: tcx.sess.local_crate_disambiguator().to_string(), panic_strategy: tcx.sess.panic_strategy(), - plugin_registrar_fn: tcx.sess.plugin_registrar_fn.get().map(|id| { - tcx.map.local_def_id(id).index - }), + plugin_registrar_fn: tcx.sess + .plugin_registrar_fn + .get() + .map(|id| tcx.map.local_def_id(id).index), macro_derive_registrar: if is_proc_macro { let id = tcx.sess.derive_registrar_fn.get().unwrap(); Some(tcx.map.local_def_id(id).index) @@ -1369,7 +1347,8 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, reexports: &def::ExportMap, link_meta: &LinkMeta, reachable: &NodeSet, - mir_map: &MirMap<'tcx>) -> Vec { + mir_map: &MirMap<'tcx>) + -> Vec { let mut cursor = Cursor::new(vec![]); cursor.write_all(METADATA_HEADER).unwrap(); @@ -1377,17 +1356,18 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, cursor.write_all(&[0, 0, 0, 0]).unwrap(); let root = EncodeContext { - opaque: opaque::Encoder::new(&mut cursor), - tcx: tcx, - reexports: reexports, - link_meta: link_meta, - cstore: cstore, - reachable: reachable, - mir_map: mir_map, - lazy_state: LazyState::NoNode, - type_shorthands: Default::default(), - predicate_shorthands: Default::default() - }.encode_crate_root(); + opaque: opaque::Encoder::new(&mut cursor), + tcx: tcx, + reexports: reexports, + link_meta: link_meta, + cstore: cstore, + reachable: reachable, + mir_map: mir_map, + lazy_state: LazyState::NoNode, + type_shorthands: Default::default(), + predicate_shorthands: Default::default(), + } + .encode_crate_root(); let mut 
result = cursor.into_inner(); // Encode the root position. @@ -1395,8 +1375,8 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, let pos = root.position; result[header + 0] = (pos >> 24) as u8; result[header + 1] = (pos >> 16) as u8; - result[header + 2] = (pos >> 8) as u8; - result[header + 3] = (pos >> 0) as u8; + result[header + 2] = (pos >> 8) as u8; + result[header + 3] = (pos >> 0) as u8; result } diff --git a/src/librustc_metadata/index.rs b/src/librustc_metadata/index.rs index ef83251f51e8d..53e6988c756c9 100644 --- a/src/librustc_metadata/index.rs +++ b/src/librustc_metadata/index.rs @@ -28,9 +28,7 @@ pub struct Index { impl Index { pub fn new(max_index: usize) -> Index { - Index { - positions: vec![u32::MAX; max_index] - } + Index { positions: vec![u32::MAX; max_index] } } pub fn record(&mut self, def_id: DefId, entry: Lazy) { @@ -46,7 +44,9 @@ impl Index { assert!(self.positions[item] == u32::MAX, "recorded position for item {:?} twice, first at {:?} and now at {:?}", - item, self.positions[item], position); + item, + self.positions[item], + position); self.positions[item] = position.to_le(); } @@ -67,7 +67,8 @@ impl<'tcx> LazySeq { let index = def_index.as_usize(); debug!("Index::lookup: index={:?} words.len={:?}", - index, words.len()); + index, + words.len()); let position = u32::from_le(words[index]); if position == u32::MAX { @@ -79,8 +80,9 @@ impl<'tcx> LazySeq { } } - pub fn iter_enumerated<'a>(&self, bytes: &'a [u8]) - -> impl Iterator>)> + 'a { + pub fn iter_enumerated<'a>(&self, + bytes: &'a [u8]) + -> impl Iterator>)> + 'a { let words = &bytes_to_words(&bytes[self.position..])[..self.len]; words.iter().enumerate().filter_map(|(index, &position)| { if position == u32::MAX { diff --git a/src/librustc_metadata/index_builder.rs b/src/librustc_metadata/index_builder.rs index aeb6f63252c6e..9938e20d1861d 100644 --- a/src/librustc_metadata/index_builder.rs +++ b/src/librustc_metadata/index_builder.rs @@ -138,11 +138,11 @@ pub trait DepGraphRead { } impl DepGraphRead for DefId { - fn read(&self, _tcx: TyCtxt) { } + fn read(&self, _tcx: TyCtxt) {} } impl DepGraphRead for ast::NodeId { - fn read(&self, _tcx: TyCtxt) { } + fn read(&self, _tcx: TyCtxt) {} } impl DepGraphRead for Option @@ -179,8 +179,8 @@ macro_rules! read_tuple { } } } -read_tuple!(A,B); -read_tuple!(A,B,C); +read_tuple!(A, B); +read_tuple!(A, B, C); macro_rules! 
read_hir { ($t:ty) => { @@ -208,7 +208,7 @@ read_hir!(hir::ForeignItem); pub struct Untracked(pub T); impl DepGraphRead for Untracked { - fn read(&self, _tcx: TyCtxt) { } + fn read(&self, _tcx: TyCtxt) {} } /// Newtype that can be used to package up misc data extracted from a diff --git a/src/librustc_metadata/lib.rs b/src/librustc_metadata/lib.rs index 300c5f0dec71d..ef81dbd7f29ec 100644 --- a/src/librustc_metadata/lib.rs +++ b/src/librustc_metadata/lib.rs @@ -30,8 +30,10 @@ #![feature(specialization)] #![feature(staged_api)] -#[macro_use] extern crate log; -#[macro_use] extern crate syntax; +#[macro_use] +extern crate log; +#[macro_use] +extern crate syntax; extern crate syntax_pos; extern crate flate; extern crate serialize as rustc_serialize; // used by deriving diff --git a/src/librustc_metadata/locator.rs b/src/librustc_metadata/locator.rs index e684cd16366ec..0461d7ec061d4 100644 --- a/src/librustc_metadata/locator.rs +++ b/src/librustc_metadata/locator.rs @@ -273,7 +273,7 @@ pub struct ArchiveMetadata { pub struct CratePaths { pub ident: String, pub dylib: Option, - pub rlib: Option + pub rlib: Option, } pub const METADATA_FILENAME: &'static str = "rust.metadata.bin"; @@ -281,14 +281,14 @@ pub const METADATA_FILENAME: &'static str = "rust.metadata.bin"; #[derive(Copy, Clone, PartialEq)] enum CrateFlavor { Rlib, - Dylib + Dylib, } impl fmt::Display for CrateFlavor { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(match *self { CrateFlavor::Rlib => "rlib", - CrateFlavor::Dylib => "dylib" + CrateFlavor::Dylib => "dylib", }) } } @@ -296,10 +296,10 @@ impl fmt::Display for CrateFlavor { impl CratePaths { fn paths(&self) -> Vec { match (&self.dylib, &self.rlib) { - (&None, &None) => vec!(), + (&None, &None) => vec![], (&Some(ref p), &None) | - (&None, &Some(ref p)) => vec!(p.clone()), - (&Some(ref p1), &Some(ref p2)) => vec!(p1.clone(), p2.clone()), + (&None, &Some(ref p)) => vec![p.clone()], + (&Some(ref p1), &Some(ref p2)) => vec![p1.clone(), p2.clone()], } } } @@ -316,53 +316,72 @@ impl<'a> Context<'a> { pub fn report_errs(&mut self) -> ! 
{ let add = match self.root { &None => String::new(), - &Some(ref r) => format!(" which `{}` depends on", - r.ident) + &Some(ref r) => format!(" which `{}` depends on", r.ident), }; let mut err = if !self.rejected_via_hash.is_empty() { - struct_span_err!(self.sess, self.span, E0460, + struct_span_err!(self.sess, + self.span, + E0460, "found possibly newer version of crate `{}`{}", - self.ident, add) + self.ident, + add) } else if !self.rejected_via_triple.is_empty() { - struct_span_err!(self.sess, self.span, E0461, + struct_span_err!(self.sess, + self.span, + E0461, "couldn't find crate `{}` with expected target triple {}{}", - self.ident, self.triple, add) + self.ident, + self.triple, + add) } else if !self.rejected_via_kind.is_empty() { - struct_span_err!(self.sess, self.span, E0462, + struct_span_err!(self.sess, + self.span, + E0462, "found staticlib `{}` instead of rlib or dylib{}", - self.ident, add) + self.ident, + add) } else if !self.rejected_via_version.is_empty() { - struct_span_err!(self.sess, self.span, E0514, + struct_span_err!(self.sess, + self.span, + E0514, "found crate `{}` compiled by an incompatible version of rustc{}", - self.ident, add) + self.ident, + add) } else { - let mut err = struct_span_err!(self.sess, self.span, E0463, + let mut err = struct_span_err!(self.sess, + self.span, + E0463, "can't find crate for `{}`{}", - self.ident, add); + self.ident, + add); err.span_label(self.span, &format!("can't find crate")); err }; if !self.rejected_via_triple.is_empty() { let mismatches = self.rejected_via_triple.iter(); - for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() { + for (i, &CrateMismatch { ref path, ref got }) in mismatches.enumerate() { err.note(&format!("crate `{}`, path #{}, triple {}: {}", - self.ident, i+1, got, path.display())); + self.ident, + i + 1, + got, + path.display())); } } if !self.rejected_via_hash.is_empty() { err.note("perhaps that crate needs to be recompiled?"); let mismatches = self.rejected_via_hash.iter(); - for (i, &CrateMismatch{ ref path, .. }) in mismatches.enumerate() { - err.note(&format!("crate `{}` path #{}: {}", - self.ident, i+1, path.display())); + for (i, &CrateMismatch { ref path, .. }) in mismatches.enumerate() { + err.note(&format!("crate `{}` path #{}: {}", self.ident, i + 1, path.display())); } match self.root { &None => {} &Some(ref r) => { for (i, path) in r.paths().iter().enumerate() { err.note(&format!("crate `{}` path #{}: {}", - r.ident, i+1, path.display())); + r.ident, + i + 1, + path.display())); } } } @@ -371,8 +390,7 @@ impl<'a> Context<'a> { err.help("please recompile that crate using --crate-type lib"); let mismatches = self.rejected_via_kind.iter(); for (i, &CrateMismatch { ref path, .. 
}) in mismatches.enumerate() { - err.note(&format!("crate `{}` path #{}: {}", - self.ident, i+1, path.display())); + err.note(&format!("crate `{}` path #{}: {}", self.ident, i + 1, path.display())); } } if !self.rejected_via_version.is_empty() { @@ -381,7 +399,10 @@ impl<'a> Context<'a> { let mismatches = self.rejected_via_version.iter(); for (i, &CrateMismatch { ref path, ref got }) in mismatches.enumerate() { err.note(&format!("crate `{}` path #{}: {} compiled by {:?}", - self.ident, i+1, path.display(), got)); + self.ident, + i + 1, + path.display(), + got)); } } @@ -410,7 +431,7 @@ impl<'a> Context<'a> { let staticlib_prefix = format!("{}{}", staticpair.0, self.crate_name); let mut candidates = FnvHashMap(); - let mut staticlibs = vec!(); + let mut staticlibs = vec![]; // First, find all possible candidate rlibs and dylibs purely based on // the name of the files themselves. We're trying to match against an @@ -430,38 +451,36 @@ impl<'a> Context<'a> { None => return FileDoesntMatch, Some(file) => file, }; - let (hash, rlib) = if file.starts_with(&rlib_prefix[..]) && - file.ends_with(".rlib") { - (&file[(rlib_prefix.len()) .. (file.len() - ".rlib".len())], - true) + let (hash, rlib) = if file.starts_with(&rlib_prefix[..]) && file.ends_with(".rlib") { + (&file[(rlib_prefix.len())..(file.len() - ".rlib".len())], true) } else if file.starts_with(&dylib_prefix) && - file.ends_with(&dypair.1) { - (&file[(dylib_prefix.len()) .. (file.len() - dypair.1.len())], - false) + file.ends_with(&dypair.1) { + (&file[(dylib_prefix.len())..(file.len() - dypair.1.len())], false) } else { - if file.starts_with(&staticlib_prefix[..]) && - file.ends_with(&staticpair.1) { + if file.starts_with(&staticlib_prefix[..]) && file.ends_with(&staticpair.1) { staticlibs.push(CrateMismatch { path: path.to_path_buf(), - got: "static".to_string() + got: "static".to_string(), }); } - return FileDoesntMatch + return FileDoesntMatch; }; info!("lib candidate: {}", path.display()); let hash_str = hash.to_string(); let slot = candidates.entry(hash_str) - .or_insert_with(|| (FnvHashMap(), FnvHashMap())); + .or_insert_with(|| (FnvHashMap(), FnvHashMap())); let (ref mut rlibs, ref mut dylibs) = *slot; - fs::canonicalize(path).map(|p| { - if rlib { - rlibs.insert(p, kind); - } else { - dylibs.insert(p, kind); - } - FileMatches - }).unwrap_or(FileDoesntMatch) + fs::canonicalize(path) + .map(|p| { + if rlib { + rlibs.insert(p, kind); + } else { + dylibs.insert(p, kind); + } + FileMatches + }) + .unwrap_or(FileDoesntMatch) }); self.rejected_via_kind.extend(staticlibs); @@ -479,11 +498,12 @@ impl<'a> Context<'a> { let rlib = self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot); let dylib = self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot); if let Some((h, m)) = slot { - libraries.insert(h, Library { - dylib: dylib, - rlib: rlib, - metadata: m, - }); + libraries.insert(h, + Library { + dylib: dylib, + rlib: rlib, + metadata: m, + }); } } @@ -494,7 +514,9 @@ impl<'a> Context<'a> { 0 => None, 1 => Some(libraries.into_iter().next().unwrap().1), _ => { - let mut err = struct_span_err!(self.sess, self.span, E0464, + let mut err = struct_span_err!(self.sess, + self.span, + E0464, "multiple matching crates for `{}`", self.crate_name); err.note("candidates:"); @@ -521,8 +543,11 @@ impl<'a> Context<'a> { // read the metadata from it if `*slot` is `None`. If the metadata couldn't // be read, it is assumed that the file isn't a valid rust library (no // errors are emitted). 
- fn extract_one(&mut self, m: FnvHashMap, flavor: CrateFlavor, - slot: &mut Option<(Svh, MetadataBlob)>) -> Option<(PathBuf, PathKind)> { + fn extract_one(&mut self, + m: FnvHashMap, + flavor: CrateFlavor, + slot: &mut Option<(Svh, MetadataBlob)>) + -> Option<(PathBuf, PathKind)> { let mut ret: Option<(PathBuf, PathKind)> = None; let mut error = 0; @@ -532,9 +557,9 @@ impl<'a> Context<'a> { // read both, but reading dylib metadata is quite // slow. if m.is_empty() { - return None + return None; } else if m.len() == 1 { - return Some(m.into_iter().next().unwrap()) + return Some(m.into_iter().next().unwrap()); } } @@ -547,23 +572,28 @@ impl<'a> Context<'a> { (h, blob) } else { info!("metadata mismatch"); - continue + continue; } } Err(err) => { info!("no metadata found: {}", err); - continue + continue; } }; // If we see multiple hashes, emit an error about duplicate candidates. if slot.as_ref().map_or(false, |s| s.0 != hash) { - let mut e = struct_span_err!(self.sess, self.span, E0465, + let mut e = struct_span_err!(self.sess, + self.span, + E0465, "multiple {} candidates for `{}` found", - flavor, self.crate_name); + flavor, + self.crate_name); e.span_note(self.span, &format!(r"candidate #1: {}", - ret.as_ref().unwrap().0 - .display())); + ret.as_ref() + .unwrap() + .0 + .display())); if let Some(ref mut e) = err { e.emit(); } @@ -574,9 +604,10 @@ impl<'a> Context<'a> { if error > 0 { error += 1; err.as_mut().unwrap().span_note(self.span, - &format!(r"candidate #{}: {}", error, + &format!(r"candidate #{}: {}", + error, lib.display())); - continue + continue; } *slot = Some((hash, metadata)); ret = Some((lib, kind)); @@ -595,37 +626,39 @@ impl<'a> Context<'a> { let rustc_version = rustc_version(); if root.rustc_version != rustc_version { info!("Rejecting via version: expected {} got {}", - rustc_version, root.rustc_version); + rustc_version, + root.rustc_version); self.rejected_via_version.push(CrateMismatch { path: libpath.to_path_buf(), - got: root.rustc_version + got: root.rustc_version, }); return None; } if self.should_match_name { if self.crate_name != root.name { - info!("Rejecting via crate name"); return None; + info!("Rejecting via crate name"); + return None; } } if root.triple != self.triple { info!("Rejecting via crate triple: expected {} got {}", - self.triple, root.triple); + self.triple, + root.triple); self.rejected_via_triple.push(CrateMismatch { path: libpath.to_path_buf(), - got: root.triple + got: root.triple, }); return None; } if let Some(myhash) = self.hash { if *myhash != root.hash { - info!("Rejecting via hash: expected {} got {}", - *myhash, root.hash); + info!("Rejecting via hash: expected {} got {}", *myhash, root.hash); self.rejected_via_hash.push(CrateMismatch { path: libpath.to_path_buf(), - got: myhash.to_string() + got: myhash.to_string(), }); return None; } @@ -649,8 +682,8 @@ impl<'a> Context<'a> { (t.options.staticlib_prefix.clone(), t.options.staticlib_suffix.clone()) } - fn find_commandline_library<'b, LOCS> (&mut self, locs: LOCS) -> Option - where LOCS: Iterator + fn find_commandline_library<'b, LOCS>(&mut self, locs: LOCS) -> Option + where LOCS: Iterator { // First, filter out all libraries that look suspicious. 
We only accept // files which actually exist that have the correct naming scheme for @@ -663,30 +696,33 @@ impl<'a> Context<'a> { let locs = locs.map(|l| PathBuf::from(l)).filter(|loc| { if !loc.exists() { sess.err(&format!("extern location for {} does not exist: {}", - self.crate_name, loc.display())); + self.crate_name, + loc.display())); return false; } let file = match loc.file_name().and_then(|s| s.to_str()) { Some(file) => file, None => { sess.err(&format!("extern location for {} is not a file: {}", - self.crate_name, loc.display())); + self.crate_name, + loc.display())); return false; } }; if file.starts_with("lib") && file.ends_with(".rlib") { - return true + return true; } else { let (ref prefix, ref suffix) = dylibname; - if file.starts_with(&prefix[..]) && - file.ends_with(&suffix[..]) { - return true + if file.starts_with(&prefix[..]) && file.ends_with(&suffix[..]) { + return true; } } sess.struct_err(&format!("extern location for {} is of an unknown type: {}", - self.crate_name, loc.display())) + self.crate_name, + loc.display())) .help(&format!("file name should be lib*.rlib or {}*.{}", - dylibname.0, dylibname.1)) + dylibname.0, + dylibname.1)) .emit(); false }); @@ -695,11 +731,9 @@ impl<'a> Context<'a> { // there's at most one rlib and at most one dylib. for loc in locs { if loc.file_name().unwrap().to_str().unwrap().ends_with(".rlib") { - rlibs.insert(fs::canonicalize(&loc).unwrap(), - PathKind::ExternFlag); + rlibs.insert(fs::canonicalize(&loc).unwrap(), PathKind::ExternFlag); } else { - dylibs.insert(fs::canonicalize(&loc).unwrap(), - PathKind::ExternFlag); + dylibs.insert(fs::canonicalize(&loc).unwrap(), PathKind::ExternFlag); } } }; @@ -709,13 +743,17 @@ impl<'a> Context<'a> { let rlib = self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot); let dylib = self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot); - if rlib.is_none() && dylib.is_none() { return None } + if rlib.is_none() && dylib.is_none() { + return None; + } match slot { - Some((_, metadata)) => Some(Library { - dylib: dylib, - rlib: rlib, - metadata: metadata, - }), + Some((_, metadata)) => { + Some(Library { + dylib: dylib, + rlib: rlib, + metadata: metadata, + }) + } None => None, } } @@ -728,9 +766,9 @@ pub fn note_crate_name(err: &mut DiagnosticBuilder, name: &str) { impl ArchiveMetadata { fn new(ar: ArchiveRO) -> Option { let data = { - let section = ar.iter().filter_map(|s| s.ok()).find(|sect| { - sect.name() == Some(METADATA_FILENAME) - }); + let section = ar.iter() + .filter_map(|s| s.ok()) + .find(|sect| sect.name() == Some(METADATA_FILENAME)); match section { Some(s) => s.data() as *const [u8], None => { @@ -746,12 +784,14 @@ impl ArchiveMetadata { }) } - pub fn as_slice<'a>(&'a self) -> &'a [u8] { unsafe { &*self.data } } + pub fn as_slice<'a>(&'a self) -> &'a [u8] { + unsafe { &*self.data } + } } -fn verify_decompressed_encoding_version(blob: &MetadataBlob, filename: &Path) - -> Result<(), String> -{ +fn verify_decompressed_encoding_version(blob: &MetadataBlob, + filename: &Path) + -> Result<(), String> { if !blob.is_compatible() { Err((format!("incompatible metadata version found: '{}'", filename.display()))) @@ -761,16 +801,21 @@ fn verify_decompressed_encoding_version(blob: &MetadataBlob, filename: &Path) } // Just a small wrapper to time how long reading metadata takes. 
-fn get_metadata_section(target: &Target, flavor: CrateFlavor, filename: &Path) +fn get_metadata_section(target: &Target, + flavor: CrateFlavor, + filename: &Path) -> Result { let start = Instant::now(); let ret = get_metadata_section_imp(target, flavor, filename); - info!("reading {:?} => {:?}", filename.file_name().unwrap(), + info!("reading {:?} => {:?}", + filename.file_name().unwrap(), start.elapsed()); - return ret + return ret; } -fn get_metadata_section_imp(target: &Target, flavor: CrateFlavor, filename: &Path) +fn get_metadata_section_imp(target: &Target, + flavor: CrateFlavor, + filename: &Path) -> Result { if !filename.exists() { return Err(format!("no such file: '{}'", filename.display())); @@ -783,13 +828,11 @@ fn get_metadata_section_imp(target: &Target, flavor: CrateFlavor, filename: &Pat Some(ar) => ar, None => { debug!("llvm didn't like `{}`", filename.display()); - return Err(format!("failed to read rlib metadata: '{}'", - filename.display())); + return Err(format!("failed to read rlib metadata: '{}'", filename.display())); } }; return match ArchiveMetadata::new(archive).map(|ar| MetadataBlob::Archive(ar)) { - None => Err(format!("failed to read rlib metadata: '{}'", - filename.display())), + None => Err(format!("failed to read rlib metadata: '{}'", filename.display())), Some(blob) => { verify_decompressed_encoding_version(&blob, filename)?; Ok(blob) @@ -800,22 +843,19 @@ fn get_metadata_section_imp(target: &Target, flavor: CrateFlavor, filename: &Pat let buf = common::path2cstr(filename); let mb = llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf.as_ptr()); if mb as isize == 0 { - return Err(format!("error reading library: '{}'", - filename.display())) + return Err(format!("error reading library: '{}'", filename.display())); } let of = match ObjectFile::new(mb) { Some(of) => of, _ => { - return Err((format!("provided path not an object file: '{}'", - filename.display()))) + return Err((format!("provided path not an object file: '{}'", filename.display()))) } }; let si = mk_section_iter(of.llof); while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False { let mut name_buf = ptr::null(); let name_len = llvm::LLVMRustGetSectionName(si.llsi, &mut name_buf); - let name = slice::from_raw_parts(name_buf as *const u8, - name_len as usize).to_vec(); + let name = slice::from_raw_parts(name_buf as *const u8, name_len as usize).to_vec(); let name = String::from_utf8(name).unwrap(); debug!("get_metadata_section: name {}", name); if read_meta_section_name(target) == name { @@ -823,8 +863,7 @@ fn get_metadata_section_imp(target: &Target, flavor: CrateFlavor, filename: &Pat let csz = llvm::LLVMGetSectionSize(si.llsi) as usize; let cvbuf: *const u8 = cbuf as *const u8; let vlen = METADATA_HEADER.len(); - debug!("checking {} bytes of metadata-version stamp", - vlen); + debug!("checking {} bytes of metadata-version stamp", vlen); let minsz = cmp::min(vlen, csz); let buf0 = slice::from_raw_parts(cvbuf, minsz); let version_ok = buf0 == METADATA_HEADER; @@ -834,8 +873,7 @@ fn get_metadata_section_imp(target: &Target, flavor: CrateFlavor, filename: &Pat } let cvbuf1 = cvbuf.offset(vlen as isize); - debug!("inflating {} bytes of compressed metadata", - csz - vlen); + debug!("inflating {} bytes of compressed metadata", csz - vlen); let bytes = slice::from_raw_parts(cvbuf1, csz - vlen); match flate::inflate_bytes(bytes) { Ok(inflated) => { @@ -879,14 +917,15 @@ pub fn read_meta_section_name(_target: &Target) -> &'static str { } // A diagnostic function for dumping crate metadata to 
an output stream -pub fn list_file_metadata(target: &Target, path: &Path, - out: &mut io::Write) -> io::Result<()> { +pub fn list_file_metadata(target: &Target, path: &Path, out: &mut io::Write) -> io::Result<()> { let filename = path.file_name().unwrap().to_str().unwrap(); - let flavor = if filename.ends_with(".rlib") { CrateFlavor::Rlib } else { CrateFlavor::Dylib }; + let flavor = if filename.ends_with(".rlib") { + CrateFlavor::Rlib + } else { + CrateFlavor::Dylib + }; match get_metadata_section(target, flavor, path) { Ok(metadata) => metadata.list_crate_metadata(out), - Err(msg) => { - write!(out, "{}\n", msg) - } + Err(msg) => write!(out, "{}\n", msg), } } diff --git a/src/librustc_metadata/schema.rs b/src/librustc_metadata/schema.rs index 0bb126ee0ffb6..4174a2ef47d33 100644 --- a/src/librustc_metadata/schema.rs +++ b/src/librustc_metadata/schema.rs @@ -27,7 +27,8 @@ use syntax_pos::{self, Span}; use std::marker::PhantomData; pub fn rustc_version() -> String { - format!("rustc {}", option_env!("CFG_VERSION").unwrap_or("unknown version")) + format!("rustc {}", + option_env!("CFG_VERSION").unwrap_or("unknown version")) } /// Metadata encoding version. @@ -41,11 +42,8 @@ pub const METADATA_VERSION: u8 = 3; /// as a length of 0 by old compilers. /// /// This header is followed by the position of the `CrateRoot`. -pub const METADATA_HEADER: &'static [u8; 12] = &[ - 0, 0, 0, 0, - b'r', b'u', b's', b't', - 0, 0, 0, METADATA_VERSION -]; +pub const METADATA_HEADER: &'static [u8; 12] = + &[0, 0, 0, 0, b'r', b'u', b's', b't', 0, 0, 0, METADATA_VERSION]; /// The shorthand encoding uses an enum's variant index `usize` /// and is offset by this value so it never matches a real variant. @@ -70,14 +68,14 @@ pub const SHORTHAND_OFFSET: usize = 0x80; #[must_use] pub struct Lazy { pub position: usize, - _marker: PhantomData + _marker: PhantomData, } impl Lazy { pub fn with_position(position: usize) -> Lazy { Lazy { position: position, - _marker: PhantomData + _marker: PhantomData, } } @@ -90,7 +88,9 @@ impl Lazy { impl Copy for Lazy {} impl Clone for Lazy { - fn clone(&self) -> Self { *self } + fn clone(&self) -> Self { + *self + } } impl serialize::UseSpecializedEncodable for Lazy {} @@ -112,7 +112,7 @@ impl serialize::UseSpecializedDecodable for Lazy {} pub struct LazySeq { pub len: usize, pub position: usize, - _marker: PhantomData + _marker: PhantomData, } impl LazySeq { @@ -124,7 +124,7 @@ impl LazySeq { LazySeq { len: len, position: position, - _marker: PhantomData + _marker: PhantomData, } } @@ -136,7 +136,9 @@ impl LazySeq { impl Copy for LazySeq {} impl Clone for LazySeq { - fn clone(&self) -> Self { *self } + fn clone(&self) -> Self { + *self + } } impl serialize::UseSpecializedEncodable for LazySeq {} @@ -155,7 +157,7 @@ pub enum LazyState { /// Inside a metadata node, with a previous `Lazy` or `LazySeq`. /// The position is a conservative estimate of where that /// previous `Lazy` / `LazySeq` would end (see their comments). 
- Previous(usize) + Previous(usize), } #[derive(RustcEncodable, RustcDecodable)] @@ -185,13 +187,13 @@ pub struct CrateRoot { pub struct CrateDep { pub name: ast::Name, pub hash: hir::svh::Svh, - pub explicitly_linked: bool + pub explicitly_linked: bool, } #[derive(RustcEncodable, RustcDecodable)] pub struct TraitImpls { pub trait_id: (u32, DefIndex), - pub impls: LazySeq + pub impls: LazySeq, } #[derive(RustcEncodable, RustcDecodable)] @@ -199,7 +201,7 @@ pub struct MacroDef { pub name: ast::Name, pub attrs: Vec, pub span: Span, - pub body: String + pub body: String, } #[derive(RustcEncodable, RustcDecodable)] @@ -219,7 +221,7 @@ pub struct Entry<'tcx> { pub predicates: Option>>, pub ast: Option>>, - pub mir: Option>> + pub mir: Option>>, } #[derive(Copy, Clone, RustcEncodable, RustcDecodable)] @@ -245,18 +247,18 @@ pub enum EntryKind<'tcx> { DefaultImpl(Lazy>), Method(Lazy>), AssociatedType(AssociatedContainer), - AssociatedConst(AssociatedContainer) + AssociatedConst(AssociatedContainer), } #[derive(RustcEncodable, RustcDecodable)] pub struct ModData { - pub reexports: LazySeq + pub reexports: LazySeq, } #[derive(RustcEncodable, RustcDecodable)] pub struct FnData { pub constness: hir::Constness, - pub arg_names: LazySeq + pub arg_names: LazySeq, } #[derive(RustcEncodable, RustcDecodable)] @@ -266,7 +268,7 @@ pub struct VariantData { /// If this is a struct's only variant, this /// is the index of the "struct ctor" item. - pub struct_ctor: Option + pub struct_ctor: Option, } #[derive(RustcEncodable, RustcDecodable)] @@ -275,7 +277,7 @@ pub struct TraitData<'tcx> { pub paren_sugar: bool, pub has_default_impl: bool, pub trait_ref: Lazy>, - pub super_predicates: Lazy> + pub super_predicates: Lazy>, } #[derive(RustcEncodable, RustcDecodable)] @@ -283,7 +285,7 @@ pub struct ImplData<'tcx> { pub polarity: hir::ImplPolarity, pub parent_impl: Option, pub coerce_unsized_kind: Option, - pub trait_ref: Option>> + pub trait_ref: Option>>, } /// Describes whether the container of an associated item @@ -294,21 +296,17 @@ pub enum AssociatedContainer { TraitRequired, TraitWithDefault, ImplDefault, - ImplFinal + ImplFinal, } impl AssociatedContainer { pub fn with_def_id(&self, def_id: DefId) -> ty::ImplOrTraitItemContainer { match *self { AssociatedContainer::TraitRequired | - AssociatedContainer::TraitWithDefault => { - ty::TraitContainer(def_id) - } + AssociatedContainer::TraitWithDefault => ty::TraitContainer(def_id), AssociatedContainer::ImplDefault | - AssociatedContainer::ImplFinal => { - ty::ImplContainer(def_id) - } + AssociatedContainer::ImplFinal => ty::ImplContainer(def_id), } } @@ -318,7 +316,7 @@ impl AssociatedContainer { AssociatedContainer::TraitWithDefault | AssociatedContainer::ImplDefault | - AssociatedContainer::ImplFinal => true + AssociatedContainer::ImplFinal => true, } } @@ -328,7 +326,7 @@ impl AssociatedContainer { AssociatedContainer::TraitWithDefault | AssociatedContainer::ImplDefault => hir::Defaultness::Default, - AssociatedContainer::ImplFinal => hir::Defaultness::Final + AssociatedContainer::ImplFinal => hir::Defaultness::Final, } } } @@ -337,11 +335,11 @@ impl AssociatedContainer { pub struct MethodData<'tcx> { pub fn_data: FnData, pub container: AssociatedContainer, - pub explicit_self: Lazy> + pub explicit_self: Lazy>, } #[derive(RustcEncodable, RustcDecodable)] pub struct ClosureData<'tcx> { pub kind: ty::ClosureKind, - pub ty: Lazy> + pub ty: Lazy>, } From 09227b17f4daf86afb269669671f197b4d481717 Mon Sep 17 00:00:00 2001 From: Duncan Date: Tue, 25 Oct 2016 14:22:54 
+1300 Subject: [PATCH 14/17] Vec docs: fix broken links and make quoting consistent --- src/libcollections/vec.rs | 44 +++++++++++++++++++-------------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs index df76140f687a7..7fdf7e903d5ca 100644 --- a/src/libcollections/vec.rs +++ b/src/libcollections/vec.rs @@ -166,7 +166,7 @@ use super::range::RangeArgument; /// # Slicing /// /// A `Vec` can be mutable. Slices, on the other hand, are read-only objects. -/// To get a slice, use "&". Example: +/// To get a slice, use `&`. Example: /// /// ``` /// fn read_slice(slice: &[usize]) { @@ -203,33 +203,33 @@ use super::range::RangeArgument; /// /// # Guarantees /// -/// Due to its incredibly fundamental nature, Vec makes a lot of guarantees +/// Due to its incredibly fundamental nature, `Vec` makes a lot of guarantees /// about its design. This ensures that it's as low-overhead as possible in /// the general case, and can be correctly manipulated in primitive ways /// by unsafe code. Note that these guarantees refer to an unqualified `Vec`. /// If additional type parameters are added (e.g. to support custom allocators), /// overriding their defaults may change the behavior. /// -/// Most fundamentally, Vec is and always will be a (pointer, capacity, length) +/// Most fundamentally, `Vec` is and always will be a (pointer, capacity, length) /// triplet. No more, no less. The order of these fields is completely /// unspecified, and you should use the appropriate methods to modify these. /// The pointer will never be null, so this type is null-pointer-optimized. /// /// However, the pointer may not actually point to allocated memory. In particular, -/// if you construct a Vec with capacity 0 via [`Vec::new()`], [`vec![]`][`vec!`], +/// if you construct a `Vec` with capacity 0 via [`Vec::new()`], [`vec![]`][`vec!`], /// [`Vec::with_capacity(0)`][`Vec::with_capacity`], or by calling [`shrink_to_fit()`] /// on an empty Vec, it will not allocate memory. Similarly, if you store zero-sized /// types inside a `Vec`, it will not allocate space for them. *Note that in this case -/// the `Vec` may not report a [`capacity()`] of 0*. Vec will allocate if and only +/// the `Vec` may not report a [`capacity()`] of 0*. `Vec` will allocate if and only /// if [`mem::size_of::()`]` * capacity() > 0`. In general, `Vec`'s allocation /// details are subtle enough that it is strongly recommended that you only -/// free memory allocated by a Vec by creating a new Vec and dropping it. +/// free memory allocated by a `Vec` by creating a new `Vec` and dropping it. /// /// If a `Vec` *has* allocated memory, then the memory it points to is on the heap /// (as defined by the allocator Rust is configured to use by default), and its /// pointer points to [`len()`] initialized elements in order (what you would see -/// if you coerced it to a slice), followed by `[capacity()][`capacity()`] - -/// [len()][`len()`]` logically uninitialized elements. +/// if you coerced it to a slice), followed by [`capacity()`]` - `[`len()`] +/// logically uninitialized elements. /// /// `Vec` will never perform a "small optimization" where elements are actually /// stored on the stack for two reasons: @@ -249,8 +249,8 @@ use super::range::RangeArgument; /// [`shrink_to_fit`][`shrink_to_fit()`]. /// /// [`push`] and [`insert`] will never (re)allocate if the reported capacity is -/// sufficient. 
[`push`] and [`insert`] *will* (re)allocate if `[len()][`len()`] -/// == [capacity()][`capacity()`]`. That is, the reported capacity is completely +/// sufficient. [`push`] and [`insert`] *will* (re)allocate if +/// [`len()`]` == `[`capacity()`]. That is, the reported capacity is completely /// accurate, and can be relied on. It can even be used to manually free the memory /// allocated by a `Vec` if desired. Bulk insertion methods *may* reallocate, even /// when not necessary. @@ -261,11 +261,10 @@ use super::range::RangeArgument; /// strategy is used will of course guarantee `O(1)` amortized [`push`]. /// /// `vec![x; n]`, `vec![a, b, c, d]`, and -/// [`Vec::with_capacity(n)`][`Vec::with_capacity`], will all -/// produce a `Vec` with exactly the requested capacity. If `[len()][`len()`] == -/// [capacity()][`capacity()`]`, (as is the case for the [`vec!`] macro), then a -/// `Vec` can be converted to and from a [`Box<[T]>`] without reallocating or -/// moving the elements. +/// [`Vec::with_capacity(n)`][`Vec::with_capacity`], will all produce a `Vec` +/// with exactly the requested capacity. If [`len()`]` == `[`capacity()`], +/// (as is the case for the [`vec!`] macro), then a `Vec` can be converted to +/// and from a [`Box<[T]>`][owned slice] without reallocating or moving the elements. /// /// `Vec` will not specifically overwrite any data that is removed from it, /// but also won't specifically preserve it. Its uninitialized memory is @@ -292,7 +291,7 @@ use super::range::RangeArgument; /// [`push`]: ../../std/vec/struct.Vec.html#method.push /// [`insert`]: ../../std/vec/struct.Vec.html#method.insert /// [`reserve`]: ../../std/vec/struct.Vec.html#method.reserve -/// [`Box<[T]>`]: ../../std/boxed/struct.Box.html +/// [owned slice]: ../../std/boxed/struct.Box.html #[stable(feature = "rust1", since = "1.0.0")] pub struct Vec { buf: RawVec, @@ -329,9 +328,10 @@ impl Vec { /// reallocating. If `capacity` is 0, the vector will not allocate. /// /// It is important to note that this function does not specify the *length* - /// of the returned vector, but only the *capacity*. (For an explanation of - /// the difference between length and capacity, see the main `Vec` docs - /// above, 'Capacity and reallocation'.) + /// of the returned vector, but only the *capacity*. For an explanation of + /// the difference between length and capacity, see *[Capacity and reallocation]*. + /// + /// [Capacity and reallocation]: #capacity-and-reallocation /// /// # Examples /// @@ -497,13 +497,13 @@ impl Vec { self.buf.shrink_to_fit(self.len); } - /// Converts the vector into [`Box<[T]>`]. + /// Converts the vector into [`Box<[T]>`][owned slice]. /// /// Note that this will drop any excess capacity. Calling this and /// converting back to a vector with [`into_vec()`] is equivalent to calling /// [`shrink_to_fit()`]. /// - /// [`Box<[T]>`]: ../../std/boxed/struct.Box.html + /// [owned slice]: ../../std/boxed/struct.Box.html /// [`into_vec()`]: ../../std/primitive.slice.html#method.into_vec /// [`shrink_to_fit()`]: #method.shrink_to_fit /// @@ -779,7 +779,7 @@ impl Vec { /// Retains only the elements specified by the predicate. /// - /// In other words, remove all elements `e` such that `f(&e)` returns false. + /// In other words, remove all elements `e` such that `f(&e)` returns `false`. /// This method operates in place and preserves the order of the retained /// elements. 
/// From 82d4200966ac3a6544390dbfd867e120b97417fd Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Sat, 22 Oct 2016 03:33:36 +0300 Subject: [PATCH 15/17] Prohibit patterns in trait methods without bodies --- src/doc/reference.md | 6 ++--- src/librustc/lint/builtin.rs | 9 +++++++- src/librustc_lint/lib.rs | 4 ++++ src/librustc_passes/ast_validation.rs | 10 +++++++- src/librustc_passes/diagnostics.rs | 1 + .../compile-fail/no-patterns-in-args-2.rs | 23 +++++++++++++++++++ src/test/incremental/hashes/trait_defs.rs | 8 +++---- .../run-pass/by-value-self-in-mut-slot.rs | 2 +- src/test/run-pass/uniq-self-in-mut-slot.rs | 2 +- 9 files changed, 54 insertions(+), 11 deletions(-) create mode 100644 src/test/compile-fail/no-patterns-in-args-2.rs diff --git a/src/doc/reference.md b/src/doc/reference.md index 84f459bf8724f..94b4557098566 100644 --- a/src/doc/reference.md +++ b/src/doc/reference.md @@ -4023,9 +4023,9 @@ Methods that take either `self` or `Box` can optionally place them in a mutable variable by prefixing them with `mut` (similar to regular arguments): ``` -trait Changer { - fn change(mut self) -> Self; - fn modify(mut self: Box) -> Box; +trait Changer: Sized { + fn change(mut self) {} + fn modify(mut self: Box) {} } ``` diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs index 7fc3f638979ff..d7ec544c95461 100644 --- a/src/librustc/lint/builtin.rs +++ b/src/librustc/lint/builtin.rs @@ -192,6 +192,12 @@ declare_lint! { "safe access to extern statics was erroneously allowed" } +declare_lint! { + pub PATTERNS_IN_FNS_WITHOUT_BODY, + Warn, + "patterns in functions without body were erroneously allowed" +} + /// Does nothing as a lint pass, but registers some `Lint`s /// which are used by other parts of the compiler. #[derive(Copy, Clone)] @@ -228,7 +234,8 @@ impl LintPass for HardwiredLints { SUPER_OR_SELF_IN_GLOBAL_PATH, HR_LIFETIME_IN_ASSOC_TYPE, LIFETIME_UNDERSCORE, - SAFE_EXTERN_STATICS + SAFE_EXTERN_STATICS, + PATTERNS_IN_FNS_WITHOUT_BODY ) } } diff --git a/src/librustc_lint/lib.rs b/src/librustc_lint/lib.rs index 74483b89cea22..ef7bab8de8c23 100644 --- a/src/librustc_lint/lib.rs +++ b/src/librustc_lint/lib.rs @@ -212,6 +212,10 @@ pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) { id: LintId::of(SAFE_EXTERN_STATICS), reference: "issue 36247 ", }, + FutureIncompatibleInfo { + id: LintId::of(PATTERNS_IN_FNS_WITHOUT_BODY), + reference: "issue #35203 ", + }, ]); // Register renamed and removed lints diff --git a/src/librustc_passes/ast_validation.rs b/src/librustc_passes/ast_validation.rs index 5096a574e2ba5..828efbf373131 100644 --- a/src/librustc_passes/ast_validation.rs +++ b/src/librustc_passes/ast_validation.rs @@ -190,8 +190,16 @@ impl<'a> Visitor for AstValidator<'a> { } ItemKind::Trait(.., ref trait_items) => { for trait_item in trait_items { - if let TraitItemKind::Method(ref sig, _) = trait_item.node { + if let TraitItemKind::Method(ref sig, ref block) = trait_item.node { self.check_trait_fn_not_const(sig.constness); + if block.is_none() { + self.check_decl_no_pat(&sig.decl, |span, _| { + self.session.add_lint(lint::builtin::PATTERNS_IN_FNS_WITHOUT_BODY, + trait_item.id, span, + "patterns aren't allowed in methods \ + without bodies".to_string()); + }); + } } } } diff --git a/src/librustc_passes/diagnostics.rs b/src/librustc_passes/diagnostics.rs index 89b8aa81411b3..a54f8b72be04e 100644 --- a/src/librustc_passes/diagnostics.rs +++ b/src/librustc_passes/diagnostics.rs @@ -228,4 +228,5 @@ pub impl Foo for Bar { 
register_diagnostics! { E0472, // asm! is unsupported on this target E0561, // patterns aren't allowed in function pointer types + E0570, // patterns aren't allowed in methods without bodies } diff --git a/src/test/compile-fail/no-patterns-in-args-2.rs b/src/test/compile-fail/no-patterns-in-args-2.rs new file mode 100644 index 0000000000000..385d012cadee6 --- /dev/null +++ b/src/test/compile-fail/no-patterns-in-args-2.rs @@ -0,0 +1,23 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![deny(patterns_in_fns_without_body)] + +trait Tr { + fn f1(mut arg: u8); //~ ERROR patterns aren't allowed in methods without bodies + //~^ WARN was previously accepted + fn f2(&arg: u8); //~ ERROR patterns aren't allowed in methods without bodies + //~^ WARN was previously accepted + fn g1(arg: u8); // OK + fn g2(_: u8); // OK + fn g3(u8); // OK +} + +fn main() {} diff --git a/src/test/incremental/hashes/trait_defs.rs b/src/test/incremental/hashes/trait_defs.rs index 937f053e320f6..391c2e75ba4d5 100644 --- a/src/test/incremental/hashes/trait_defs.rs +++ b/src/test/incremental/hashes/trait_defs.rs @@ -264,8 +264,8 @@ trait TraitChangeModeSelfRefToMut { #[cfg(cfail1)] -trait TraitChangeModeSelfOwnToMut { - fn method(self); +trait TraitChangeModeSelfOwnToMut: Sized { + fn method(self) {} } #[cfg(not(cfail1))] @@ -273,8 +273,8 @@ trait TraitChangeModeSelfOwnToMut { #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_metadata_dirty(cfg="cfail2")] #[rustc_metadata_clean(cfg="cfail3")] -trait TraitChangeModeSelfOwnToMut { - fn method(mut self); +trait TraitChangeModeSelfOwnToMut: Sized { + fn method(mut self) {} } diff --git a/src/test/run-pass/by-value-self-in-mut-slot.rs b/src/test/run-pass/by-value-self-in-mut-slot.rs index 5bbdec95b1576..846b695c35b3d 100644 --- a/src/test/run-pass/by-value-self-in-mut-slot.rs +++ b/src/test/run-pass/by-value-self-in-mut-slot.rs @@ -14,7 +14,7 @@ struct X { } trait Changer { - fn change(mut self) -> Self; + fn change(self) -> Self; } impl Changer for X { diff --git a/src/test/run-pass/uniq-self-in-mut-slot.rs b/src/test/run-pass/uniq-self-in-mut-slot.rs index baca157a488b7..7910380abeef0 100644 --- a/src/test/run-pass/uniq-self-in-mut-slot.rs +++ b/src/test/run-pass/uniq-self-in-mut-slot.rs @@ -17,7 +17,7 @@ struct X { } trait Changer { - fn change(mut self: Box) -> Box; + fn change(self: Box) -> Box; } impl Changer for X { From a16626fc422f9fdcd1d02f56b628f764d5282261 Mon Sep 17 00:00:00 2001 From: Ulrik Sverdrup Date: Tue, 25 Oct 2016 15:21:49 +0200 Subject: [PATCH 16/17] iter: Implement .fold() for .chain() Chain can do something interesting here where it passes on the fold into its inner iterators. This lets the underlying iterator's custom fold() be used, and skips the regular chain logic in next.
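To illustrate the idea, here is a rough standalone sketch (not the libcore code; `chain_fold` is a hypothetical free function, and it ignores the `ChainState` bookkeeping that the real method still consults when iteration has already started): folding a chain boils down to folding the front iterator and then handing the accumulator to the back iterator's own `fold`.

    // Illustrative sketch only: fold `a` to completion, then keep folding `b`
    // with the same accumulator. Each inner iterator runs its own (possibly
    // specialized) `fold`, and no per-item state check is needed.
    // Passing `&mut f` works because `&mut F` is `FnMut` whenever `F` is.
    fn chain_fold<A, B, Acc, F>(a: A, b: B, init: Acc, mut f: F) -> Acc
        where A: Iterator,
              B: Iterator<Item = A::Item>,
              F: FnMut(Acc, A::Item) -> Acc
    {
        let acc = a.fold(init, &mut f);
        b.fold(acc, &mut f)
    }

    fn main() {
        let xs = [1, 2, 3];
        let ys = [4, 5, 6];
        // Same result as xs.iter().chain(ys.iter()).fold(0, |acc, &x| acc + x)
        let sum = chain_fold(xs.iter(), ys.iter(), 0, |acc, &x| acc + x);
        assert_eq!(sum, 21);
    }

The implementation below follows the same shape, except that it matches on `ChainState` first, so a partially consumed chain only folds the elements that remain.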
--- src/libcore/iter/mod.rs | 19 +++++++++++++++++++ src/libcoretest/iter.rs | 12 ++++++++++++ 2 files changed, 31 insertions(+) diff --git a/src/libcore/iter/mod.rs b/src/libcore/iter/mod.rs index 2c3b8864a115e..df4f5e5c57643 100644 --- a/src/libcore/iter/mod.rs +++ b/src/libcore/iter/mod.rs @@ -550,6 +550,25 @@ impl Iterator for Chain where } } + fn fold(self, init: Acc, mut f: F) -> Acc + where F: FnMut(Acc, Self::Item) -> Acc, + { + let mut accum = init; + match self.state { + ChainState::Both | ChainState::Front => { + accum = self.a.fold(accum, &mut f); + } + _ => { } + } + match self.state { + ChainState::Both | ChainState::Back => { + accum = self.b.fold(accum, &mut f); + } + _ => { } + } + accum + } + #[inline] fn nth(&mut self, mut n: usize) -> Option { match self.state { diff --git a/src/libcoretest/iter.rs b/src/libcoretest/iter.rs index 27eb25537f31b..58b6444ef88cd 100644 --- a/src/libcoretest/iter.rs +++ b/src/libcoretest/iter.rs @@ -985,6 +985,18 @@ fn test_empty() { assert_eq!(it.next(), None); } +#[test] +fn test_chain_fold() { + let xs = [1, 2, 3]; + let ys = [1, 2, 0]; + + let mut iter = xs.iter().chain(&ys); + iter.next(); + let mut result = Vec::new(); + iter.fold((), |(), &elt| result.push(elt)); + assert_eq!(&[2, 3, 1, 2, 0], &result[..]); +} + #[bench] fn bench_rposition(b: &mut Bencher) { let it: Vec = (0..300).collect(); From d230a7ecc3a8c5dd16a953d3331cbf91c8b44b8f Mon Sep 17 00:00:00 2001 From: Manish Goregaokar Date: Wed, 26 Oct 2016 07:20:09 -0400 Subject: [PATCH 17/17] Remove duplicate error code (fixup #37378) --- src/librustc_passes/diagnostics.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/librustc_passes/diagnostics.rs b/src/librustc_passes/diagnostics.rs index a54f8b72be04e..89b8aa81411b3 100644 --- a/src/librustc_passes/diagnostics.rs +++ b/src/librustc_passes/diagnostics.rs @@ -228,5 +228,4 @@ pub impl Foo for Bar { register_diagnostics! { E0472, // asm! is unsupported on this target E0561, // patterns aren't allowed in function pointer types - E0570, // patterns aren't allowed in methods without bodies }